From 6ac07243621960fcc9f6bdb75ef9f6e6e7820aaa Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Sun, 18 Aug 2019 01:29:24 +0530 Subject: [PATCH 001/141] initial --- scripts/run_frontend_tests.py | 86 ++++++++++++ scripts/setup.py | 258 ++++++++++++++++++++++++++++++++++ 2 files changed, 344 insertions(+) create mode 100644 scripts/run_frontend_tests.py create mode 100644 scripts/setup.py diff --git a/scripts/run_frontend_tests.py b/scripts/run_frontend_tests.py new file mode 100644 index 000000000000..53d8f958b60a --- /dev/null +++ b/scripts/run_frontend_tests.py @@ -0,0 +1,86 @@ +# Copyright 2019 The Oppia Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS-IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +########################################################################## + +# INSTRUCTIONS: +# +# Run this script from the oppia root folder: +# bash scripts/run_frontend_tests.sh +# +# Optional arguments: +# --skip-install=true/false If true, skips installing dependencies. The +# default value is false. +# --run-minified-tests=true/false Whether to run frontend karma tests on both +# minified and non-minified code. The default value is false. +# +# The root folder MUST be named 'oppia'. +# It runs unit tests for frontend JavaScript code (using Karma). +# +# Note: You can replace 'it' with 'fit' or 'describe' with 'fdescribe' to run a +# single test or test suite. + +import argparse +import os +import subprocess + +from . 
import build + +_PARSER = argparse.ArgumentParser() +_PARSER.add_argument( + '--generate_coverage_report', + help='optional; if specified, generates a coverage report', + action='store_true') +os.environ['DEFAULT_SKIP_INSTALLING_THIRD_PARTY_LIBS'] = 'false' +os.environ['DEFAULT_RUN_MINIFIED_TESTS'] = 'false' +maybeInstallDependencies "$@" +XVFB_PREFIX='/usr/bin/xvfb-run' + + +def main(): + """Runs the frontend tests.""" + print '' + print ' View interactive frontend test coverage reports by navigating to' + print '' + print ' ../karma_coverage_reports' + print '' + print ' on your filesystem.' + print '' + print '' + print ' Running test in development environment' + print '' + + build.build() + + start_tests_cmd = ( + '%s node_modules/karma/bin/karma start core/tests/karma.conf.ts' + % XVFB_PREFIX) + subprocess.call(start_tests_cmd) + + if os.environ['RUN_MINIFIED_TESTS'] == "true": + print '' + print ' Running test in production environment' + print '' + + os.system('scripts/build.py prod_env minify_third_party_libs_only') + + start_tests_cmd = ( + '%s node_modules/karma/bin/karma start ' + 'core/tests/karma.conf.ts --prodEnv' % XVFB_PREFIX) + subprocess.call(start_tests_cmd) + + print 'Done!' + +if __name__ == '__main__': + main() diff --git a/scripts/setup.py b/scripts/setup.py new file mode 100644 index 000000000000..f416a644d1fb --- /dev/null +++ b/scripts/setup.py @@ -0,0 +1,258 @@ +# Copyright 2019 The Oppia Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an 'AS-IS' BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +########################################################################## + +# This file should not be invoked directly, but sourced from other sh scripts. +# Bash execution environent set up for all scripts. + +import argparse +import os +import shutil +import subprocess +import sys +import tarfile + +from . import build + +_PARSER = argparse.ArgumentParser() +_PARSER.add_argument( + '--skip_install', + help='optional; if specified, skips installing dependencies', + action='store_true') +_PARSER.add_argument( + '--run_minified_tests', + help='optional; if specified, runs frontend karma tests on both minified ' + 'and non-minified code', + action='store_true') +_PARSER.add_argument( + '--nojsrepl', + help='optional; if specified, skips installation of skulpt.', + action='store_true') +_PARSER.add_argument( + '--noskulpt', + help='optional; if specified, skips installation of skulpt.', + action='store_true') + + +def delete_directory_tree(directory_path): + """Recursively delete an existing directory tree. Does not do anything if + directory does not exists. + + Args: + directory_path: str. Directory path to be deleted. + """ + if not os.path.exists(directory_path): + return + shutil.rmtree(directory_path) + + +def maybeInstallDependencies( + default_skip_installing_third_party_libs, default_run_minified_tests): + # Parse additional command line arguments. 
+ SKIP_INSTALLING_THIRD_PARTY_LIBS = default_skip_installing_third_party_libs + RUN_MINIFIED_TESTS = default_run_minified_tests + parsed_args = _PARSER.parse_args() + SKIP_INSTALLING_THIRD_PARTY_LIBS = parsed_args.skip_install + RUN_MINIFIED_TESTS = parsed_args.run_minified_tests + + + if SKIP_INSTALLING_THIRD_PARTY_LIBS == 'false': + # Install third party dependencies + subprocess.call('scripts/install_third_party.sh', shell=True) + + # Ensure that generated JS and CSS files are in place before running the + # tests. + print '' + print ' Running build task with concatenation only ' + print '' + + build.build() + + if RUN_MINIFIED_TESTS == 'true': + print '' + print ' Running build task with concatenation and minification' + print '' + + subprocess.call('scripts/build.py --prod_env'.split()) + + +if [ '$SETUP_DONE' ]; then + print 'Environment setup completed.' + return 0 +fi + + if parsed_args.nojsrepl or parsed_args.noskulpt: + NO_SKULPT=true + +export NO_SKULPT + +EXPECTED_PWD='oppia' +# The second option allows this script to also be run from deployment folders. +if not os.getcwd().endswith(EXPECTED_PWD) and not os.getcwd().endswith( + 'deploy-'): + print '' + print ' WARNING This script should be run from the oppia/ root folder.' + print '' + sys.exit(1) + +# Set COMMON_DIR to the absolute path of the directory above OPPIA_DIR. This +# is necessary becaue COMMON_DIR (or subsequent variables which refer to it) +# may use it in a situation where relative paths won't work as expected (such +# as $PYTHONPATH). +CURR_DIR = os.path.abspath(os.getcwd()) +OPPIA_TOOLS_DIR = os.path.join(CURR_DIR, '..', 'oppia_tools') +export COMMON_DIR=$(cd $OPPIA_DIR/..; pwd) +export TOOLS_DIR=$COMMON_DIR/oppia_tools +export THIRD_PARTY_DIR=$OPPIA_DIR/third_party +export NODE_MODULE_DIR=$OPPIA_DIR/node_modules +export ME=$(whoami) + +os.mkdir(OPPIA_TOOLS_DIR) +os.mkdir('third_party/') +os.mkdir('node_modules/') + +# Adjust the path to include a reference to node. 
+NODE_PATH = os.path.join(TOOLS_DIR, 'node-10.15.3') +export PATH=$NODE_PATH/bin:$PATH +export MACHINE_TYPE=`uname -m` +export OS=`uname` + +os_info = os.uname() +if os_info[0] != 'Darwin' and os_info[0] != 'Linux': + # Node is a requirement for all installation scripts. Here, we check if the + # OS supports node.js installation; if not, we exit with an error. + print '' + print ' WARNING: Unsupported OS for installation of node.js.' + print ' If you are running this script on Windows, see the instructions' + print ' here regarding installation of node.js:' + print '' + print ' https://github.com/oppia/oppia/wiki/Installing-Oppia-%28Windows%29' + print '' + print ' STATUS: Installation completed except for node.js. Exiting.' + print '' + sys.exit(1) +else + # Otherwise, npm will be installed locally, in NODE_PATH. + export NPM_CMD=$NODE_PATH/bin/npm +fi + +export NPM_INSTALL='$NPM_CMD install' + +# Download and install node.js. +print 'Checking if node.js is installed in %s' % TOOLS_DIR +if not os.path.exists(NODE_PATH): + print 'Installing Node.js' + if os_info[0] == 'Darwin': + if os_info[4] == 'x86_64': + NODE_FILE_NAME = 'node-v10.15.3-darwin-x64' + else + NODE_FILE_NAME = 'node-v10.15.3-darwin-x86' + elif os_info[0] == 'Linux': + if os_info[4] == 'x86_64': + NODE_FILE_NAME = 'node-v10.15.3-linux-x64' + else + NODE_FILE_NAME = 'node-v10.15.3-linux-x86' + +urllib.urlretrieve( + 'https://nodejs.org/dist/v10.15.3/%s.tar.gz' % NODE_FILE_NAME, + filename='node-download.tgz') +tar = tarfile.open('node-download.tgz') +tar.extractall(path=TOOLS_DIR) +tar.close() +delete_directory_tree('node-download.tgz') + + # Change ownership of $NODE_MODULE_DIR. + # Note: on some machines, these commands seem to take quite a long time. + os.chown('node_modules/', os.getuid(), -1) + os.chmod('node_modules/', 744) + +# Adjust path to support the default Chrome locations for Unix, Windows and Mac OS. 
+if [ '$TRAVIS' == true ]; then + export CHROME_BIN='/usr/bin/chromium-browser' +elif [ '$VAGRANT' == true ] || [ -f '/etc/is_vagrant_vm' ]; then + # XVFB is required for headless testing in Vagrant + sudo apt-get install xvfb chromium-browser + export CHROME_BIN='/usr/bin/chromium-browser' + # Used in frontend and e2e tests. Only gets set if using Vagrant VM. + export XVFB_PREFIX='/usr/bin/xvfb-run' + # Enforce proper ownership on oppia, oppia_tools, and node_modules or else NPM installs will fail. + sudo chown -R vagrant.vagrant /home/vagrant/oppia /home/vagrant/oppia_tools /home/vagrant/node_modules +elif [ -f '/usr/bin/google-chrome' ]; then + # Unix. + export CHROME_BIN='/usr/bin/google-chrome' +elif [ -f '/usr/bin/chromium-browser' ]; then + # Unix. + export CHROME_BIN='/usr/bin/chromium-browser' +elif [ -f '/c/Program Files (x86)/Google/Chrome/Application/chrome.exe' ]; then + # Windows. + export CHROME_BIN='/c/Program Files (x86)/Google/Chrome/Application/chrome.exe' +elif [ -f '/mnt/c/Program Files (x86)/Google/Chrome/Application/chrome.exe' ]; then + # WSL + export CHROME_BIN='/mnt/c/Program Files (x86)/Google/Chrome/Application/chrome.exe' +elif [ -f '/Applications/Google Chrome.app/Contents/MacOS/Google Chrome' ]; then + # Mac OS. + export CHROME_BIN='/Applications/Google Chrome.app/Contents/MacOS/Google Chrome' +else + print 'Chrome is not found, stopping ...' + exit 1 +fi + +# This function takes a command for python as its only input. +# It checks this input for a specific version of python and returns false +# if it does not match the expected prefix. +function test_python_version() { + EXPECTED_PYTHON_VERSION_PREFIX='2.7' + PYTHON_VERSION=$($1 --version 2>&1) + if [[ $PYTHON_VERSION =~ Python[[:space:]](.+) ]]; then + PYTHON_VERSION=${BASH_REMATCH[1]} + else + print 'Unrecognizable Python command output: ${PYTHON_VERSION}' + # Return a false condition if output of tested command is unrecognizable. 
+ return 1 + fi + if [[ '${PYTHON_VERSION}' = ${EXPECTED_PYTHON_VERSION_PREFIX}* ]]; then + # Return 0 to indicate a successful match. + # Return 1 to indicate a failed match. + return 0 + else + return 1 + fi +} + +# First, check the default Python command (which should be found within the user's $PATH). +PYTHON_CMD='python' +# Test whether the 'python' or 'python2.7' commands exist and finally fails when +# no suitable python version 2.7 can be found. +if ! test_python_version $PYTHON_CMD; then + print 'Unable to find 'python'. Trying python2.7 instead...' + PYTHON_CMD='python2.7' + if ! test_python_version $PYTHON_CMD; then + print 'Could not find a suitable Python environment. Exiting.' + # If OS is Windows, print helpful error message about adding Python to path. + if [ ! '${OS}' == 'Darwin' -a ! '${OS}' == 'Linux' ]; then + print 'It looks like you are using Windows. If you have Python installed,' + print 'make sure it is in your PATH and that PYTHONPATH is set.' + print 'If you have two versions of Python (ie, Python 2.7 and 3), specify 2.7 before other versions of Python when setting the PATH.' + print 'Here are some helpful articles:' + print 'http://docs.python-guide.org/en/latest/starting/install/win/' + print 'https://stackoverflow.com/questions/3701646/how-to-add-to-the-pythonpath-in-windows-7' + fi + # Exit when no suitable Python environment can be found. 
+ return 1 + fi +fi +export PYTHON_CMD + +export SETUP_DONE=true From 13c37f987507340928529060b1511b1f2a3823a9 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Sun, 18 Aug 2019 20:40:10 +0530 Subject: [PATCH 002/141] fix lint --- scripts/run_frontend_tests.py | 52 ++--- scripts/setup.py | 360 ++++++++++++++++------------------ 2 files changed, 195 insertions(+), 217 deletions(-) diff --git a/scripts/run_frontend_tests.py b/scripts/run_frontend_tests.py index 53d8f958b60a..abbc0de5600f 100644 --- a/scripts/run_frontend_tests.py +++ b/scripts/run_frontend_tests.py @@ -12,44 +12,47 @@ # See the License for the specific language governing permissions and # limitations under the License. -########################################################################## +"""INSTRUCTIONS: +Run this script from the oppia root folder: + python -m scripts.run_frontend_tests -# INSTRUCTIONS: -# -# Run this script from the oppia root folder: -# bash scripts/run_frontend_tests.sh -# -# Optional arguments: -# --skip-install=true/false If true, skips installing dependencies. The -# default value is false. -# --run-minified-tests=true/false Whether to run frontend karma tests on both -# minified and non-minified code. The default value is false. -# -# The root folder MUST be named 'oppia'. -# It runs unit tests for frontend JavaScript code (using Karma). -# -# Note: You can replace 'it' with 'fit' or 'describe' with 'fdescribe' to run a -# single test or test suite. +Optional arguments: + --skip_install. If specified, skips installing dependencies. + --run_minified_tests. If specified, runs frontend karma tests on both + minified and non-minified code. + +The root folder MUST be named 'oppia'. +It runs unit tests for frontend JavaScript code (using Karma). + +Note: You can replace 'it' with 'fit' or 'describe' with 'fdescribe' to run a +single test or test suite. +""" import argparse import os import subprocess from . import build +from . 
import setup _PARSER = argparse.ArgumentParser() _PARSER.add_argument( - '--generate_coverage_report', - help='optional; if specified, generates a coverage report', + '--skip_install', + help='optional; if specified, skips installing dependencies', + action='store_true') +_PARSER.add_argument( + '--run_minified_tests', + help='optional; if specified, runs frontend karma tests on both minified ' + 'and non-minified code', action='store_true') -os.environ['DEFAULT_SKIP_INSTALLING_THIRD_PARTY_LIBS'] = 'false' -os.environ['DEFAULT_RUN_MINIFIED_TESTS'] = 'false' -maybeInstallDependencies "$@" -XVFB_PREFIX='/usr/bin/xvfb-run' +XVFB_PREFIX = os.environ['XVFB_PREFIX'] def main(): """Runs the frontend tests.""" + parsed_args = _PARSER.parse_args() + setup.maybe_install_dependencies( + parsed_args.skip_install, parsed_args.run_minified_tests) print '' print ' View interactive frontend test coverage reports by navigating to' print '' @@ -68,7 +71,7 @@ def main(): % XVFB_PREFIX) subprocess.call(start_tests_cmd) - if os.environ['RUN_MINIFIED_TESTS'] == "true": + if parsed_args.run_minified_tests is True: print '' print ' Running test in production environment' print '' @@ -82,5 +85,6 @@ def main(): print 'Done!' + if __name__ == '__main__': main() diff --git a/scripts/setup.py b/scripts/setup.py index f416a644d1fb..ac513e1ed6e4 100644 --- a/scripts/setup.py +++ b/scripts/setup.py @@ -12,10 +12,9 @@ # See the License for the specific language governing permissions and # limitations under the License. -########################################################################## - -# This file should not be invoked directly, but sourced from other sh scripts. -# Bash execution environent set up for all scripts. +"""This file should not be invoked directly, but called from other Python +scripts. Python execution environent set up for all scripts. +""" import argparse import os @@ -23,6 +22,7 @@ import subprocess import sys import tarfile +import urllib from . 
import build @@ -58,201 +58,175 @@ def delete_directory_tree(directory_path): shutil.rmtree(directory_path) -def maybeInstallDependencies( - default_skip_installing_third_party_libs, default_run_minified_tests): - # Parse additional command line arguments. - SKIP_INSTALLING_THIRD_PARTY_LIBS = default_skip_installing_third_party_libs - RUN_MINIFIED_TESTS = default_run_minified_tests - parsed_args = _PARSER.parse_args() - SKIP_INSTALLING_THIRD_PARTY_LIBS = parsed_args.skip_install - RUN_MINIFIED_TESTS = parsed_args.run_minified_tests - - - if SKIP_INSTALLING_THIRD_PARTY_LIBS == 'false': - # Install third party dependencies - subprocess.call('scripts/install_third_party.sh', shell=True) - - # Ensure that generated JS and CSS files are in place before running the - # tests. - print '' - print ' Running build task with concatenation only ' - print '' - - build.build() - - if RUN_MINIFIED_TESTS == 'true': - print '' - print ' Running build task with concatenation and minification' - print '' - - subprocess.call('scripts/build.py --prod_env'.split()) - - -if [ '$SETUP_DONE' ]; then - print 'Environment setup completed.' - return 0 -fi +def maybe_install_dependencies( + skip_installing_third_party_libs, run_minified_tests): + """Parse additional command line arguments.""" - if parsed_args.nojsrepl or parsed_args.noskulpt: - NO_SKULPT=true + if skip_installing_third_party_libs is False: + # Install third party dependencies. + subprocess.call('scripts/install_third_party.sh', shell=True) -export NO_SKULPT + # Ensure that generated JS and CSS files are in place before running the + # tests. + print '' + print 'Running build task with concatenation only ' + print '' + build.build() -EXPECTED_PWD='oppia' -# The second option allows this script to also be run from deployment folders. -if not os.getcwd().endswith(EXPECTED_PWD) and not os.getcwd().endswith( - 'deploy-'): - print '' - print ' WARNING This script should be run from the oppia/ root folder.' 
- print '' - sys.exit(1) + if run_minified_tests is True: + print '' + print 'Running build task with concatenation and minification' + print '' + subprocess.call('scripts/build.py --prod_env'.split()) -# Set COMMON_DIR to the absolute path of the directory above OPPIA_DIR. This -# is necessary becaue COMMON_DIR (or subsequent variables which refer to it) -# may use it in a situation where relative paths won't work as expected (such -# as $PYTHONPATH). -CURR_DIR = os.path.abspath(os.getcwd()) -OPPIA_TOOLS_DIR = os.path.join(CURR_DIR, '..', 'oppia_tools') -export COMMON_DIR=$(cd $OPPIA_DIR/..; pwd) -export TOOLS_DIR=$COMMON_DIR/oppia_tools -export THIRD_PARTY_DIR=$OPPIA_DIR/third_party -export NODE_MODULE_DIR=$OPPIA_DIR/node_modules -export ME=$(whoami) - -os.mkdir(OPPIA_TOOLS_DIR) -os.mkdir('third_party/') -os.mkdir('node_modules/') - -# Adjust the path to include a reference to node. -NODE_PATH = os.path.join(TOOLS_DIR, 'node-10.15.3') -export PATH=$NODE_PATH/bin:$PATH -export MACHINE_TYPE=`uname -m` -export OS=`uname` - -os_info = os.uname() -if os_info[0] != 'Darwin' and os_info[0] != 'Linux': - # Node is a requirement for all installation scripts. Here, we check if the - # OS supports node.js installation; if not, we exit with an error. - print '' - print ' WARNING: Unsupported OS for installation of node.js.' - print ' If you are running this script on Windows, see the instructions' - print ' here regarding installation of node.js:' - print '' - print ' https://github.com/oppia/oppia/wiki/Installing-Oppia-%28Windows%29' - print '' - print ' STATUS: Installation completed except for node.js. Exiting.' - print '' - sys.exit(1) -else - # Otherwise, npm will be installed locally, in NODE_PATH. - export NPM_CMD=$NODE_PATH/bin/npm -fi - -export NPM_INSTALL='$NPM_CMD install' - -# Download and install node.js. 
-print 'Checking if node.js is installed in %s' % TOOLS_DIR -if not os.path.exists(NODE_PATH): - print 'Installing Node.js' - if os_info[0] == 'Darwin': - if os_info[4] == 'x86_64': - NODE_FILE_NAME = 'node-v10.15.3-darwin-x64' - else - NODE_FILE_NAME = 'node-v10.15.3-darwin-x86' - elif os_info[0] == 'Linux': - if os_info[4] == 'x86_64': - NODE_FILE_NAME = 'node-v10.15.3-linux-x64' - else - NODE_FILE_NAME = 'node-v10.15.3-linux-x86' - -urllib.urlretrieve( - 'https://nodejs.org/dist/v10.15.3/%s.tar.gz' % NODE_FILE_NAME, - filename='node-download.tgz') -tar = tarfile.open('node-download.tgz') -tar.extractall(path=TOOLS_DIR) -tar.close() -delete_directory_tree('node-download.tgz') - - # Change ownership of $NODE_MODULE_DIR. - # Note: on some machines, these commands seem to take quite a long time. - os.chown('node_modules/', os.getuid(), -1) - os.chmod('node_modules/', 744) - -# Adjust path to support the default Chrome locations for Unix, Windows and Mac OS. -if [ '$TRAVIS' == true ]; then - export CHROME_BIN='/usr/bin/chromium-browser' -elif [ '$VAGRANT' == true ] || [ -f '/etc/is_vagrant_vm' ]; then - # XVFB is required for headless testing in Vagrant - sudo apt-get install xvfb chromium-browser - export CHROME_BIN='/usr/bin/chromium-browser' - # Used in frontend and e2e tests. Only gets set if using Vagrant VM. - export XVFB_PREFIX='/usr/bin/xvfb-run' - # Enforce proper ownership on oppia, oppia_tools, and node_modules or else NPM installs will fail. - sudo chown -R vagrant.vagrant /home/vagrant/oppia /home/vagrant/oppia_tools /home/vagrant/node_modules -elif [ -f '/usr/bin/google-chrome' ]; then - # Unix. - export CHROME_BIN='/usr/bin/google-chrome' -elif [ -f '/usr/bin/chromium-browser' ]; then - # Unix. - export CHROME_BIN='/usr/bin/chromium-browser' -elif [ -f '/c/Program Files (x86)/Google/Chrome/Application/chrome.exe' ]; then - # Windows. 
- export CHROME_BIN='/c/Program Files (x86)/Google/Chrome/Application/chrome.exe' -elif [ -f '/mnt/c/Program Files (x86)/Google/Chrome/Application/chrome.exe' ]; then - # WSL - export CHROME_BIN='/mnt/c/Program Files (x86)/Google/Chrome/Application/chrome.exe' -elif [ -f '/Applications/Google Chrome.app/Contents/MacOS/Google Chrome' ]; then - # Mac OS. - export CHROME_BIN='/Applications/Google Chrome.app/Contents/MacOS/Google Chrome' -else - print 'Chrome is not found, stopping ...' - exit 1 -fi # This function takes a command for python as its only input. # It checks this input for a specific version of python and returns false # if it does not match the expected prefix. -function test_python_version() { - EXPECTED_PYTHON_VERSION_PREFIX='2.7' - PYTHON_VERSION=$($1 --version 2>&1) - if [[ $PYTHON_VERSION =~ Python[[:space:]](.+) ]]; then - PYTHON_VERSION=${BASH_REMATCH[1]} - else - print 'Unrecognizable Python command output: ${PYTHON_VERSION}' - # Return a false condition if output of tested command is unrecognizable. - return 1 - fi - if [[ '${PYTHON_VERSION}' = ${EXPECTED_PYTHON_VERSION_PREFIX}* ]]; then - # Return 0 to indicate a successful match. - # Return 1 to indicate a failed match. - return 0 - else - return 1 - fi -} +def test_python_version(): + running_python_version = '{0[0]}.{0[1]}'.format(sys.version_info) + if running_python_version != '2.7': + print 'Please use Python2.7. Exiting...' + # If OS is Windows, print helpful error message about adding Python to + # path. + os_info = os.uname() + if os_info[0] != 'Darwin' and os_info[0] != 'Linux': + print ( + 'It looks like you are using Windows. If you have Python ' + 'installed,') + print 'make sure it is in your PATH and that PYTHONPATH is set.' 
+ print ( + 'If you have two versions of Python (ie, Python 2.7 and 3), ' + 'specify 2.7 before other versions of Python when setting the ' + 'PATH.') + print 'Here are some helpful articles:' + print 'http://docs.python-guide.org/en/latest/starting/install/win/' + print ( + 'https://stackoverflow.com/questions/3701646/how-to-add-to-the-' + 'pythonpath-in-windows-7') + # Exit when no suitable Python environment can be found. + sys.exit(1) + + +def main(): + """Runs the script to setup Oppia.""" + test_python_version() -# First, check the default Python command (which should be found within the user's $PATH). -PYTHON_CMD='python' -# Test whether the 'python' or 'python2.7' commands exist and finally fails when -# no suitable python version 2.7 can be found. -if ! test_python_version $PYTHON_CMD; then - print 'Unable to find 'python'. Trying python2.7 instead...' - PYTHON_CMD='python2.7' - if ! test_python_version $PYTHON_CMD; then - print 'Could not find a suitable Python environment. Exiting.' - # If OS is Windows, print helpful error message about adding Python to path. - if [ ! '${OS}' == 'Darwin' -a ! '${OS}' == 'Linux' ]; then - print 'It looks like you are using Windows. If you have Python installed,' - print 'make sure it is in your PATH and that PYTHONPATH is set.' - print 'If you have two versions of Python (ie, Python 2.7 and 3), specify 2.7 before other versions of Python when setting the PATH.' - print 'Here are some helpful articles:' - print 'http://docs.python-guide.org/en/latest/starting/install/win/' - print 'https://stackoverflow.com/questions/3701646/how-to-add-to-the-pythonpath-in-windows-7' - fi - # Exit when no suitable Python environment can be found. - return 1 - fi -fi -export PYTHON_CMD - -export SETUP_DONE=true + parsed_args = _PARSER.parse_args() + os.environ['NO_SKULPT'] = bool(parsed_args.nojsrepl or parsed_args.noskulpt) + + # The second option allows this script to also be run from deployment + # folders. 
+ if not os.getcwd().endswith('oppia') and not os.getcwd().endswith( + 'deploy-'): + print '' + print 'WARNING This script should be run from the oppia/ root folder.' + print '' + sys.exit(1) + + # Set COMMON_DIR to the absolute path of the directory above OPPIA_DIR. This + # is necessary becaue COMMON_DIR (or subsequent variables which refer to it) + # may use it in a situation where relative paths won't work as expected(such + # as $PYTHONPATH). + curr_dir = os.path.abspath(os.getcwd()) + oppia_tools_dir = os.path.join(curr_dir, '..', 'oppia_tools') + + os.mkdir(oppia_tools_dir) + os.mkdir('third_party/') + os.mkdir('node_modules/') + + # Adjust the path to include a reference to node. + node_path = os.path.join(oppia_tools_dir, 'node-10.15.3') + + os_info = os.uname() + if os_info[0] != 'Darwin' and os_info[0] != 'Linux': + # Node is a requirement for all installation scripts. Here, we check if + # the OS supports node.js installation; if not, we exit with an error. + print '' + print 'WARNING: Unsupported OS for installation of node.js.' + print 'If you are running this script on Windows, see the instructions' + print 'here regarding installation of node.js:' + print '' + print ( + 'https://github.com/oppia/oppia/wiki/Installing-Oppia-%28Windows' + '%29') + print '' + print 'STATUS: Installation completed except for node.js. Exiting.' + print '' + sys.exit(1) + + # Download and install node.js. 
+ print 'Checking if node.js is installed in %s' % oppia_tools_dir + if not os.path.exists(node_path): + print 'Installing Node.js' + if os_info[0] == 'Darwin': + if os_info[4] == 'x86_64': + node_file_name = 'node-v10.15.3-darwin-x64' + else: + node_file_name = 'node-v10.15.3-darwin-x86' + elif os_info[0] == 'Linux': + if os_info[4] == 'x86_64': + node_file_name = 'node-v10.15.3-linux-x64' + else: + node_file_name = 'node-v10.15.3-linux-x86' + + urllib.urlretrieve( + 'https://nodejs.org/dist/v10.15.3/%s.tar.gz' % node_file_name, + filename='node-download.tgz') + tar = tarfile.open(name='node-download.tgz') + tar.extractall(path=oppia_tools_dir) + tar.close() + delete_directory_tree('node-download.tgz') + + # Change ownership of $NODE_MODULE_DIR. + # Note: on some machines, these commands seem to take quite a long time. + os.chown('node_modules/', os.getuid(), -1) + os.chmod('node_modules/', 744) + + # Adjust path to support the default Chrome locations for Unix, Windows and + # Mac OS. + if os.environ['TRAVIS'] is True: + chrome_bin = '/usr/bin/chromium-browser' + elif os.environ['VAGRANT'] is True or os.path.isfile('/etc/is_vagrant_vm'): + # XVFB is required for headless testing in Vagrant. + subprocess.call('sudo apt-get install xvfb chromium-browser'.split()) + chrome_bin = '/usr/bin/chromium-browser' + # Used in frontend and e2e tests. Only gets set if using Vagrant VM. + os.environ['XVFB_PREFIX'] = '/usr/bin/xvfb-run' + # Enforce proper ownership on oppia, oppia_tools, and node_modules or + # else NPM installs will fail. + subprocess.call( + 'sudo chown -R vagrant.vagrant /home/vagrant/oppia ' + '/home/vagrant/oppia_tools /home/vagrant/node_modules'.split()) + elif os.path.isfile('/usr/bin/google-chrome'): + # Unix. + chrome_bin = '/usr/bin/google-chrome' + elif os.path.isfile('/usr/bin/chromium-browser'): + # Unix. + chrome_bin = '/usr/bin/chromium-browser' + elif os.path.isfile( + '/c/Program Files (x86)/Google/Chrome/Application/chrome.exe'): + # Windows. 
+ chrome_bin = ( + '/c/Program Files (x86)/Google/Chrome/Application/chrome.exe') + elif os.path.isfile( + '/mnt/c/Program Files (x86)/Google/Chrome/Application/chrome.exe'): + # WSL. + chrome_bin = ( + '/mnt/c/Program Files (x86)/Google/Chrome/Application/chrome.exe') + elif os.path.isfile( + '/Applications/Google Chrome.app/Contents/MacOS/Google Chrome'): + # Mac OS. + chrome_bin = ( + '/Applications/Google Chrome.app/Contents/MacOS/Google Chrome') + else: + print 'Chrome is not found, stopping ...' + sys.exit(1) + + os.environ['CHROME_BIN'] = chrome_bin + print 'Environment setup completed.' + sys.exit(0) + + +if __name__ == '__main__': + main() From 9ae6f2e90d32cda238db2badcbaabd086e464754 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Mon, 19 Aug 2019 01:35:59 +0530 Subject: [PATCH 003/141] fix --- scripts/build.py | 14 +++++---- scripts/run_frontend_tests.py | 17 ++++++---- scripts/setup.py | 58 ++++++++++++++++++----------------- 3 files changed, 49 insertions(+), 40 deletions(-) diff --git a/scripts/build.py b/scripts/build.py index 3b13b5d36d8a..afdbac14b794 100644 --- a/scripts/build.py +++ b/scripts/build.py @@ -15,11 +15,11 @@ """Build file for production version of Oppia. Minifies JS and CSS.""" # pylint: disable=invalid-name +import argparse import collections import fnmatch import hashlib import json -import optparse import os import re import shutil @@ -1317,15 +1317,17 @@ def build(): built and stored. Depending on the options passed to the script, might also minify third-party libraries and/or generate a build directory. 
""" - parser = optparse.OptionParser() - parser.add_option( + parser = argparse.ArgumentParser() + parser.add_argument( '--prod_env', action='store_true', default=False, dest='prod_mode') - parser.add_option( + parser.add_argument( '--minify_third_party_libs_only', action='store_true', default=False, dest='minify_third_party_libs_only') - parser.add_option( + parser.add_argument( '--enable_watcher', action='store_true', default=False) - options = parser.parse_args()[0] + # We use parse_known_args() to ignore the extra arguments which maybe used + # while calling this method from other Python scripts. + options, _ = parser.parse_known_args() # Regenerate /third_party/generated from scratch. safe_delete_directory_tree(THIRD_PARTY_GENERATED_DEV_DIR) build_third_party_libs(THIRD_PARTY_GENERATED_DEV_DIR) diff --git a/scripts/run_frontend_tests.py b/scripts/run_frontend_tests.py index abbc0de5600f..0fc97572d389 100644 --- a/scripts/run_frontend_tests.py +++ b/scripts/run_frontend_tests.py @@ -45,11 +45,14 @@ help='optional; if specified, runs frontend karma tests on both minified ' 'and non-minified code', action='store_true') -XVFB_PREFIX = os.environ['XVFB_PREFIX'] def main(): """Runs the frontend tests.""" + setup.main() + xvfb_prefix = '' + if os.environ.get('VAGRANT') or os.path.isfile('/etc/is_vagrant_vm'): + xvfb_prefix = '/usr/bin/xvfb-run' parsed_args = _PARSER.parse_args() setup.maybe_install_dependencies( parsed_args.skip_install, parsed_args.run_minified_tests) @@ -68,20 +71,22 @@ def main(): start_tests_cmd = ( '%s node_modules/karma/bin/karma start core/tests/karma.conf.ts' - % XVFB_PREFIX) - subprocess.call(start_tests_cmd) + % xvfb_prefix) + subprocess.call(start_tests_cmd.split()) if parsed_args.run_minified_tests is True: print '' print ' Running test in production environment' print '' - os.system('scripts/build.py prod_env minify_third_party_libs_only') + subprocess.call( + 'python scripts/build.py --prod_env --minify_third_party_libs_only' + 
.split()) start_tests_cmd = ( '%s node_modules/karma/bin/karma start ' - 'core/tests/karma.conf.ts --prodEnv' % XVFB_PREFIX) - subprocess.call(start_tests_cmd) + 'core/tests/karma.conf.ts --prodEnv' % xvfb_prefix) + subprocess.call(start_tests_cmd.split()) print 'Done!' diff --git a/scripts/setup.py b/scripts/setup.py index ac513e1ed6e4..af7b13a14fce 100644 --- a/scripts/setup.py +++ b/scripts/setup.py @@ -27,15 +27,6 @@ from . import build _PARSER = argparse.ArgumentParser() -_PARSER.add_argument( - '--skip_install', - help='optional; if specified, skips installing dependencies', - action='store_true') -_PARSER.add_argument( - '--run_minified_tests', - help='optional; if specified, runs frontend karma tests on both minified ' - 'and non-minified code', - action='store_true') _PARSER.add_argument( '--nojsrepl', help='optional; if specified, skips installation of skulpt.', @@ -58,14 +49,24 @@ def delete_directory_tree(directory_path): shutil.rmtree(directory_path) +def create_directory(directory_path): + """Creates a new directory. Does not do anything if directory already + exists. + + Args: + directory_path: str. Directory path to be created. + """ + if os.path.exists(directory_path): + return + os.mkdir(directory_path) + + def maybe_install_dependencies( skip_installing_third_party_libs, run_minified_tests): """Parse additional command line arguments.""" - if skip_installing_third_party_libs is False: # Install third party dependencies. - subprocess.call('scripts/install_third_party.sh', shell=True) - + subprocess.call('bash scripts/install_third_party.sh'.split()) # Ensure that generated JS and CSS files are in place before running the # tests. print '' @@ -77,7 +78,7 @@ def maybe_install_dependencies( print '' print 'Running build task with concatenation and minification' print '' - subprocess.call('scripts/build.py --prod_env'.split()) + subprocess.call('python scripts/build.py --prod_env'.split()) # This function takes a command for python as its only input. 
@@ -112,8 +113,10 @@ def main(): """Runs the script to setup Oppia.""" test_python_version() - parsed_args = _PARSER.parse_args() - os.environ['NO_SKULPT'] = bool(parsed_args.nojsrepl or parsed_args.noskulpt) + # We use parse_known_args() to ignore the extra arguments which maybe used + # while calling this method from other Python scripts. + parsed_args, _ = _PARSER.parse_known_args() + os.environ['NO_SKULPT'] = str(parsed_args.nojsrepl or parsed_args.noskulpt) # The second option allows this script to also be run from deployment # folders. @@ -131,9 +134,9 @@ def main(): curr_dir = os.path.abspath(os.getcwd()) oppia_tools_dir = os.path.join(curr_dir, '..', 'oppia_tools') - os.mkdir(oppia_tools_dir) - os.mkdir('third_party/') - os.mkdir('node_modules/') + create_directory(oppia_tools_dir) + create_directory('third_party/') + create_directory('node_modules/') # Adjust the path to include a reference to node. node_path = os.path.join(oppia_tools_dir, 'node-10.15.3') @@ -170,13 +173,13 @@ def main(): else: node_file_name = 'node-v10.15.3-linux-x86' - urllib.urlretrieve( - 'https://nodejs.org/dist/v10.15.3/%s.tar.gz' % node_file_name, - filename='node-download.tgz') - tar = tarfile.open(name='node-download.tgz') - tar.extractall(path=oppia_tools_dir) - tar.close() - delete_directory_tree('node-download.tgz') + urllib.urlretrieve( + 'https://nodejs.org/dist/v10.15.3/%s.tar.gz' % node_file_name, + filename='node-download.tgz') + tar = tarfile.open(name='node-download.tgz') + tar.extractall(path=oppia_tools_dir) + tar.close() + delete_directory_tree('node-download.tgz') # Change ownership of $NODE_MODULE_DIR. # Note: on some machines, these commands seem to take quite a long time. @@ -185,9 +188,9 @@ def main(): # Adjust path to support the default Chrome locations for Unix, Windows and # Mac OS. 
- if os.environ['TRAVIS'] is True: + if os.environ.get('TRAVIS'): chrome_bin = '/usr/bin/chromium-browser' - elif os.environ['VAGRANT'] is True or os.path.isfile('/etc/is_vagrant_vm'): + elif os.environ.get('VAGRANT') or os.path.isfile('/etc/is_vagrant_vm'): # XVFB is required for headless testing in Vagrant. subprocess.call('sudo apt-get install xvfb chromium-browser'.split()) chrome_bin = '/usr/bin/chromium-browser' @@ -225,7 +228,6 @@ def main(): os.environ['CHROME_BIN'] = chrome_bin print 'Environment setup completed.' - sys.exit(0) if __name__ == '__main__': From 2eca67e08b171f4917abd4b7db0c90d3ef2eeb88 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Mon, 19 Aug 2019 01:43:03 +0530 Subject: [PATCH 004/141] fix --- scripts/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/setup.py b/scripts/setup.py index af7b13a14fce..f2040e6efaae 100644 --- a/scripts/setup.py +++ b/scripts/setup.py @@ -179,7 +179,7 @@ def main(): tar = tarfile.open(name='node-download.tgz') tar.extractall(path=oppia_tools_dir) tar.close() - delete_directory_tree('node-download.tgz') + os.remove('node-download.tgz') # Change ownership of $NODE_MODULE_DIR. # Note: on some machines, these commands seem to take quite a long time. From c9b6f98ac287d55f89faa1df7c2e6a2f2d64e580 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Tue, 20 Aug 2019 02:47:51 +0530 Subject: [PATCH 005/141] conver --- scripts/setup_gae.py | 93 ++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 93 insertions(+) create mode 100644 scripts/setup_gae.py diff --git a/scripts/setup_gae.py b/scripts/setup_gae.py new file mode 100644 index 000000000000..20a2b9309cca --- /dev/null +++ b/scripts/setup_gae.py @@ -0,0 +1,93 @@ +# Copyright 2019 The Oppia Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an 'AS-IS' BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""This file should not be invoked directly, but called from other Python +scripts. Python execution environment setup for scripts that require GAE. +""" + +import os +import sys +import tarfile +import urllib +import zipfile + + +def main(): + """Runs the script to setup GAE.""" + curr_dir = os.path.abspath(os.getcwd()) + oppia_tools_dir = os.path.join(curr_dir, '..', 'oppia_tools') + google_app_engine_home = os.path.join( + oppia_tools_dir, 'google_appengine_1.9.67/google_appengine') + google_cloud_sdk_home = os.path.join( + oppia_tools_dir, 'google-cloud-sdk-251.0.0/google-cloud-sdk') + coverage_home = os.path.join(oppia_tools_dir, 'coverage-4.5.3') + + # Note that if the following line is changed so that it uses webob_1_1_1, + # PUT requests from the frontend fail. + sys.append(coverage_home) + sys.append(google_app_engine_home) + sys.append(os.path.join(google_app_engine_home, 'lib/webob_0_9')) + sys.append(os.path.join(oppia_tools_dir, 'webtest-2.0.33')) + + # Delete old *.pyc files. + for directory, _, files in os.walk('.'): + for file_name in files: + if file_name.endswith('.pyc'): + filepath = os.path.join(directory, file_name) + os.remove(filepath) + + print ( + 'Checking whether Google App Engine is installed in %s' + % google_app_engine_home) + if not os.path.exists(google_app_engine_home): + print 'Downloading Google App Engine (this may take a little while)...' 
+ os.mkdir(google_app_engine_home) + try: + urllib.urlretrieve( + 'https://storage.googleapis.com/appengine-sdks/featured/' + 'google_appengine_1.9.67.zip', filename='gae-download.zip') + except Exception: + print 'Error downloading Google App Engine. Exiting.' + sys.exit(1) + print 'Download complete. Installing Google App Engine...' + with zipfile.ZipFile('gae-download.zip', 'r') as zip_ref: + zip_ref.extractall( + path=os.path.join(oppia_tools_dir, 'google_appengine_1.9.67/')) + os.remove('gae-download.zip') + + + print ( + 'Checking whether google-cloud-sdk is installed in %s' + % google_cloud_sdk_home) + if not os.path.exists('google_cloud_sdk_home'): + print 'Downloading Google Cloud SDK (this may take a little while)...' + os.mkdir(google_cloud_sdk_home) + try: + urllib.urlretrieve( + 'https://dl.google.com/dl/cloudsdk/channels/rapid/downloads/' + 'google-cloud-sdk-251.0.0-linux-x86_64.tar.gz', + filename='gcloud-sdk.tar.gz') + except Exception: + print 'Error downloading Google Cloud SDK. Exiting.' + sys.exit(1) + print 'Download complete. Installing Google Cloud SDK...' + tar = tarfile.open(name='gcloud-sdk.tar.gz') + tar.extractall( + path=os.path.join(oppia_tools_dir, 'google-cloud-sdk-251.0.0/')) + tar.close() + os.remove('gcloud-sdk.tar.gz') + + +if __name__ == '__main__': + main() From 721809286c0dd761e4c64c151aaced229f09ba03 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Tue, 20 Aug 2019 02:48:46 +0530 Subject: [PATCH 006/141] fix --- scripts/run_frontend_tests.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/scripts/run_frontend_tests.py b/scripts/run_frontend_tests.py index 0fc97572d389..5fca47587a7b 100644 --- a/scripts/run_frontend_tests.py +++ b/scripts/run_frontend_tests.py @@ -34,6 +34,7 @@ from . import build from . import setup +from . 
import setup_gae _PARSER = argparse.ArgumentParser() _PARSER.add_argument( @@ -50,6 +51,7 @@ def main(): """Runs the frontend tests.""" setup.main() + setup_gae.main() xvfb_prefix = '' if os.environ.get('VAGRANT') or os.path.isfile('/etc/is_vagrant_vm'): xvfb_prefix = '/usr/bin/xvfb-run' From 9250dcbb295f15c15126e641e15838eb4e4d108f Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Tue, 20 Aug 2019 02:52:21 +0530 Subject: [PATCH 007/141] fix --- scripts/setup_gae.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/scripts/setup_gae.py b/scripts/setup_gae.py index 20a2b9309cca..4d6f71008468 100644 --- a/scripts/setup_gae.py +++ b/scripts/setup_gae.py @@ -35,10 +35,10 @@ def main(): # Note that if the following line is changed so that it uses webob_1_1_1, # PUT requests from the frontend fail. - sys.append(coverage_home) - sys.append(google_app_engine_home) - sys.append(os.path.join(google_app_engine_home, 'lib/webob_0_9')) - sys.append(os.path.join(oppia_tools_dir, 'webtest-2.0.33')) + sys.path.append(coverage_home) + sys.path.append(google_app_engine_home) + sys.path.append(os.path.join(google_app_engine_home, 'lib/webob_0_9')) + sys.path.append(os.path.join(oppia_tools_dir, 'webtest-2.0.33')) # Delete old *.pyc files. for directory, _, files in os.walk('.'): @@ -70,7 +70,7 @@ def main(): print ( 'Checking whether google-cloud-sdk is installed in %s' % google_cloud_sdk_home) - if not os.path.exists('google_cloud_sdk_home'): + if not os.path.exists(google_cloud_sdk_home): print 'Downloading Google Cloud SDK (this may take a little while)...' 
os.mkdir(google_cloud_sdk_home) try: From 9d33246f9382607acd9fb1b7a260f3cb9f2a240d Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Tue, 20 Aug 2019 03:30:34 +0530 Subject: [PATCH 008/141] convert --- scripts/run_backend_tests.py | 131 +++++++++++++++++++++++++++++++++++ 1 file changed, 131 insertions(+) create mode 100644 scripts/run_backend_tests.py diff --git a/scripts/run_backend_tests.py b/scripts/run_backend_tests.py new file mode 100644 index 000000000000..e83806e01d8b --- /dev/null +++ b/scripts/run_backend_tests.py @@ -0,0 +1,131 @@ +# Copyright 2019 The Oppia Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS-IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""INSTRUCTIONS: + +Run this script from the oppia root folder: + python -m scripts.run_backend_tests + +It runs all the (Python) backend tests, in parallel. + +===================== +CUSTOMIZATION OPTIONS +===================== + +(1) Generate a coverage report by adding the argument + + --generate_coverage_report + +but note that this will slow down the tests by a factor of 1.5 or more. + +(2) Append a test target to make the script run all tests in a given module +or class, or run a particular test. For example, appending + + --test_target='foo.bar.Baz' + +runs all tests in test class Baz in the foo/bar.py module, and appending + + --test_target='foo.bar.Baz.quux' + +runs the test method quux in the test class Baz in the foo/bar.py module. 
+ +(3) Append a test path to make the script run all tests in a given +subdirectory. For example, appending + + --test_path='core/controllers' + +runs all tests in the core/controllers/ directory. + +(4) Enable the verbose log by add the argument. It will display the outputs of + the tests being run. + + --verbose or -v + +IMPORTANT: Only one of --test_path and --test_target should be specified. +""" + +import argparse +import os +import subprocess +import tarfile +import urllib + +from . import backend_tests +from . import build +from . import setup +from . import setup_gae + +_PARSER = argparse.ArgumentParser() +_PARSER.add_argument( + '--generate_coverage_report', + help='optional; if specified, generates a coverage report', + action='store_true') + + +def main(): + """Runs the backend tests.""" + setup.main() + setup_gae.main() + + # Install third party dependencies. + subprocess.call('bash scripts/install_third_party.sh'.split()) + + curr_dir = os.path.abspath(os.getcwd()) + oppia_tools_dir = os.path.join(curr_dir, '..', 'oppia_tools') + coverage_home = os.path.join(oppia_tools_dir, 'coverage-4.5.3') + coverage_path = os.path.join(coverage_home, 'coverage') + + parsed_args = _PARSER.parse_args() + if parsed_args.generate_coverage_report: + print 'Checking whether coverage is installed in %s' % oppia_tools_dir + if not os.path.exists(os.path.join(oppia_tools_dir, 'coverage-4.5.3')): + print 'Installing coverage' + urllib.urlretrieve( + 'https://files.pythonhosted.org/packages/85/d5/' + '818d0e603685c4a613d56f065a721013e942088047ff1027a632948bdae6/' + 'coverage-4.5.4.tar.gz', filename='coverage-4.5.4.tar.gz') + tar = tarfile.open(name='coverage-4.5.4.tar.gz') + tar.extractall( + path=os.path.join(oppia_tools_dir, 'coverage-4.5.4')) + tar.close() + os.remove('coverage-4.5.4.tar.gz') + + # Compile typescript files. + print 'Compiling typescript...' + subprocess.call('node_modules/typescript/bin/tsc --project .'.split()) + + print 'Compiling webpack...' 
+ subprocess.call( + 'node_modules/webpack/bin/webpack.js --config webpack.prod.config.ts' + .split()) + + build.build() + backend_tests.main() + + if parsed_args.generate_coverage_report: + subprocess.call(('python %s combine' % coverage_path).split()) + subprocess.call( + ('python %s report --omit="%s*","third_party/*","/usr/share/*" ' + '--show-missing' % (coverage_path, oppia_tools_dir)).split()) + + print 'Generating xml coverage report...' + subprocess.call(('python %s xml' % coverage_path).split()) + + print '' + print 'Done!' + print '' + + +if __name__ == '__main__': + main() From 41113da8eb226e619465993259efd29a0bd78046 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Tue, 20 Aug 2019 04:12:06 +0530 Subject: [PATCH 009/141] fix --- appengine_config.py | 3 --- scripts/backend_tests.py | 10 ++-------- scripts/run_backend_tests.py | 8 ++++---- scripts/run_backend_tests.sh | 4 ++-- scripts/setup_gae.py | 3 ++- scripts/setup_gae.sh | 2 +- 6 files changed, 11 insertions(+), 19 deletions(-) diff --git a/appengine_config.py b/appengine_config.py index 8bbeb4ee7d08..34818844e243 100644 --- a/appengine_config.py +++ b/appengine_config.py @@ -107,6 +107,3 @@ def save(self): if not os.path.isdir(lib_path): raise Exception('Invalid path for third_party library: %s' % lib_path) sys.path.insert(0, lib_path) - -# Required, otherwise MapReduce third-party library will throw errors. 
-os.environ['PYTHONPATH'] = ','.join(sys.path) diff --git a/scripts/backend_tests.py b/scripts/backend_tests.py index 16b03c6ecb29..d9c1af741b12 100644 --- a/scripts/backend_tests.py +++ b/scripts/backend_tests.py @@ -39,7 +39,6 @@ CURR_DIR = os.path.abspath(os.getcwd()) OPPIA_TOOLS_DIR = os.path.join(CURR_DIR, '..', 'oppia_tools') THIRD_PARTY_DIR = os.path.join(CURR_DIR, 'third_party') -PYTHONPATH = os.environ['PYTHONPATH'] DIRS_TO_ADD_TO_SYS_PATH = [ os.path.join(OPPIA_TOOLS_DIR, 'pylint-1.9.4'), @@ -70,7 +69,8 @@ ] COVERAGE_PATH = os.path.join( - os.getcwd(), '..', 'oppia_tools', 'coverage-4.5.3', 'coverage') + os.getcwd(), '..', 'oppia_tools', 'coverage-4.5.4', 'coverage-4.5.4', + 'coverage') TEST_RUNNER_PATH = os.path.join(os.getcwd(), 'core', 'tests', 'gae_suite.py') LOG_LOCK = threading.Lock() ALL_ERRORS = [] @@ -180,12 +180,6 @@ def __init__(self, test_target, generate_coverage_report): def run(self): """Runs all tests corresponding to the given test target.""" test_target_flag = '--test_target=%s' % self.test_target - - # This is done because PYTHONPATH is modified while using importlib - # to import modules. PYTHONPATH is changed to comma separated list - # after which python is unable to find certain modules. So, the old - # PYTHONPATH is copied here to avoid import errors. 
- os.environ['PYTHONPATH'] = PYTHONPATH if self.generate_coverage_report: exc_list = [ 'python', COVERAGE_PATH, 'run', '-p', TEST_RUNNER_PATH, diff --git a/scripts/run_backend_tests.py b/scripts/run_backend_tests.py index e83806e01d8b..ac7b75c4f2bb 100644 --- a/scripts/run_backend_tests.py +++ b/scripts/run_backend_tests.py @@ -83,13 +83,13 @@ def main(): curr_dir = os.path.abspath(os.getcwd()) oppia_tools_dir = os.path.join(curr_dir, '..', 'oppia_tools') - coverage_home = os.path.join(oppia_tools_dir, 'coverage-4.5.3') - coverage_path = os.path.join(coverage_home, 'coverage') + coverage_home = os.path.join(oppia_tools_dir, 'coverage-4.5.4') + coverage_path = os.path.join(coverage_home, 'coverage-4.5.4', 'coverage') - parsed_args = _PARSER.parse_args() + parsed_args, _ = _PARSER.parse_known_args() if parsed_args.generate_coverage_report: print 'Checking whether coverage is installed in %s' % oppia_tools_dir - if not os.path.exists(os.path.join(oppia_tools_dir, 'coverage-4.5.3')): + if not os.path.exists(os.path.join(oppia_tools_dir, 'coverage-4.5.4')): print 'Installing coverage' urllib.urlretrieve( 'https://files.pythonhosted.org/packages/85/d5/' diff --git a/scripts/run_backend_tests.sh b/scripts/run_backend_tests.sh index 8aa3a3208320..c04a0be473c7 100755 --- a/scripts/run_backend_tests.sh +++ b/scripts/run_backend_tests.sh @@ -78,9 +78,9 @@ bash scripts/install_third_party.sh for arg in "$@"; do if [ "$arg" == "--generate_coverage_report" ]; then echo Checking whether coverage is installed in $TOOLS_DIR - if [ ! -d "$TOOLS_DIR/coverage-4.5.3" ]; then + if [ ! 
-d "$TOOLS_DIR/coverage-4.5.4" ]; then echo Installing coverage - pip install coverage==4.5.3 --target="$TOOLS_DIR/coverage-4.5.3" + pip install coverage==4.5.4 --target="$TOOLS_DIR/coverage-4.5.4" fi fi done diff --git a/scripts/setup_gae.py b/scripts/setup_gae.py index 4d6f71008468..83a30b8eb066 100644 --- a/scripts/setup_gae.py +++ b/scripts/setup_gae.py @@ -31,10 +31,11 @@ def main(): oppia_tools_dir, 'google_appengine_1.9.67/google_appengine') google_cloud_sdk_home = os.path.join( oppia_tools_dir, 'google-cloud-sdk-251.0.0/google-cloud-sdk') - coverage_home = os.path.join(oppia_tools_dir, 'coverage-4.5.3') + coverage_home = os.path.join(oppia_tools_dir, 'coverage-4.5.4') # Note that if the following line is changed so that it uses webob_1_1_1, # PUT requests from the frontend fail. + sys.path.append('.') sys.path.append(coverage_home) sys.path.append(google_app_engine_home) sys.path.append(os.path.join(google_app_engine_home, 'lib/webob_0_9')) diff --git a/scripts/setup_gae.sh b/scripts/setup_gae.sh index ea2a7b0f26fb..d5a9b13da0c1 100644 --- a/scripts/setup_gae.sh +++ b/scripts/setup_gae.sh @@ -24,7 +24,7 @@ fi export GOOGLE_APP_ENGINE_HOME=$TOOLS_DIR/google_appengine_1.9.67/google_appengine export GOOGLE_CLOUD_SDK_HOME=$TOOLS_DIR/google-cloud-sdk-251.0.0/google-cloud-sdk -export COVERAGE_HOME=$TOOLS_DIR/coverage-4.5.3 +export COVERAGE_HOME=$TOOLS_DIR/coverage-4.5.4 # Note that if the following line is changed so that it uses webob_1_1_1, PUT requests from the frontend fail. 
export PYTHONPATH=.:$COVERAGE_HOME:$GOOGLE_APP_ENGINE_HOME:$GOOGLE_APP_ENGINE_HOME/lib/webob_0_9:$TOOLS_DIR/webtest-2.0.33:$PYTHONPATH From beb3c52f9f364087e4956c024f0429ab3dd1c79b Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Thu, 22 Aug 2019 14:57:06 +0530 Subject: [PATCH 010/141] add --- scripts/install_third_party.py | 4 +- scripts/install_third_party_libs.py | 206 ++++++++++++++++++++++++++++ 2 files changed, 208 insertions(+), 2 deletions(-) create mode 100644 scripts/install_third_party_libs.py diff --git a/scripts/install_third_party.py b/scripts/install_third_party.py index e9d6ca93717c..b145ddb10ca7 100644 --- a/scripts/install_third_party.py +++ b/scripts/install_third_party.py @@ -323,10 +323,10 @@ def download_manifest_files(filepath): dependency_tar_root_name, dependency_target_root_name) -def _install_third_party_libs(): +def install_third_party_libs(): """Installs all the third party libraries.""" download_manifest_files(MANIFEST_FILE_PATH) if __name__ == '__main__': - _install_third_party_libs() + install_third_party_libs() diff --git a/scripts/install_third_party_libs.py b/scripts/install_third_party_libs.py new file mode 100644 index 000000000000..e0328a1bc88f --- /dev/null +++ b/scripts/install_third_party_libs.py @@ -0,0 +1,206 @@ +# Copyright 2019 The Oppia Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS-IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from . 
import install_third_party + +set -e +source $(dirname $0)/setup.sh || exit 1 + +# Download and install required JS and zip files. +print 'Installing third-party JS libraries and zip files.' +install_third_party.install_third_party_libs() + +curr_dir = os.path.abspath(os.getcwd()) +oppia_tools_dir = os.path.join(curr_dir, '..', 'oppia_tools') +node_path = os.path.join(oppia_tools_dir, 'node-10.15.3') +# Install third-party node modules needed for the build process. +subprocess.call(('%s/bin/npm install --only=dev' % node_path).split()) +# This line removes the "npm ERR! missing:" messages. For reference, see this +# thread: https://github.com/npm/npm/issues/19393#issuecomment-374076889 +subprocess.call(('%s/bin/npm dedupe' % node_path).split()) + +# Download and install Skulpt. Skulpt is built using a Python script included +# within the Skulpt repository (skulpt.py). This script normally requires +# GitPython, however the patches to it below (with the sed operations) lead to +# it no longer being required. The Python script is used to avoid having to +# manually recreate the Skulpt dist build process in install_third_party.py. +# Note that skulpt.py will issue a warning saying its dist command will not +# work properly without GitPython, but it does actually work due to the +# patches. +print 'Checking whether Skulpt is installed in third_party' +if [ ! "$NO_SKULPT" -a ! -d "$THIRD_PARTY_DIR/static/skulpt-0.10.0" ]; then + if [ ! -d "$TOOLS_DIR/skulpt-0.10.0" ]; then + print Downloading Skulpt + cd $TOOLS_DIR + mkdir skulpt-0.10.0 + cd skulpt-0.10.0 + git clone https://github.com/skulpt/skulpt + cd skulpt + + # Use a specific Skulpt release. + git checkout 0.10.0 + + # Add a temporary backup file so that this script works on both Linux and + # Mac. + TMP_FILE=`mktemp /tmp/backup.XXXXXXXXXX` + + print Compiling Skulpt + + # The Skulpt setup function needs to be tweaked. It fails without certain + # third party commands. 
These are only used for unit tests and generating + # documentation and are not necessary when building Skulpt. + sed -e "s/ret = test()/ret = 0/" $TOOLS_DIR/skulpt-0.10.0/skulpt/skulpt.py |\ + sed -e "s/ doc()/ pass#doc()/" |\ + # This and the next command disable unit and compressed unit tests for the + # compressed distribution of Skulpt. These tests don't work on some + # Ubuntu environments and cause a libreadline dependency issue. + sed -e "s/ret = os.system(\"{0}/ret = 0 #os.system(\"{0}/" |\ + sed -e "s/ret = rununits(opt=True)/ret = 0/" > $TMP_FILE + mv $TMP_FILE $TOOLS_DIR/skulpt-0.10.0/skulpt/skulpt.py + $PYTHON_CMD $TOOLS_DIR/skulpt-0.10.0/skulpt/skulpt.py dist + + # Return to the Oppia root folder. + cd $OPPIA_DIR + fi + + # Move the build directory to the static resources folder. + mkdir -p $THIRD_PARTY_DIR/static/skulpt-0.10.0 + cp -r $TOOLS_DIR/skulpt-0.10.0/skulpt/dist/* $THIRD_PARTY_DIR/static/skulpt-0.10.0 +fi + +# Checking if pip is installed. If you are having +# trouble, please ensure that you have pip installed (see "Installing Oppia" +# on the Oppia developers' wiki page). +print Checking if pip is installed on the local machine +if ! type pip > /dev/null 2>&1 ; then + print "" + print " Pip is required to install Oppia dependencies, but pip wasn't found" + print " on your local machine." + print "" + print " Please see \"Installing Oppia\" on the Oppia developers' wiki page:" + + if [ "${OS}" == "Darwin" ] ; then + print " https://github.com/oppia/oppia/wiki/Installing-Oppia-%28Mac-OS%29" + else + print " https://github.com/oppia/oppia/wiki/Installing-Oppia-%28Linux%29" + fi + + # If pip is not installed, quit. + exit 1 +fi + +function pip_install { + # Attempt standard pip install, or pass in --system if the local environment requires it. + # See https://github.com/pypa/pip/issues/3826 for context on when this situation may occur. 
+ pip install "$@" || pip install --system "$@" +} + +print Checking if pylint is installed in $TOOLS_DIR +if [ ! -d "$TOOLS_DIR/pylint-1.9.4" ]; then + print Installing Pylint + + pip_install pylint==1.9.4 --target="$TOOLS_DIR/pylint-1.9.4" +fi + +print Checking if Pillow is installed in $TOOLS_DIR +if [ ! -d "$TOOLS_DIR/Pillow-6.0.0" ]; then + print Installing Pillow + + pip_install Pillow==6.0.0 --target="$TOOLS_DIR/Pillow-6.0.0" + + if [[ $? != 0 && ${OS} == "Darwin" ]]; then + print " Pillow install failed. See troubleshooting instructions at:" + print " https://github.com/oppia/oppia/wiki/Troubleshooting#mac-os" + fi + +fi + +print Checking if pylint-quotes is installed in $TOOLS_DIR +if [ ! -d "$TOOLS_DIR/pylint-quotes-0.2.1" ]; then + print Installing pylint-quotes + # Note that the URL redirects, so we pass in -L to tell curl to follow the redirect. + curl -o pylint-quotes-0.2.1.tar.gz -L https://github.com/edaniszewski/pylint-quotes/archive/0.2.1.tar.gz + tar xzf pylint-quotes-0.2.1.tar.gz -C $TOOLS_DIR + rm pylint-quotes-0.2.1.tar.gz +fi + +# Install webtest. +print Checking if webtest is installed in third_party +if [ ! -d "$TOOLS_DIR/webtest-2.0.33" ]; then + print Installing webtest framework + # Note that the github URL redirects, so we pass in -L to tell curl to follow the redirect. + curl -o webtest-2.0.33.zip -L https://github.com/Pylons/webtest/archive/2.0.33.zip + unzip webtest-2.0.33.zip -d $TOOLS_DIR + rm webtest-2.0.33.zip +fi + +# Install isort. +print Checking if isort is installed in third_party +if [ ! -d "$TOOLS_DIR/isort-4.3.20" ]; then + print Installing isort + # Note that the URL redirects, so we pass in -L to tell curl to follow the redirect. + curl -o isort-4.3.20.tar.gz -L https://files.pythonhosted.org/packages/f1/84/5d66ddbe565e36682c336c841e51430384495b272c622ac229029f671be2/isort-4.3.20.tar.gz + tar xzf isort-4.3.20.tar.gz -C $TOOLS_DIR + rm isort-4.3.20.tar.gz +fi + +# Install pycodestyle. 
+print Checking if pycodestyle is installed in third_party +if [ ! -d "$TOOLS_DIR/pycodestyle-2.5.0" ]; then + print Installing pycodestyle + # Note that the URL redirects, so we pass in -L to tell curl to follow the redirect. + curl -o pycodestyle-2.5.0.tar.gz -L https://files.pythonhosted.org/packages/1c/d1/41294da5915f4cae7f4b388cea6c2cd0d6cd53039788635f6875dfe8c72f/pycodestyle-2.5.0.tar.gz + tar xzf pycodestyle-2.5.0.tar.gz -C $TOOLS_DIR + rm pycodestyle-2.5.0.tar.gz +fi + +# Install esprima. +print Checking if esprima is installed in third_party +if [ ! -d "$TOOLS_DIR/esprima-4.0.1" ]; then + print Installing esprima + # Note that the URL redirects, so we pass in -L to tell curl to follow the redirect. + curl -o esprima-4.0.1.tar.gz -L https://files.pythonhosted.org/packages/cc/a1/50fccd68a12bcfc27adfc9969c090286670a9109a0259f3f70943390b721/esprima-4.0.1.tar.gz + tar xzf esprima-4.0.1.tar.gz -C $TOOLS_DIR + rm esprima-4.0.1.tar.gz +fi + +# Python API for browsermob-proxy. +print Checking if browsermob-proxy is installed in $TOOLS_DIR +if [ ! -d "$TOOLS_DIR/browsermob-proxy-0.8.0" ]; then + print Installing browsermob-proxy + + pip_install browsermob-proxy==0.8.0 --target="$TOOLS_DIR/browsermob-proxy-0.8.0" +fi + +print Checking if selenium is installed in $TOOLS_DIR +if [ ! -d "$TOOLS_DIR/selenium-3.13.0" ]; then + print Installing selenium + + pip_install selenium==3.13.0 --target="$TOOLS_DIR/selenium-3.13.0" +fi + +print Checking if PyGithub is installed in $TOOLS_DIR +if [ ! 
-d "$TOOLS_DIR/PyGithub-1.43.7" ]; then + print Installing PyGithub + + pip_install PyGithub==1.43.7 --target="$TOOLS_DIR/PyGithub-1.43.7" +fi + +# install pre-commit script +print Installing pre-commit hook for git +$PYTHON_CMD $OPPIA_DIR/scripts/pre_commit_hook.py --install + +# install pre-push script +print Installing pre-push hook for git +$PYTHON_CMD $OPPIA_DIR/scripts/pre_push_hook.py --install From 77e21d7e06401c3265688f018887051a56fd8931 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Thu, 22 Aug 2019 23:07:56 +0530 Subject: [PATCH 011/141] convert --- scripts/install_third_party_libs.py | 328 ++++++++++++++-------------- scripts/setup.py | 15 -- 2 files changed, 170 insertions(+), 173 deletions(-) diff --git a/scripts/install_third_party_libs.py b/scripts/install_third_party_libs.py index e0328a1bc88f..5c88f9968518 100644 --- a/scripts/install_third_party_libs.py +++ b/scripts/install_third_party_libs.py @@ -1,22 +1,42 @@ # Copyright 2019 The Oppia Authors. All Rights Reserved. # -# Licensed under the Apache License, Version 2.0 (the "License"); +# Licensed under the Apache License, Version 2.0 (the 'License'); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS-IS" BASIS, +# distributed under the License is distributed on an 'AS-IS' BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +"""Installation script for Oppia third-party libraries.""" + +import argparse +import fileinput +import os +import pip +import subprocess +import sys + from . import install_third_party +from . 
import setup + +_PARSER = argparse.ArgumentParser() +_PARSER.add_argument( + '--nojsrepl', + help='optional; if specified, skips installation of skulpt.', + action='store_true') +_PARSER.add_argument( + '--noskulpt', + help='optional; if specified, skips installation of skulpt.', + action='store_true') -set -e -source $(dirname $0)/setup.sh || exit 1 +_TARGET_STDOUT = StringIO.StringIO() +setup.main() # Download and install required JS and zip files. print 'Installing third-party JS libraries and zip files.' install_third_party.install_third_party_libs() @@ -24,9 +44,10 @@ curr_dir = os.path.abspath(os.getcwd()) oppia_tools_dir = os.path.join(curr_dir, '..', 'oppia_tools') node_path = os.path.join(oppia_tools_dir, 'node-10.15.3') +third_party_dir = os.path.join('.', 'third_party') # Install third-party node modules needed for the build process. subprocess.call(('%s/bin/npm install --only=dev' % node_path).split()) -# This line removes the "npm ERR! missing:" messages. For reference, see this +# This line removes the 'npm ERR! missing:' messages. For reference, see this # thread: https://github.com/npm/npm/issues/19393#issuecomment-374076889 subprocess.call(('%s/bin/npm dedupe' % node_path).split()) @@ -38,169 +59,160 @@ # Note that skulpt.py will issue a warning saying its dist command will not # work properly without GitPython, but it does actually work due to the # patches. -print 'Checking whether Skulpt is installed in third_party' -if [ ! "$NO_SKULPT" -a ! -d "$THIRD_PARTY_DIR/static/skulpt-0.10.0" ]; then - if [ ! -d "$TOOLS_DIR/skulpt-0.10.0" ]; then - print Downloading Skulpt - cd $TOOLS_DIR - mkdir skulpt-0.10.0 - cd skulpt-0.10.0 - git clone https://github.com/skulpt/skulpt - cd skulpt - - # Use a specific Skulpt release. - git checkout 0.10.0 - - # Add a temporary backup file so that this script works on both Linux and - # Mac. - TMP_FILE=`mktemp /tmp/backup.XXXXXXXXXX` - - print Compiling Skulpt - - # The Skulpt setup function needs to be tweaked. 
It fails without certain - # third party commands. These are only used for unit tests and generating - # documentation and are not necessary when building Skulpt. - sed -e "s/ret = test()/ret = 0/" $TOOLS_DIR/skulpt-0.10.0/skulpt/skulpt.py |\ - sed -e "s/ doc()/ pass#doc()/" |\ - # This and the next command disable unit and compressed unit tests for the - # compressed distribution of Skulpt. These tests don't work on some - # Ubuntu environments and cause a libreadline dependency issue. - sed -e "s/ret = os.system(\"{0}/ret = 0 #os.system(\"{0}/" |\ - sed -e "s/ret = rununits(opt=True)/ret = 0/" > $TMP_FILE - mv $TMP_FILE $TOOLS_DIR/skulpt-0.10.0/skulpt/skulpt.py - $PYTHON_CMD $TOOLS_DIR/skulpt-0.10.0/skulpt/skulpt.py dist - - # Return to the Oppia root folder. - cd $OPPIA_DIR - fi - - # Move the build directory to the static resources folder. - mkdir -p $THIRD_PARTY_DIR/static/skulpt-0.10.0 - cp -r $TOOLS_DIR/skulpt-0.10.0/skulpt/dist/* $THIRD_PARTY_DIR/static/skulpt-0.10.0 -fi - -# Checking if pip is installed. If you are having -# trouble, please ensure that you have pip installed (see "Installing Oppia" -# on the Oppia developers' wiki page). -print Checking if pip is installed on the local machine -if ! type pip > /dev/null 2>&1 ; then - print "" - print " Pip is required to install Oppia dependencies, but pip wasn't found" - print " on your local machine." - print "" - print " Please see \"Installing Oppia\" on the Oppia developers' wiki page:" - - if [ "${OS}" == "Darwin" ] ; then - print " https://github.com/oppia/oppia/wiki/Installing-Oppia-%28Mac-OS%29" - else - print " https://github.com/oppia/oppia/wiki/Installing-Oppia-%28Linux%29" - fi - - # If pip is not installed, quit. - exit 1 -fi - -function pip_install { - # Attempt standard pip install, or pass in --system if the local environment requires it. - # See https://github.com/pypa/pip/issues/3826 for context on when this situation may occur. 
- pip install "$@" || pip install --system "$@" -} - -print Checking if pylint is installed in $TOOLS_DIR -if [ ! -d "$TOOLS_DIR/pylint-1.9.4" ]; then - print Installing Pylint - - pip_install pylint==1.9.4 --target="$TOOLS_DIR/pylint-1.9.4" -fi - -print Checking if Pillow is installed in $TOOLS_DIR -if [ ! -d "$TOOLS_DIR/Pillow-6.0.0" ]; then - print Installing Pillow - pip_install Pillow==6.0.0 --target="$TOOLS_DIR/Pillow-6.0.0" +# We use parse_known_args() to ignore the extra arguments which maybe used +# while calling this method from other Python scripts. +parsed_args, _ = _PARSER.parse_known_args() +no_skulpt = parsed_args.nojsrepl or parsed_args.noskulpt - if [[ $? != 0 && ${OS} == "Darwin" ]]; then - print " Pillow install failed. See troubleshooting instructions at:" - print " https://github.com/oppia/oppia/wiki/Troubleshooting#mac-os" - fi - -fi +print 'Checking whether Skulpt is installed in third_party' +if not os.path.exists( + os.path.join( + third_party_dir, 'static/skulpt-0.10.0')) and not no_skulpt: + if not os.path.exists(os.path.join(oppia_tools_dir, 'skulpt-0.10.0')): + print 'Downloading Skulpt' + os.chdir(oppia_tools_dir) + os.mkdir('skulpt-0.10.0') + os.chdir('skulpt-0.10.0') + subprocess.call('git clone https://github.com/skulpt/skulpt'.split()) + os.chdir('skulpt') + + # Use a specific Skulpt release. + subprocess.call('git checkout 0.10.0'.split()) + + # Add a temporary backup file so that this script works on both Linux and + # Mac. + TMP_FILE='/tmp/backup.XXXXXXXXXX' + + print 'Compiling Skulpt' + + # The Skulpt setup function needs to be tweaked. It fails without certain + # third party commands. These are only used for unit tests and generating + # documentation and are not necessary when building Skulpt. + for line in fileinput.input( + os.path.join( + oppia_tools_dir, 'skulpt-0.10.0/skulpt/skulpt.py')): + # Inside this loop the STDOUT will be redirected to the file. 
The + # comma after each print statement is needed to avoid double line + # breaks. + with (sys.stdout = open('file', 'w')): + print line.replace('ret = test()', 'ret = 0'), + print line.replace(' doc()', ' pass#doc()'), + # This and the next command disable unit and compressed unit tests for the + # compressed distribution of Skulpt. These tests don't work on some + # Ubuntu environments and cause a libreadline dependency issue. + print line.replace( + 'ret = os.system(\'{0}', 'ret = 0 #os.system(\'{0}'), + print line.replace('ret = rununits(opt=True)', 'ret = 0'), + + sed -e 's///' > $TMP_FILE + mv $TMP_FILE $oppia_tools_dir/skulpt-0.10.0/skulpt/skulpt.py + $PYTHON_CMD $oppia_tools_dir/skulpt-0.10.0/skulpt/skulpt.py dist + + # Return to the Oppia root folder. + cd $OPPIA_DIR + fi + + # Move the build directory to the static resources folder. + mkdir -p $THIRD_PARTY_DIR/static/skulpt-0.10.0 + cp -r $oppia_tools_dir/skulpt-0.10.0/skulpt/dist/* $THIRD_PARTY_DIR/static/skulpt-0.10.0 + fi -print Checking if pylint-quotes is installed in $TOOLS_DIR -if [ ! -d "$TOOLS_DIR/pylint-quotes-0.2.1" ]; then - print Installing pylint-quotes - # Note that the URL redirects, so we pass in -L to tell curl to follow the redirect. - curl -o pylint-quotes-0.2.1.tar.gz -L https://github.com/edaniszewski/pylint-quotes/archive/0.2.1.tar.gz - tar xzf pylint-quotes-0.2.1.tar.gz -C $TOOLS_DIR - rm pylint-quotes-0.2.1.tar.gz -fi +def pip_install(package, version, install_path): + try: + print 'Checking if pip is installed on the local machine' + import pip + except ImportError: + print 'Pip is required to install Oppia dependencies, but pip wasn\'t found' + print 'on your local machine.' 
+ print '' + print 'Please see \'Installing Oppia\' on the Oppia developers\' wiki page:' + + os_info = os.uname() + if os_info[0] != 'Darwin': + print 'https://github.com/oppia/oppia/wiki/Installing-Oppia-%28Mac-OS%29' + elif os_info[0] != 'Linux': + print 'https://github.com/oppia/oppia/wiki/Installing-Oppia-%28Linux%29' + else: + print 'https://github.com/oppia/oppia/wiki/Installing-Oppia-%28Windows%29' + sys.exit(1) + + if hasattr(pip, 'main'): + pip.main(['install', package]) + else: + import pip._internal + pip._internal.main([ + 'install', '%s==%s' % (package, version), '--target', install_path]) + + +print 'Checking if pylint is installed in %s' % oppia_tools_dir +if not os.path.exists(os.path.join(oppia_tools_dir, 'pylint-1.9.4')): + print 'Installing Pylint' + pip_install('pylint', 1.9.4, os.path.join(oppia_tools_dir, 'pylint-1.9.4')) + +print 'Checking if Pillow is installed in %s' % oppia_tools_dir +if not os.path.exists(os.path.join(oppia_tools_dir, 'Pillow-6.0.0')): + print Installing Pillow + pip_install('Pillow', 6.0.0, os.path,join(oppia_tools_dir, 'Pillow-6.0.0')) + +print 'Checking if pylint-quotes is installed in %s' % oppia_tools_dir +if not os.path.exists(os.path.join(oppia_tools_dir, 'pylint-quotes-0.2.1')): + print 'Installing pylint-quotes' + pip_install( + 'pylint-quotes', 0.2.1, + os.path.join(oppia_tools_dir, 'pylint-quotes-0.2.1')) # Install webtest. -print Checking if webtest is installed in third_party -if [ ! -d "$TOOLS_DIR/webtest-2.0.33" ]; then - print Installing webtest framework - # Note that the github URL redirects, so we pass in -L to tell curl to follow the redirect. 
- curl -o webtest-2.0.33.zip -L https://github.com/Pylons/webtest/archive/2.0.33.zip - unzip webtest-2.0.33.zip -d $TOOLS_DIR - rm webtest-2.0.33.zip -fi +print 'Checking if webtest is installed in %s' % third_party_dir +if not os.path.exists(os.path.join(oppia_tools_dir, 'webtest-2.0.33')): + print 'Installing webtest framework' + pip_install( + 'webtest', 2.0.33, os.path.join(oppia_tools_dir, 'webtest-2.0.33')) # Install isort. -print Checking if isort is installed in third_party -if [ ! -d "$TOOLS_DIR/isort-4.3.20" ]; then - print Installing isort - # Note that the URL redirects, so we pass in -L to tell curl to follow the redirect. - curl -o isort-4.3.20.tar.gz -L https://files.pythonhosted.org/packages/f1/84/5d66ddbe565e36682c336c841e51430384495b272c622ac229029f671be2/isort-4.3.20.tar.gz - tar xzf isort-4.3.20.tar.gz -C $TOOLS_DIR - rm isort-4.3.20.tar.gz -fi +print 'Checking if isort is installed in %s' % third_party_dir +if not os.path.exists(os.path.join(oppia_tools_dir, 'isort-4.3.20')) + print 'Installing isort' + pip_install('isort', 4.3.20, os.path.join(oppia_tools_dir, 'isort-4.3.20')) # Install pycodestyle. -print Checking if pycodestyle is installed in third_party -if [ ! -d "$TOOLS_DIR/pycodestyle-2.5.0" ]; then - print Installing pycodestyle - # Note that the URL redirects, so we pass in -L to tell curl to follow the redirect. - curl -o pycodestyle-2.5.0.tar.gz -L https://files.pythonhosted.org/packages/1c/d1/41294da5915f4cae7f4b388cea6c2cd0d6cd53039788635f6875dfe8c72f/pycodestyle-2.5.0.tar.gz - tar xzf pycodestyle-2.5.0.tar.gz -C $TOOLS_DIR - rm pycodestyle-2.5.0.tar.gz -fi +print 'Checking if pycodestyle is installed in %s' % third_party_dir +if not os.path.exists(os.path.join(oppia_tools_dir, 'pycodestyle-2.5.0')) + print 'Installing pycodestyle' + pip_install( + 'pycodestyle', 2.5.0, + os.path.join(oppia_tools_dir, 'pycodestyle-2.5.0')) # Install esprima. -print Checking if esprima is installed in third_party -if [ ! 
-d "$TOOLS_DIR/esprima-4.0.1" ]; then - print Installing esprima - # Note that the URL redirects, so we pass in -L to tell curl to follow the redirect. - curl -o esprima-4.0.1.tar.gz -L https://files.pythonhosted.org/packages/cc/a1/50fccd68a12bcfc27adfc9969c090286670a9109a0259f3f70943390b721/esprima-4.0.1.tar.gz - tar xzf esprima-4.0.1.tar.gz -C $TOOLS_DIR - rm esprima-4.0.1.tar.gz -fi +print 'Checking if esprima is installed in %s' % third_party_dir +if not os.path.exists(os.path.join(oppia_tools_dir, 'esprima-4.0.1')): + print 'Installing esprima' + pip_install('esprima', 4.0.1, os.path.join(oppia_tools_dir, 'esprima-4.0.1')) # Python API for browsermob-proxy. -print Checking if browsermob-proxy is installed in $TOOLS_DIR -if [ ! -d "$TOOLS_DIR/browsermob-proxy-0.8.0" ]; then - print Installing browsermob-proxy - - pip_install browsermob-proxy==0.8.0 --target="$TOOLS_DIR/browsermob-proxy-0.8.0" -fi - -print Checking if selenium is installed in $TOOLS_DIR -if [ ! -d "$TOOLS_DIR/selenium-3.13.0" ]; then - print Installing selenium - - pip_install selenium==3.13.0 --target="$TOOLS_DIR/selenium-3.13.0" -fi - -print Checking if PyGithub is installed in $TOOLS_DIR -if [ ! 
-d "$TOOLS_DIR/PyGithub-1.43.7" ]; then - print Installing PyGithub - - pip_install PyGithub==1.43.7 --target="$TOOLS_DIR/PyGithub-1.43.7" -fi - -# install pre-commit script -print Installing pre-commit hook for git -$PYTHON_CMD $OPPIA_DIR/scripts/pre_commit_hook.py --install - -# install pre-push script -print Installing pre-push hook for git -$PYTHON_CMD $OPPIA_DIR/scripts/pre_push_hook.py --install +print 'Checking if browsermob-proxy is installed in %s' % oppia_tools_dir +if not os.path.exists(os.path.join(oppia_tools_dir, 'browsermob-proxy-0.8.0')): + print 'Installing browsermob-proxy' + pip_install( + 'browsermob-proxy', 0.8.0, + os.path.join(oppia_tools_dir, 'browsermob-proxy-0.8.0')) + +print 'Checking if selenium is installed in %s' % oppia_tools_dir +if not os.path.exists(os.path.join(oppia_tools_dir, 'selenium-3.13.0')): + print 'Installing selenium' + pip_install( + 'selenium', 3.13.0, os.path.join(oppia_tools_dir, 'selenium-3.13.0')) + +print 'Checking if PyGithub is installed in %s' % oppia_tools_dir +if not os.path.exists(os.path.join(oppia_tools_dir, 'PyGithub-1.43.7')) + print 'Installing PyGithub' + pip_install( + 'PyGithub', 1.43.7, os.path.join(oppia_tools_dir, 'PyGithub-1.43.7')) + +# Install pre-commit script. +print 'Installing pre-commit hook for git' +subprocess.call('python scripts/pre_commit_hook.py --install'.split()) + +# Install pre-push script. +print 'Installing pre-push hook for git' +subprocess.call('python scripts/pre_push_hook.py --install'.split()) diff --git a/scripts/setup.py b/scripts/setup.py index f2040e6efaae..2568b6999436 100644 --- a/scripts/setup.py +++ b/scripts/setup.py @@ -26,16 +26,6 @@ from . 
import build -_PARSER = argparse.ArgumentParser() -_PARSER.add_argument( - '--nojsrepl', - help='optional; if specified, skips installation of skulpt.', - action='store_true') -_PARSER.add_argument( - '--noskulpt', - help='optional; if specified, skips installation of skulpt.', - action='store_true') - def delete_directory_tree(directory_path): """Recursively delete an existing directory tree. Does not do anything if @@ -113,11 +103,6 @@ def main(): """Runs the script to setup Oppia.""" test_python_version() - # We use parse_known_args() to ignore the extra arguments which maybe used - # while calling this method from other Python scripts. - parsed_args, _ = _PARSER.parse_known_args() - os.environ['NO_SKULPT'] = str(parsed_args.nojsrepl or parsed_args.noskulpt) - # The second option allows this script to also be run from deployment # folders. if not os.getcwd().endswith('oppia') and not os.getcwd().endswith( From c5e3c97003e6b4fa858a2c9576a452fd2d2d9e7c Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Fri, 23 Aug 2019 00:09:17 +0530 Subject: [PATCH 012/141] fix lint --- scripts/install_third_party_libs.py | 408 ++++++++++++++++------------ scripts/setup.py | 3 +- scripts/setup_gae.py | 4 +- 3 files changed, 240 insertions(+), 175 deletions(-) diff --git a/scripts/install_third_party_libs.py b/scripts/install_third_party_libs.py index 5c88f9968518..3eb1e8f90102 100644 --- a/scripts/install_third_party_libs.py +++ b/scripts/install_third_party_libs.py @@ -14,205 +14,271 @@ """Installation script for Oppia third-party libraries.""" +import StringIO import argparse +import contextlib import fileinput import os -import pip +import shutil import subprocess import sys from . import install_third_party from . 
import setup -_PARSER = argparse.ArgumentParser() -_PARSER.add_argument( - '--nojsrepl', - help='optional; if specified, skips installation of skulpt.', - action='store_true') -_PARSER.add_argument( - '--noskulpt', - help='optional; if specified, skips installation of skulpt.', - action='store_true') - -_TARGET_STDOUT = StringIO.StringIO() - -setup.main() -# Download and install required JS and zip files. -print 'Installing third-party JS libraries and zip files.' -install_third_party.install_third_party_libs() - -curr_dir = os.path.abspath(os.getcwd()) -oppia_tools_dir = os.path.join(curr_dir, '..', 'oppia_tools') -node_path = os.path.join(oppia_tools_dir, 'node-10.15.3') -third_party_dir = os.path.join('.', 'third_party') -# Install third-party node modules needed for the build process. -subprocess.call(('%s/bin/npm install --only=dev' % node_path).split()) -# This line removes the 'npm ERR! missing:' messages. For reference, see this -# thread: https://github.com/npm/npm/issues/19393#issuecomment-374076889 -subprocess.call(('%s/bin/npm dedupe' % node_path).split()) - -# Download and install Skulpt. Skulpt is built using a Python script included -# within the Skulpt repository (skulpt.py). This script normally requires -# GitPython, however the patches to it below (with the sed operations) lead to -# it no longer being required. The Python script is used to avoid having to -# manually recreate the Skulpt dist build process in install_third_party.py. -# Note that skulpt.py will issue a warning saying its dist command will not -# work properly without GitPython, but it does actually work due to the -# patches. - -# We use parse_known_args() to ignore the extra arguments which maybe used -# while calling this method from other Python scripts. 
-parsed_args, _ = _PARSER.parse_known_args() -no_skulpt = parsed_args.nojsrepl or parsed_args.noskulpt - -print 'Checking whether Skulpt is installed in third_party' -if not os.path.exists( - os.path.join( - third_party_dir, 'static/skulpt-0.10.0')) and not no_skulpt: - if not os.path.exists(os.path.join(oppia_tools_dir, 'skulpt-0.10.0')): - print 'Downloading Skulpt' - os.chdir(oppia_tools_dir) - os.mkdir('skulpt-0.10.0') - os.chdir('skulpt-0.10.0') - subprocess.call('git clone https://github.com/skulpt/skulpt'.split()) - os.chdir('skulpt') - - # Use a specific Skulpt release. - subprocess.call('git checkout 0.10.0'.split()) - - # Add a temporary backup file so that this script works on both Linux and - # Mac. - TMP_FILE='/tmp/backup.XXXXXXXXXX' - - print 'Compiling Skulpt' - - # The Skulpt setup function needs to be tweaked. It fails without certain - # third party commands. These are only used for unit tests and generating - # documentation and are not necessary when building Skulpt. - for line in fileinput.input( - os.path.join( - oppia_tools_dir, 'skulpt-0.10.0/skulpt/skulpt.py')): - # Inside this loop the STDOUT will be redirected to the file. The - # comma after each print statement is needed to avoid double line - # breaks. - with (sys.stdout = open('file', 'w')): - print line.replace('ret = test()', 'ret = 0'), - print line.replace(' doc()', ' pass#doc()'), - # This and the next command disable unit and compressed unit tests for the - # compressed distribution of Skulpt. These tests don't work on some - # Ubuntu environments and cause a libreadline dependency issue. - print line.replace( - 'ret = os.system(\'{0}', 'ret = 0 #os.system(\'{0}'), - print line.replace('ret = rununits(opt=True)', 'ret = 0'), - - sed -e 's///' > $TMP_FILE - mv $TMP_FILE $oppia_tools_dir/skulpt-0.10.0/skulpt/skulpt.py - $PYTHON_CMD $oppia_tools_dir/skulpt-0.10.0/skulpt/skulpt.py dist - - # Return to the Oppia root folder. 
- cd $OPPIA_DIR - fi - - # Move the build directory to the static resources folder. - mkdir -p $THIRD_PARTY_DIR/static/skulpt-0.10.0 - cp -r $oppia_tools_dir/skulpt-0.10.0/skulpt/dist/* $THIRD_PARTY_DIR/static/skulpt-0.10.0 - fi +OPPIA_DIR = os.getcwd() + + +@contextlib.contextmanager +def _redirect_stdout(new_target): + """Redirect stdout to the new target. + + Args: + new_target: TextIOWrapper. The new target to which stdout is redirected. + + Yields: + TextIOWrapper. The new target. + """ + old_target = sys.stdout + sys.stdout = new_target + try: + yield new_target + finally: + sys.stdout = old_target + def pip_install(package, version, install_path): + """Installs third party libraries with pip. + + Args: + package: str. The package name. + version: str. The package version. + install_path: str. The installation path for the package. + """ try: print 'Checking if pip is installed on the local machine' import pip except ImportError: - print 'Pip is required to install Oppia dependencies, but pip wasn\'t found' + print ( + 'Pip is required to install Oppia dependencies, but pip wasn\'t ' + 'found') print 'on your local machine.' 
print '' - print 'Please see \'Installing Oppia\' on the Oppia developers\' wiki page:' + print ( + 'Please see \'Installing Oppia\' on the Oppia developers\' wiki ' + 'page:') os_info = os.uname() if os_info[0] != 'Darwin': - print 'https://github.com/oppia/oppia/wiki/Installing-Oppia-%28Mac-OS%29' + print( + 'https://github.com/oppia/oppia/wiki/Installing-Oppia-%28Mac-' + 'OS%29') elif os_info[0] != 'Linux': - print 'https://github.com/oppia/oppia/wiki/Installing-Oppia-%28Linux%29' + print( + 'https://github.com/oppia/oppia/wiki/Installing-Oppia-%28Linux' + '%29') else: - print 'https://github.com/oppia/oppia/wiki/Installing-Oppia-%28Windows%29' + print( + 'https://github.com/oppia/oppia/wiki/Installing-Oppia-%28' + 'Windows%29') sys.exit(1) if hasattr(pip, 'main'): pip.main(['install', package]) else: import pip._internal - pip._internal.main([ + pip._internal.main(args=[ # pylint: disable=protected-access 'install', '%s==%s' % (package, version), '--target', install_path]) -print 'Checking if pylint is installed in %s' % oppia_tools_dir -if not os.path.exists(os.path.join(oppia_tools_dir, 'pylint-1.9.4')): - print 'Installing Pylint' - pip_install('pylint', 1.9.4, os.path.join(oppia_tools_dir, 'pylint-1.9.4')) - -print 'Checking if Pillow is installed in %s' % oppia_tools_dir -if not os.path.exists(os.path.join(oppia_tools_dir, 'Pillow-6.0.0')): - print Installing Pillow - pip_install('Pillow', 6.0.0, os.path,join(oppia_tools_dir, 'Pillow-6.0.0')) - -print 'Checking if pylint-quotes is installed in %s' % oppia_tools_dir -if not os.path.exists(os.path.join(oppia_tools_dir, 'pylint-quotes-0.2.1')): - print 'Installing pylint-quotes' - pip_install( - 'pylint-quotes', 0.2.1, - os.path.join(oppia_tools_dir, 'pylint-quotes-0.2.1')) - -# Install webtest. 
-print 'Checking if webtest is installed in %s' % third_party_dir -if not os.path.exists(os.path.join(oppia_tools_dir, 'webtest-2.0.33')): - print 'Installing webtest framework' - pip_install( - 'webtest', 2.0.33, os.path.join(oppia_tools_dir, 'webtest-2.0.33')) - -# Install isort. -print 'Checking if isort is installed in %s' % third_party_dir -if not os.path.exists(os.path.join(oppia_tools_dir, 'isort-4.3.20')) - print 'Installing isort' - pip_install('isort', 4.3.20, os.path.join(oppia_tools_dir, 'isort-4.3.20')) - -# Install pycodestyle. -print 'Checking if pycodestyle is installed in %s' % third_party_dir -if not os.path.exists(os.path.join(oppia_tools_dir, 'pycodestyle-2.5.0')) - print 'Installing pycodestyle' - pip_install( - 'pycodestyle', 2.5.0, - os.path.join(oppia_tools_dir, 'pycodestyle-2.5.0')) - -# Install esprima. -print 'Checking if esprima is installed in %s' % third_party_dir -if not os.path.exists(os.path.join(oppia_tools_dir, 'esprima-4.0.1')): - print 'Installing esprima' - pip_install('esprima', 4.0.1, os.path.join(oppia_tools_dir, 'esprima-4.0.1')) - -# Python API for browsermob-proxy. -print 'Checking if browsermob-proxy is installed in %s' % oppia_tools_dir -if not os.path.exists(os.path.join(oppia_tools_dir, 'browsermob-proxy-0.8.0')): - print 'Installing browsermob-proxy' - pip_install( - 'browsermob-proxy', 0.8.0, - os.path.join(oppia_tools_dir, 'browsermob-proxy-0.8.0')) - -print 'Checking if selenium is installed in %s' % oppia_tools_dir -if not os.path.exists(os.path.join(oppia_tools_dir, 'selenium-3.13.0')): - print 'Installing selenium' - pip_install( - 'selenium', 3.13.0, os.path.join(oppia_tools_dir, 'selenium-3.13.0')) - -print 'Checking if PyGithub is installed in %s' % oppia_tools_dir -if not os.path.exists(os.path.join(oppia_tools_dir, 'PyGithub-1.43.7')) - print 'Installing PyGithub' - pip_install( - 'PyGithub', 1.43.7, os.path.join(oppia_tools_dir, 'PyGithub-1.43.7')) - -# Install pre-commit script. 
-print 'Installing pre-commit hook for git' -subprocess.call('python scripts/pre_commit_hook.py --install'.split()) - -# Install pre-push script. -print 'Installing pre-push hook for git' -subprocess.call('python scripts/pre_push_hook.py --install'.split()) +def main(): + """Install third-party libraries for Oppia.""" + + _parser = argparse.ArgumentParser() + _parser.add_argument( + '--nojsrepl', + help='optional; if specified, skips installation of skulpt.', + action='store_true') + _parser.add_argument( + '--noskulpt', + help='optional; if specified, skips installation of skulpt.', + action='store_true') + + setup.main() + + # Download and install required JS and zip files. + print 'Installing third-party JS libraries and zip files.' + install_third_party.install_third_party_libs() + + curr_dir = os.path.abspath(os.getcwd()) + oppia_tools_dir = os.path.join(curr_dir, '..', 'oppia_tools') + node_path = os.path.join(oppia_tools_dir, 'node-10.15.3') + third_party_dir = os.path.join('.', 'third_party') + + # Install third-party node modules needed for the build process. + subprocess.call(('%s/bin/npm install --only=dev' % node_path).split()) + # This line removes the 'npm ERR! missing:' messages. For reference, see + # this thread: https://github.com/npm/npm/issues/19393#issuecomment- + # 374076889. + subprocess.call(('%s/bin/npm dedupe' % node_path).split()) + + # Download and install Skulpt. Skulpt is built using a Python script + # included within the Skulpt repository (skulpt.py). This script normally + # requires GitPython, however the patches to it below + # (with the sed operations) lead to it no longer being required. The Python + # script is used to avoid having to manually recreate the Skulpt dist build + # process in install_third_party.py. Note that skulpt.py will issue a + # warning saying its dist command will not work properly without GitPython, + # but it does actually work due to the patches. 
+ + # We use parse_known_args() to ignore the extra arguments which maybe used + # while calling this method from other Python scripts. + parsed_args, _ = _parser.parse_known_args() + no_skulpt = parsed_args.nojsrepl or parsed_args.noskulpt + + print 'Checking whether Skulpt is installed in third_party' + if not os.path.exists( + os.path.join( + third_party_dir, 'static/skulpt-0.10.0')) and not no_skulpt: + if not os.path.exists(os.path.join(oppia_tools_dir, 'skulpt-0.10.0')): + print 'Downloading Skulpt' + os.chdir(oppia_tools_dir) + os.mkdir('skulpt-0.10.0') + os.chdir('skulpt-0.10.0') + subprocess.call( + 'git clone https://github.com/skulpt/skulpt'.split()) + os.chdir('skulpt') + + # Use a specific Skulpt release. + subprocess.call('git checkout 0.10.0'.split()) + + # Add a temporary backup file so that this script works on both + # Linux and Mac. + tmp_file = '/tmp/backup.XXXXXXXXXX' + + print 'Compiling Skulpt' + target_stdout = StringIO.StringIO() + # The Skulpt setup function needs to be tweaked. It fails without + # certain third party commands. These are only used for unit tests + # and generating documentation and are not necessary when building + # Skulpt. + for line in fileinput.input( + files=os.path.join( + oppia_tools_dir, 'skulpt-0.10.0/skulpt/skulpt.py')): + # Inside this loop the STDOUT will be redirected to the file. + # The comma after each print statement is needed to avoid double + # line breaks. + with _redirect_stdout(target_stdout): + print line.replace('ret = test()', 'ret = 0'), + print line.replace(' doc()', ' pass#doc()'), + # This and the next command disable unit and compressed unit + # tests for the compressed distribution of Skulpt. These + # tests don't work on some Ubuntu environments and cause a + # libreadline dependency issue. 
+ print line.replace( + 'ret = os.system(\'{0}', 'ret = 0 #os.system(\'{0}'), + print line.replace('ret = rununits(opt=True)', 'ret = 0'), + + temp_file_content = target_stdout.getvalue() + with open(tmp_file, 'w') as f: + f.write(temp_file_content) + + shutil.move( + tmp_file, os.path.join( + oppia_tools_dir, 'skulpt-0.10.0/skulpt/skulpt.py')) + subprocess.call( + 'python $oppia_tools_dir/skulpt-0.10.0/skulpt/skulpt.py dist' + .split()) + + # Return to the Oppia root folder. + os.chdir(OPPIA_DIR) + + # Move the build directory to the static resources folder. + os.makedirs(os.path.join(third_party_dir, 'static/skulpt-0.10.0')) + shutil.copytree( + os.path.join(oppia_tools_dir, 'skulpt-0.10.0/skulpt/dist/'), + os.path.join(third_party_dir, 'static/skulpt-0.10.0')) + + print 'Checking if pylint is installed in %s' % oppia_tools_dir + if not os.path.exists(os.path.join(oppia_tools_dir, 'pylint-1.9.4')): + print 'Installing Pylint' + pip_install( + 'pylint', '1.9.4', os.path.join(oppia_tools_dir, 'pylint-1.9.4')) + + print 'Checking if Pillow is installed in %s' % oppia_tools_dir + if not os.path.exists(os.path.join(oppia_tools_dir, 'Pillow-6.0.0')): + print 'Installing Pillow' + pip_install( + 'Pillow', '6.0.0', os.path.join(oppia_tools_dir, 'Pillow-6.0.0')) + + print 'Checking if pylint-quotes is installed in %s' % oppia_tools_dir + if not os.path.exists(os.path.join(oppia_tools_dir, 'pylint-quotes-0.2.1')): + print 'Installing pylint-quotes' + pip_install( + 'pylint-quotes', '0.2.1', + os.path.join(oppia_tools_dir, 'pylint-quotes-0.2.1')) + + # Install webtest. + print 'Checking if webtest is installed in %s' % third_party_dir + if not os.path.exists(os.path.join(oppia_tools_dir, 'webtest-2.0.33')): + print 'Installing webtest framework' + pip_install( + 'webtest', '2.0.33', + os.path.join(oppia_tools_dir, 'webtest-2.0.33')) + + # Install isort. 
+ print 'Checking if isort is installed in %s' % third_party_dir + if not os.path.exists(os.path.join(oppia_tools_dir, 'isort-4.3.20')): + print 'Installing isort' + pip_install( + 'isort', '4.3.20', os.path.join(oppia_tools_dir, 'isort-4.3.20')) + + # Install pycodestyle. + print 'Checking if pycodestyle is installed in %s' % third_party_dir + if not os.path.exists(os.path.join(oppia_tools_dir, 'pycodestyle-2.5.0')): + print 'Installing pycodestyle' + pip_install( + 'pycodestyle', '2.5.0', + os.path.join(oppia_tools_dir, 'pycodestyle-2.5.0')) + + # Install esprima. + print 'Checking if esprima is installed in %s' % third_party_dir + if not os.path.exists(os.path.join(oppia_tools_dir, 'esprima-4.0.1')): + print 'Installing esprima' + pip_install( + 'esprima', '4.0.1', os.path.join(oppia_tools_dir, 'esprima-4.0.1')) + + # Python API for browsermob-proxy. + print 'Checking if browsermob-proxy is installed in %s' % oppia_tools_dir + if not os.path.exists( + os.path.join(oppia_tools_dir, 'browsermob-proxy-0.8.0')): + print 'Installing browsermob-proxy' + pip_install( + 'browsermob-proxy', '0.8.0', + os.path.join(oppia_tools_dir, 'browsermob-proxy-0.8.0')) + + print 'Checking if selenium is installed in %s' % oppia_tools_dir + if not os.path.exists(os.path.join(oppia_tools_dir, 'selenium-3.13.0')): + print 'Installing selenium' + pip_install( + 'selenium', '3.13.0', + os.path.join(oppia_tools_dir, 'selenium-3.13.0')) + + print 'Checking if PyGithub is installed in %s' % oppia_tools_dir + if not os.path.exists(os.path.join(oppia_tools_dir, 'PyGithub-1.43.7')): + print 'Installing PyGithub' + pip_install( + 'PyGithub', '1.43.7', + os.path.join(oppia_tools_dir, 'PyGithub-1.43.7')) + + # Install pre-commit script. + print 'Installing pre-commit hook for git' + subprocess.call('python scripts/pre_commit_hook.py --install'.split()) + + # Install pre-push script. 
+ print 'Installing pre-push hook for git' + subprocess.call('python scripts/pre_push_hook.py --install'.split()) + + +if __name__ == '__main__': + main() diff --git a/scripts/setup.py b/scripts/setup.py index 2568b6999436..01ea82a3e75a 100644 --- a/scripts/setup.py +++ b/scripts/setup.py @@ -16,7 +16,6 @@ scripts. Python execution environent set up for all scripts. """ -import argparse import os import shutil import subprocess @@ -48,7 +47,7 @@ def create_directory(directory_path): """ if os.path.exists(directory_path): return - os.mkdir(directory_path) + os.makedirs(directory_path) def maybe_install_dependencies( diff --git a/scripts/setup_gae.py b/scripts/setup_gae.py index 83a30b8eb066..26218835f2bf 100644 --- a/scripts/setup_gae.py +++ b/scripts/setup_gae.py @@ -53,7 +53,7 @@ def main(): % google_app_engine_home) if not os.path.exists(google_app_engine_home): print 'Downloading Google App Engine (this may take a little while)...' - os.mkdir(google_app_engine_home) + os.makedirs(google_app_engine_home) try: urllib.urlretrieve( 'https://storage.googleapis.com/appengine-sdks/featured/' @@ -73,7 +73,7 @@ def main(): % google_cloud_sdk_home) if not os.path.exists(google_cloud_sdk_home): print 'Downloading Google Cloud SDK (this may take a little while)...' 
- os.mkdir(google_cloud_sdk_home) + os.makedirs(google_cloud_sdk_home) try: urllib.urlretrieve( 'https://dl.google.com/dl/cloudsdk/channels/rapid/downloads/' From a5eebebee315054fabf84802cfd4c80318dffd38 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Fri, 23 Aug 2019 02:05:29 +0530 Subject: [PATCH 013/141] convert --- scripts/create_expression_parser.py | 68 +++++++++++++++++++++++++++++ 1 file changed, 68 insertions(+) create mode 100644 scripts/create_expression_parser.py diff --git a/scripts/create_expression_parser.py b/scripts/create_expression_parser.py new file mode 100644 index 000000000000..0298e2f626be --- /dev/null +++ b/scripts/create_expression_parser.py @@ -0,0 +1,68 @@ +# Copyright 2019 The Oppia Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS-IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""INSTRUCTIONS: +Run this script from the oppia root folder: + python -m scripts.create_expression_parser +The root folder MUST be named 'oppia'. +It produces the expression parser. +""" + +import fileinput +import os +import re +import subprocess + +from . import install_third_party_libs +from . import setup + + +def main(): + """Produces the expression parser.""" + setup.main() + + expression_parser_definition = ( + 'core/templates/dev/head/expressions/parser.pegjs') + expression_parser_js = ( + 'core/templates/dev/head/expressions/ExpressionParserService.js') + + # Install the basic environment, e.g. nodejs. 
+ install_third_party_libs.main() + + curr_dir = os.path.abspath(os.getcwd()) + oppia_tools_dir = os.path.join(curr_dir, '..', 'oppia_tools') + node_path = os.path.join(oppia_tools_dir, 'node-10.15.3') + + print 'Checking whether pegjs is installed in %s' % oppia_tools_dir + if not os.path.exists('node_modules/pegjs'): + print 'Installing pegjs' + subprocess.call(('%s/bin/npm install pegjs@0.8.0' % node_path).split()) + + subprocess.call(( + 'node_modules/pegjs/bin/pegjs %s %s' + % (expression_parser_definition, expression_parser_js)).split()) + + for line in fileinput.input(files=expression_parser_js, inplace=True): + print re.sub( + r'module\.exports.*$', + 'angular.module(\'oppia\').factory(' + '\'ExpressionParserService\', [\'$log\', function($log) {', line), + + print re.sub(r'^})();\s*$', '}]);', line), + + print 'Done!' + + +if __name__ == '__main__': + main() From f9a85861c87a7e8a88fb9f4e5c97e09a65009782 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Fri, 23 Aug 2019 03:22:07 +0530 Subject: [PATCH 014/141] convert --- scripts/install_chrome_on_travis.py | 50 +++++++++++++++++++++++++++++ 1 file changed, 50 insertions(+) create mode 100644 scripts/install_chrome_on_travis.py diff --git a/scripts/install_chrome_on_travis.py b/scripts/install_chrome_on_travis.py new file mode 100644 index 000000000000..578a43a61aa6 --- /dev/null +++ b/scripts/install_chrome_on_travis.py @@ -0,0 +1,50 @@ +# Copyright 2019 The Oppia Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS-IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +"""This script should only be ran by Travis to install and provide a constant +version of Chrome. +CHROME_SOURCE_URL is an environment variable set in Oppia's Travis repo +settings. It can be found under 'Environment Variables' header here: +https://travis-ci.org/oppia/oppia/settings. +""" + +import os +import subprocess +import urllib + + +def main(): + """Installs and provides a constant version of Chrome.""" + home_directory = os.environ.get('HOME') + oppia_dir = os.getcwd() + chrome_source_url = os.environ.get('CHROME_SOURCE_URL') + travis_chrome_path = os.path.join( + home_directory, '.cache/TravisChrome/', + os.path.basename(chrome_source_url)) + + if not os.path.isfile(travis_chrome_path): + # Caching Chrome's Debian package after download to prevent connection + # problem. + os.makedirs(os.path.join(home_directory, '.cache/TravisChrome/')) + os.chdir(os.path.join(home_directory, '.cache/TravisChrome/')) + urllib.urlretrieve( + chrome_source_url, filename=os.path.basename(chrome_source_url)) + os.chdir(oppia_dir) + + print 'Installing %s' % travis_chrome_path + subprocess.call(('sudo dpkg -i %s' % travis_chrome_path).split()) + + +if __name__ == '__main__': + main() From dc1845c1337a818971f17b09c0af7a281b608fc5 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Sat, 24 Aug 2019 03:44:42 +0530 Subject: [PATCH 015/141] convert --- scripts/vagrant_lock.py | 65 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 65 insertions(+) create mode 100644 scripts/vagrant_lock.py diff --git a/scripts/vagrant_lock.py b/scripts/vagrant_lock.py new file mode 100644 index 000000000000..fed9b7c02388 --- /dev/null +++ b/scripts/vagrant_lock.py @@ -0,0 +1,65 @@ +# Copyright 2019 The Oppia Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an 'AS-IS' BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""This file should not be invoked directly, but sourced from other sh scripts. + +Creates a lockfile to help with new user confusion when launching a vagrant +vm. See https://github.com/oppia/oppia/pull/2749 for details. + +It can be overridden by passing --nolock to start.sh. +""" +from __future__ import absolute_import # pylint: disable=import-only-modules + +import argparse +import os +import sys + +import python_utils + +from . import clean + + +def main(): + """Creates a lockfile.""" + vagrant_lock_file = './.lock' + + _parser = argparse.ArgumentParser() + _parser.add_argument( + '--nolock', + help='optional; if specified, skips creation of lockfile', + action='store_true') + parsed_args = _parser.parse_args() + if parsed_args.nolock: + clean.delete_file(vagrant_lock_file) + sys.exit(0) + + if os.path.isfile(vagrant_lock_file): + python_utils.PRINT('') + python_utils.PRINT('Another setup instance is already running') + python_utils.PRINT('') + python_utils.PRINT( + 'Please wait for that instance to complete or terminate it') + python_utils.PRINT('') + python_utils.PRINT( + 'If you ran $0 twice on purpose, you can override this with ' + '--nolock') + python_utils.PRINT('') + sys.exit(1) + else: + os.utime(vagrant_lock_file, None) + clean.delete_file(vagrant_lock_file) + + +if __name__ == '__main__': + main() From eee27b9cc2cabb411bbd285d990de62b7a759822 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Sun, 25 Aug 2019 03:46:10 +0530 Subject: [PATCH 016/141] convert --- scripts/install_third_party_libs.py | 116 +++++++++++++++++----------- 
scripts/pre_commit_linter.py | 3 +- 2 files changed, 74 insertions(+), 45 deletions(-) diff --git a/scripts/install_third_party_libs.py b/scripts/install_third_party_libs.py index 3eb1e8f90102..c331f0cba32e 100644 --- a/scripts/install_third_party_libs.py +++ b/scripts/install_third_party_libs.py @@ -13,8 +13,8 @@ # limitations under the License. """Installation script for Oppia third-party libraries.""" +from __future__ import absolute_import # pylint: disable=import-only-modules -import StringIO import argparse import contextlib import fileinput @@ -23,6 +23,8 @@ import subprocess import sys +import python_utils + from . import install_third_party from . import setup @@ -56,29 +58,29 @@ def pip_install(package, version, install_path): install_path: str. The installation path for the package. """ try: - print 'Checking if pip is installed on the local machine' + python_utils.PRINT('Checking if pip is installed on the local machine') import pip except ImportError: - print ( + python_utils.PRINT( 'Pip is required to install Oppia dependencies, but pip wasn\'t ' 'found') - print 'on your local machine.' - print '' - print ( + python_utils.PRINT('on your local machine.') + python_utils.PRINT('') + python_utils.PRINT( 'Please see \'Installing Oppia\' on the Oppia developers\' wiki ' 'page:') os_info = os.uname() if os_info[0] != 'Darwin': - print( + python_utils.PRINT( 'https://github.com/oppia/oppia/wiki/Installing-Oppia-%28Mac-' 'OS%29') elif os_info[0] != 'Linux': - print( + python_utils.PRINT( 'https://github.com/oppia/oppia/wiki/Installing-Oppia-%28Linux' '%29') else: - print( + python_utils.PRINT( 'https://github.com/oppia/oppia/wiki/Installing-Oppia-%28' 'Windows%29') sys.exit(1) @@ -107,7 +109,7 @@ def main(): setup.main() # Download and install required JS and zip files. - print 'Installing third-party JS libraries and zip files.' 
+ python_utils.PRINT('Installing third-party JS libraries and zip files.') install_third_party.install_third_party_libs() curr_dir = os.path.abspath(os.getcwd()) @@ -136,12 +138,12 @@ def main(): parsed_args, _ = _parser.parse_known_args() no_skulpt = parsed_args.nojsrepl or parsed_args.noskulpt - print 'Checking whether Skulpt is installed in third_party' + python_utils.PRINT('Checking whether Skulpt is installed in third_party') if not os.path.exists( os.path.join( third_party_dir, 'static/skulpt-0.10.0')) and not no_skulpt: if not os.path.exists(os.path.join(oppia_tools_dir, 'skulpt-0.10.0')): - print 'Downloading Skulpt' + python_utils.PRINT('Downloading Skulpt') os.chdir(oppia_tools_dir) os.mkdir('skulpt-0.10.0') os.chdir('skulpt-0.10.0') @@ -156,8 +158,8 @@ def main(): # Linux and Mac. tmp_file = '/tmp/backup.XXXXXXXXXX' - print 'Compiling Skulpt' - target_stdout = StringIO.StringIO() + python_utils.PRINT('Compiling Skulpt') + target_stdout = python_utils.string_io() # The Skulpt setup function needs to be tweaked. It fails without # certain third party commands. These are only used for unit tests # and generating documentation and are not necessary when building @@ -166,21 +168,30 @@ def main(): files=os.path.join( oppia_tools_dir, 'skulpt-0.10.0/skulpt/skulpt.py')): # Inside this loop the STDOUT will be redirected to the file. - # The comma after each print statement is needed to avoid double - # line breaks. + # The comma after each python_utils.PRINT statement is needed to + # avoid double line breaks. with _redirect_stdout(target_stdout): - print line.replace('ret = test()', 'ret = 0'), - print line.replace(' doc()', ' pass#doc()'), + python_utils.PRINT( + line.replace('ret = test()', 'ret = 0'), + end='') + python_utils.PRINT( + line.replace(' doc()', ' pass#doc()'), + end='') # This and the next command disable unit and compressed unit # tests for the compressed distribution of Skulpt. 
These # tests don't work on some Ubuntu environments and cause a # libreadline dependency issue. - print line.replace( - 'ret = os.system(\'{0}', 'ret = 0 #os.system(\'{0}'), - print line.replace('ret = rununits(opt=True)', 'ret = 0'), + python_utils.PRINT( + line.replace( + 'ret = os.system(\'{0}', + 'ret = 0 #os.system(\'{0}'), + end='') + python_utils.PRINT( + line.replace('ret = rununits(opt=True)', 'ret = 0'), + end='') temp_file_content = target_stdout.getvalue() - with open(tmp_file, 'w') as f: + with python_utils.open_file(tmp_file, 'w') as f: f.write(temp_file_content) shutil.move( @@ -199,84 +210,101 @@ def main(): os.path.join(oppia_tools_dir, 'skulpt-0.10.0/skulpt/dist/'), os.path.join(third_party_dir, 'static/skulpt-0.10.0')) - print 'Checking if pylint is installed in %s' % oppia_tools_dir + python_utils.PRINT( + 'Checking if pylint is installed in %s' % oppia_tools_dir) if not os.path.exists(os.path.join(oppia_tools_dir, 'pylint-1.9.4')): - print 'Installing Pylint' + python_utils.PRINT('Installing Pylint') pip_install( 'pylint', '1.9.4', os.path.join(oppia_tools_dir, 'pylint-1.9.4')) - print 'Checking if Pillow is installed in %s' % oppia_tools_dir + python_utils.PRINT( + 'Checking if Pillow is installed in %s' % oppia_tools_dir) if not os.path.exists(os.path.join(oppia_tools_dir, 'Pillow-6.0.0')): - print 'Installing Pillow' + python_utils.PRINT('Installing Pillow') pip_install( 'Pillow', '6.0.0', os.path.join(oppia_tools_dir, 'Pillow-6.0.0')) - print 'Checking if pylint-quotes is installed in %s' % oppia_tools_dir + python_utils.PRINT( + 'Checking if pylint-quotes is installed in %s' % oppia_tools_dir) if not os.path.exists(os.path.join(oppia_tools_dir, 'pylint-quotes-0.2.1')): - print 'Installing pylint-quotes' + python_utils.PRINT('Installing pylint-quotes') pip_install( 'pylint-quotes', '0.2.1', os.path.join(oppia_tools_dir, 'pylint-quotes-0.2.1')) # Install webtest. 
- print 'Checking if webtest is installed in %s' % third_party_dir + python_utils.PRINT( + 'Checking if webtest is installed in %s' % third_party_dir) if not os.path.exists(os.path.join(oppia_tools_dir, 'webtest-2.0.33')): - print 'Installing webtest framework' + python_utils.PRINT('Installing webtest framework') pip_install( 'webtest', '2.0.33', os.path.join(oppia_tools_dir, 'webtest-2.0.33')) # Install isort. - print 'Checking if isort is installed in %s' % third_party_dir + python_utils.PRINT('Checking if isort is installed in %s' % third_party_dir) if not os.path.exists(os.path.join(oppia_tools_dir, 'isort-4.3.20')): - print 'Installing isort' + python_utils.PRINT('Installing isort') pip_install( 'isort', '4.3.20', os.path.join(oppia_tools_dir, 'isort-4.3.20')) # Install pycodestyle. - print 'Checking if pycodestyle is installed in %s' % third_party_dir + python_utils.PRINT( + 'Checking if pycodestyle is installed in %s' % third_party_dir) if not os.path.exists(os.path.join(oppia_tools_dir, 'pycodestyle-2.5.0')): - print 'Installing pycodestyle' + python_utils.PRINT('Installing pycodestyle') pip_install( 'pycodestyle', '2.5.0', os.path.join(oppia_tools_dir, 'pycodestyle-2.5.0')) # Install esprima. - print 'Checking if esprima is installed in %s' % third_party_dir + python_utils.PRINT( + 'Checking if esprima is installed in %s' % third_party_dir) if not os.path.exists(os.path.join(oppia_tools_dir, 'esprima-4.0.1')): - print 'Installing esprima' + python_utils.PRINT('Installing esprima') pip_install( 'esprima', '4.0.1', os.path.join(oppia_tools_dir, 'esprima-4.0.1')) # Python API for browsermob-proxy. 
- print 'Checking if browsermob-proxy is installed in %s' % oppia_tools_dir + python_utils.PRINT( + 'Checking if browsermob-proxy is installed in %s' % oppia_tools_dir) if not os.path.exists( os.path.join(oppia_tools_dir, 'browsermob-proxy-0.8.0')): - print 'Installing browsermob-proxy' + python_utils.PRINT('Installing browsermob-proxy') pip_install( 'browsermob-proxy', '0.8.0', os.path.join(oppia_tools_dir, 'browsermob-proxy-0.8.0')) - print 'Checking if selenium is installed in %s' % oppia_tools_dir + python_utils.PRINT( + 'Checking if selenium is installed in %s' % oppia_tools_dir) if not os.path.exists(os.path.join(oppia_tools_dir, 'selenium-3.13.0')): - print 'Installing selenium' + python_utils.PRINT('Installing selenium') pip_install( 'selenium', '3.13.0', os.path.join(oppia_tools_dir, 'selenium-3.13.0')) - print 'Checking if PyGithub is installed in %s' % oppia_tools_dir + python_utils.PRINT( + 'Checking if PyGithub is installed in %s' % oppia_tools_dir) if not os.path.exists(os.path.join(oppia_tools_dir, 'PyGithub-1.43.7')): - print 'Installing PyGithub' + python_utils.PRINT('Installing PyGithub') pip_install( 'PyGithub', '1.43.7', os.path.join(oppia_tools_dir, 'PyGithub-1.43.7')) + python_utils.PRINT( + 'Checking if psutil is installed in %s' % oppia_tools_dir) + if not os.path.exists(os.path.join(oppia_tools_dir, 'psutil-5.6.3')): + python_utils.PRINT('Installing psutil') + pip_install( + 'psutil', '5.6.3', + os.path.join(oppia_tools_dir, 'psutil-5.6.3')) + # Install pre-commit script. - print 'Installing pre-commit hook for git' + python_utils.PRINT('Installing pre-commit hook for git') subprocess.call('python scripts/pre_commit_hook.py --install'.split()) # Install pre-push script. 
- print 'Installing pre-push hook for git' + python_utils.PRINT('Installing pre-push hook for git') subprocess.call('python scripts/pre_push_hook.py --install'.split()) diff --git a/scripts/pre_commit_linter.py b/scripts/pre_commit_linter.py index 370f964cce39..fb9f07bd5409 100644 --- a/scripts/pre_commit_linter.py +++ b/scripts/pre_commit_linter.py @@ -399,7 +399,7 @@ 'excluded_dirs': () }, { - 'regexp': re.compile(r'\Winput\('), + 'regexp': re.compile(r'[^.|\w]input\('), 'message': 'Please use python_utils.INPUT.', 'excluded_files': (), 'excluded_dirs': () @@ -577,6 +577,7 @@ os.path.join(_PARENT_DIR, 'oppia_tools', 'pylint-quotes-0.2.1'), os.path.join(_PARENT_DIR, 'oppia_tools', 'selenium-3.13.0'), os.path.join(_PARENT_DIR, 'oppia_tools', 'PyGithub-1.43.7'), + os.path.join(_PARENT_DIR, 'oppia_tools', 'psutil-5.6.3'), os.path.join(_PARENT_DIR, 'oppia_tools', 'Pillow-6.0.0'), os.path.join('third_party', 'backports.functools_lru_cache-1.5'), os.path.join('third_party', 'beautifulsoup4-4.7.1'), From a6dc5bbbea6dbc3e44a0fc544e46ca986be16d8a Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Sun, 25 Aug 2019 03:46:19 +0530 Subject: [PATCH 017/141] convert --- scripts/run_e2e_tests.py | 274 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 274 insertions(+) create mode 100644 scripts/run_e2e_tests.py diff --git a/scripts/run_e2e_tests.py b/scripts/run_e2e_tests.py new file mode 100644 index 000000000000..167c7f1b4cbe --- /dev/null +++ b/scripts/run_e2e_tests.py @@ -0,0 +1,274 @@ +# Copyright 2019 The Oppia Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an 'AS-IS' BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""INSTRUCTIONS: + +Run this script from the oppia root folder: + bash scripts/run_e2e_tests.sh + +Optional arguments: + --browserstack Run the tests on browserstack using the + protractor-browserstack.conf.js file. + --skip-install=true/false If true, skips installing dependencies. The + default value is false. + --sharding=true/false Disables/Enables parallelization of protractor tests. + --sharding-instances=# Sets the number of parallel browsers to open while + sharding. + --prod_env Run the tests in prod mode. Static resources are served from + build directory and use cache slugs. +Sharding must be disabled (either by passing in false to --sharding or 1 to +--sharding-instances) if running any tests in isolation (fit or fdescribe). + --suite=suite_name Performs test for different suites, here suites are the + name of the test files present in core/tests/protractor_desktop/ and + core/test/protractor/ dirs. e.g. for the file + core/tests/protractor/accessibility.js use --suite=accessibility. + For performing a full test, no argument is required. + +The root folder MUST be named 'oppia'. + +Note: You can replace 'it' with 'fit' or 'describe' with 'fdescribe' to run a +single test or test suite. +""" +from __future__ import absolute_import # pylint: disable=import-only-modules + +import argparse +import atexit +import fileinput +import os +import shutil +import signal +import socket +import subprocess +import sys +import time + +import python_utils + +from . import build +from . import install_chrome_on_travis +from . import setup +from . 
import setup_gae + +_PARENT_DIR = os.path.abspath(os.path.join(os.getcwd(), os.pardir)) +_PSUTIL_PATH = os.path.join(_PARENT_DIR, 'oppia_tools', 'psutil-5.6.3') +sys.path.insert(0, _PSUTIL_PATH) + +import psutil # isort:skip # pylint: disable=wrong-import-position + +_PARSER = argparse.ArgumentParser() +_PARSER.add_argument( + '--skip_install', + help='optional; if specified, skips installing dependencies', + action='store_true') +_PARSER.add_argument( + '--run_minified_tests', + help='optional; if specified, runs frontend karma tests on both minified ' + 'and non-minified code', + action='store_true') +_PARSER.add_argument( + '--prod_env', + help='optional; if specified, emulate running Oppia in a production ' + 'environment.', + action='store_true') +_PARSER.add_argument( + '--browserstack', + help='optional; if specified, run the e2e tests on browserstack.', + action='store_true') +_PARSER.add_argument( + '--suite', + help='Performs test for different suites. Performs a full test by default.', + default='full') +_PARSER.add_argument( + '--sharding', + help='optional; if specified, Disables parallelization of protractor tests', + action='store_true') +_PARSER.add_argument( + '--sharding_instances', + help='Sets the number of parallel browsers to open while sharding', + default='3') + + +# Credits: https://stackoverflow.com/a/20691431/11755830 +def kill_process(port): + """Kills a process that is listening to a specific port. + + Args: + port: int. The port number. + """ + for process in psutil.process_iter(): + for conns in process.connections(kind='inet'): + if conns.laddr.port == port: + process.send_signal(signal.SIGTERM) + + +def cleanup(): + """Send a kill signal to the dev server and Selenium server.""" + kill_process(4444) + kill_process(9001) + + # Wait for the servers to go down; suppress 'connection refused' error + # output from nc since that is exactly what we are expecting to happen. 
+ while not is_port_open(4444) or not is_port_open(9001): + time.sleep(1) + + if os.path.isdir('../protractor-screenshots'): + python_utils.PRINT('') + python_utils.PRINT( + 'Note: If ADD_SCREENSHOT_REPORTER is set to true in') + python_utils.PRINT( + 'core/tests/protractor.conf.js, you can view screenshots') + python_utils.PRINT('of the failed tests in ../protractor-screenshots/') + python_utils.PRINT('') + + python_utils.PRINT('Done!') + + +def is_port_open(port): + """Checks if no process is listening to the port. + + Args: + port: int. The port number. + + Return: + bool. True if port is open else False. + """ + sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + result = sock.connect_ex(('localhost', port)) + sock.close() + return bool(result) + + +def main(): + """Runs the end to end tests.""" + setup.main() + setup_gae.main() + if os.environ.get('TRAVIS'): + install_chrome_on_travis.main() + + parsed_args = _PARSER.parse_args() + setup.maybe_install_dependencies( + parsed_args.skip_install, parsed_args.run_minified_tests) + + if not is_port_open(8181): + python_utils.PRINT('') + python_utils.PRINT( + 'There is already a server running on localhost:8181.') + python_utils.PRINT( + 'Please terminate it before running the end-to-end tests.') + python_utils.PRINT('Exiting.') + python_utils.PRINT('') + sys.exit(1) + + # Forces the cleanup function to run on exit. + # Developers: note that at the end of this script, the cleanup() function at + # the top of the file is run. 
+ atexit.register(cleanup) + signal.signal(signal.SIGINT, cleanup) + + if parsed_args.prod_env: + python_utils.PRINT('Generating files for production mode...') + constants_env_variable = '\'DEV_MODE\': false' + for line in fileinput.input(files='assets/constants.js', inplace=True): + python_utils.PRINT( + line.replace('\'DEV_MODE\': .*', constants_env_variable), + end='') + subprocess.call('python scripts/build.py --prod_env'.split()) + app_yaml_filepath = 'app.yaml' + else: + constants_env_variable = '\'DEV_MODE\': true' + for line in fileinput.input(files='assets/constants.js', inplace=True): + python_utils.PRINT( + line.replace('\'DEV_MODE\': .*', constants_env_variable), + end='') + build.build() + app_yaml_filepath = 'app_dev.yaml' + + # Delete the modified feconf.py file(-i.bak) + os.remove('assets/constants.js.bak') + + # Start a selenium server using chromedriver 2.41. + # The 'detach' option continues the flow once the server is up and runnning. + # The 'quiet' option prints only the necessary information about the server + # start-up process. + subprocess.call( + 'node_modules/.bin/webdriver-manager update --versions.chrome 2.41' + .split()) + subprocess.call( + 'node_modules/.bin/webdriver-manager start --versions.chrome 2.41 ' + '--detach --quiet'.split()) + + # Start a selenium process. The program sends thousands of lines of useless + # info logs to stderr so we discard them. + # TODO(jacob): Find a webdriver or selenium argument that controls log + # level. + subprocess.call( + 'node_modules/.bin/webdriver-manager start 2>/dev/null)&'.split()) + # Start a demo server. 
+ curr_dir = os.path.abspath(os.getcwd()) + oppia_tools_dir = os.path.join(curr_dir, '..', 'oppia_tools') + google_app_engine_home = os.path.join( + oppia_tools_dir, 'google_appengine_1.9.67/google_appengine') + subprocess.call( + ('python %s/dev_appserver.py --host=0.0.0.0 --port=9001 ' + '--clear_datastore=yes --dev_appserver_log_level=critical ' + '--log_level=critical --skip_sdk_update_check=true $%s)&' + % (google_app_engine_home, app_yaml_filepath)).split()) + + # Wait for the servers to come up. + while is_port_open(4444) or is_port_open(9001): + time.sleep(1) + + # Delete outdated screenshots. + if os.path.isdir('../protractor-screenshots'): + shutil.rmtree('../protractor-screenshots') + + # Run the end-to-end tests. The conditional is used to run protractor + # without any sharding parameters if it is disabled. This helps with + # isolated tests. Isolated tests do not work properly unless no sharding + # parameters are passed in at all. + # TODO(bhenning): Figure out if this is a bug with protractor. 
+ if not parsed_args.browserstack: + if not parsed_args.sharding or parsed_args.sharding_instances == '1': + subprocess.call(( + 'node_modules/protractor/bin/protractor ' + 'core/tests/protractor.conf.js --suite %s' + % parsed_args.suite).split()) + else: + subprocess.call(( + 'node_modules/protractor/bin/protractor ' + 'core/tests/protractor.conf.js --capabilities.shardTestFiles=%s' + ' --capabilities.maxInstances=%s --suite %s' + % ( + parsed_args.sharding, parsed_args.sharding_instances, + parsed_args.suite)).split()) + else: + python_utils.PRINT('Running the tests on browserstack...') + if not parsed_args.sharding or parsed_args.sharding_instances == '1': + subprocess.call( + ('node_modules/protractor/bin/protractor ' + 'core/tests/protractor-browserstack.conf.js --suite %s ' + % parsed_args.suite).split()) + else: + subprocess.call(( + 'node_modules/protractor/bin/protractor ' + 'core/tests/protractor-browserstack.conf.js ' + '--capabilities.shardTestFiles=%s --capabilities.maxInstances=' + '%s --suite %s' + % ( + parsed_args.sharding, parsed_args.sharding_instances, + parsed_args.suite)).split()) + + +if __name__ == '__main__': + main() From e88fd13b980dbba1263b406944ac927de5dcbe43 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Sun, 25 Aug 2019 04:11:56 +0530 Subject: [PATCH 018/141] address --- scripts/install_third_party_libs.py | 123 +++++++--------------------- 1 file changed, 29 insertions(+), 94 deletions(-) diff --git a/scripts/install_third_party_libs.py b/scripts/install_third_party_libs.py index c331f0cba32e..c376a10296a0 100644 --- a/scripts/install_third_party_libs.py +++ b/scripts/install_third_party_libs.py @@ -106,17 +106,41 @@ def main(): help='optional; if specified, skips installation of skulpt.', action='store_true') + curr_dir = os.path.abspath(os.getcwd()) + oppia_tools_dir = os.path.join(curr_dir, '..', 'oppia_tools') + node_path = os.path.join(oppia_tools_dir, 'node-10.15.3') + third_party_dir = os.path.join('.', 'third_party') 
+ + pip_dependencies = [ + ('future', '0.17.1', third_party_dir), + ('pylint', '1.9.4', oppia_tools_dir), + ('Pillow', '6.0.0', oppia_tools_dir), + ('pylint-quotes', '0.2.1', oppia_tools_dir), + ('webtest', '2.0.33', oppia_tools_dir), + ('isort', '4.3.20', oppia_tools_dir), + ('pycodestyle', '2.5.0', oppia_tools_dir), + ('esprima', '4.0.1', oppia_tools_dir), + ('browsermob-proxy', '0.8.0', oppia_tools_dir), + ('selenium', '3.13.0', oppia_tools_dir), + ('PyGithub', '1.43.7', oppia_tools_dir), + ('psutil', '5.6.3', oppia_tools_dir), + ] + + for package, version, path in pip_dependencies: + python_utils.PRINT( + 'Checking if %s is installed in %s' % (package, path)) + + exact_lib_path = os.path.join(path, '%s-%s' % (package, version)) + if not os.path.exists(exact_lib_path): + python_utils.PRINT('Installing %s' % package) + pip_install(package, version, exact_lib_path) + setup.main() # Download and install required JS and zip files. python_utils.PRINT('Installing third-party JS libraries and zip files.') install_third_party.install_third_party_libs() - curr_dir = os.path.abspath(os.getcwd()) - oppia_tools_dir = os.path.join(curr_dir, '..', 'oppia_tools') - node_path = os.path.join(oppia_tools_dir, 'node-10.15.3') - third_party_dir = os.path.join('.', 'third_party') - # Install third-party node modules needed for the build process. subprocess.call(('%s/bin/npm install --only=dev' % node_path).split()) # This line removes the 'npm ERR! missing:' messages. 
For reference, see @@ -210,95 +234,6 @@ def main(): os.path.join(oppia_tools_dir, 'skulpt-0.10.0/skulpt/dist/'), os.path.join(third_party_dir, 'static/skulpt-0.10.0')) - python_utils.PRINT( - 'Checking if pylint is installed in %s' % oppia_tools_dir) - if not os.path.exists(os.path.join(oppia_tools_dir, 'pylint-1.9.4')): - python_utils.PRINT('Installing Pylint') - pip_install( - 'pylint', '1.9.4', os.path.join(oppia_tools_dir, 'pylint-1.9.4')) - - python_utils.PRINT( - 'Checking if Pillow is installed in %s' % oppia_tools_dir) - if not os.path.exists(os.path.join(oppia_tools_dir, 'Pillow-6.0.0')): - python_utils.PRINT('Installing Pillow') - pip_install( - 'Pillow', '6.0.0', os.path.join(oppia_tools_dir, 'Pillow-6.0.0')) - - python_utils.PRINT( - 'Checking if pylint-quotes is installed in %s' % oppia_tools_dir) - if not os.path.exists(os.path.join(oppia_tools_dir, 'pylint-quotes-0.2.1')): - python_utils.PRINT('Installing pylint-quotes') - pip_install( - 'pylint-quotes', '0.2.1', - os.path.join(oppia_tools_dir, 'pylint-quotes-0.2.1')) - - # Install webtest. - python_utils.PRINT( - 'Checking if webtest is installed in %s' % third_party_dir) - if not os.path.exists(os.path.join(oppia_tools_dir, 'webtest-2.0.33')): - python_utils.PRINT('Installing webtest framework') - pip_install( - 'webtest', '2.0.33', - os.path.join(oppia_tools_dir, 'webtest-2.0.33')) - - # Install isort. - python_utils.PRINT('Checking if isort is installed in %s' % third_party_dir) - if not os.path.exists(os.path.join(oppia_tools_dir, 'isort-4.3.20')): - python_utils.PRINT('Installing isort') - pip_install( - 'isort', '4.3.20', os.path.join(oppia_tools_dir, 'isort-4.3.20')) - - # Install pycodestyle. 
- python_utils.PRINT( - 'Checking if pycodestyle is installed in %s' % third_party_dir) - if not os.path.exists(os.path.join(oppia_tools_dir, 'pycodestyle-2.5.0')): - python_utils.PRINT('Installing pycodestyle') - pip_install( - 'pycodestyle', '2.5.0', - os.path.join(oppia_tools_dir, 'pycodestyle-2.5.0')) - - # Install esprima. - python_utils.PRINT( - 'Checking if esprima is installed in %s' % third_party_dir) - if not os.path.exists(os.path.join(oppia_tools_dir, 'esprima-4.0.1')): - python_utils.PRINT('Installing esprima') - pip_install( - 'esprima', '4.0.1', os.path.join(oppia_tools_dir, 'esprima-4.0.1')) - - # Python API for browsermob-proxy. - python_utils.PRINT( - 'Checking if browsermob-proxy is installed in %s' % oppia_tools_dir) - if not os.path.exists( - os.path.join(oppia_tools_dir, 'browsermob-proxy-0.8.0')): - python_utils.PRINT('Installing browsermob-proxy') - pip_install( - 'browsermob-proxy', '0.8.0', - os.path.join(oppia_tools_dir, 'browsermob-proxy-0.8.0')) - - python_utils.PRINT( - 'Checking if selenium is installed in %s' % oppia_tools_dir) - if not os.path.exists(os.path.join(oppia_tools_dir, 'selenium-3.13.0')): - python_utils.PRINT('Installing selenium') - pip_install( - 'selenium', '3.13.0', - os.path.join(oppia_tools_dir, 'selenium-3.13.0')) - - python_utils.PRINT( - 'Checking if PyGithub is installed in %s' % oppia_tools_dir) - if not os.path.exists(os.path.join(oppia_tools_dir, 'PyGithub-1.43.7')): - python_utils.PRINT('Installing PyGithub') - pip_install( - 'PyGithub', '1.43.7', - os.path.join(oppia_tools_dir, 'PyGithub-1.43.7')) - - python_utils.PRINT( - 'Checking if psutil is installed in %s' % oppia_tools_dir) - if not os.path.exists(os.path.join(oppia_tools_dir, 'psutil-5.6.3')): - python_utils.PRINT('Installing psutil') - pip_install( - 'psutil', '5.6.3', - os.path.join(oppia_tools_dir, 'psutil-5.6.3')) - # Install pre-commit script. 
python_utils.PRINT('Installing pre-commit hook for git') subprocess.call('python scripts/pre_commit_hook.py --install'.split()) From c3739195ee8bb5f068b81238329f2e3a49e1523d Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Sun, 25 Aug 2019 04:24:40 +0530 Subject: [PATCH 019/141] fix path --- scripts/backend_tests.py | 3 +-- scripts/run_backend_tests.py | 15 +++++---------- 2 files changed, 6 insertions(+), 12 deletions(-) diff --git a/scripts/backend_tests.py b/scripts/backend_tests.py index fbaa45cf4da8..b70e74513224 100644 --- a/scripts/backend_tests.py +++ b/scripts/backend_tests.py @@ -70,8 +70,7 @@ ] COVERAGE_PATH = os.path.join( - os.getcwd(), '..', 'oppia_tools', 'coverage-4.5.4', 'coverage-4.5.4', - 'coverage') + os.getcwd(), '..', 'oppia_tools', 'coverage-4.5.4', 'coverage') TEST_RUNNER_PATH = os.path.join(os.getcwd(), 'core', 'tests', 'gae_suite.py') LOG_LOCK = threading.Lock() ALL_ERRORS = [] diff --git a/scripts/run_backend_tests.py b/scripts/run_backend_tests.py index ac7b75c4f2bb..23f56d3406c9 100644 --- a/scripts/run_backend_tests.py +++ b/scripts/run_backend_tests.py @@ -63,6 +63,7 @@ from . import backend_tests from . import build +from . import install_third_party_libs from . import setup from . 
import setup_gae @@ -84,22 +85,16 @@ def main(): curr_dir = os.path.abspath(os.getcwd()) oppia_tools_dir = os.path.join(curr_dir, '..', 'oppia_tools') coverage_home = os.path.join(oppia_tools_dir, 'coverage-4.5.4') - coverage_path = os.path.join(coverage_home, 'coverage-4.5.4', 'coverage') + coverage_path = os.path.join(coverage_home, 'coverage') parsed_args, _ = _PARSER.parse_known_args() if parsed_args.generate_coverage_report: print 'Checking whether coverage is installed in %s' % oppia_tools_dir if not os.path.exists(os.path.join(oppia_tools_dir, 'coverage-4.5.4')): print 'Installing coverage' - urllib.urlretrieve( - 'https://files.pythonhosted.org/packages/85/d5/' - '818d0e603685c4a613d56f065a721013e942088047ff1027a632948bdae6/' - 'coverage-4.5.4.tar.gz', filename='coverage-4.5.4.tar.gz') - tar = tarfile.open(name='coverage-4.5.4.tar.gz') - tar.extractall( - path=os.path.join(oppia_tools_dir, 'coverage-4.5.4')) - tar.close() - os.remove('coverage-4.5.4.tar.gz') + install_third_party_libs.pip_install( + 'coverage', '4.5.4', + os.path.join(oppia_tools_dir, 'coverage-4.5.4')) # Compile typescript files. print 'Compiling typescript...' From 06dc081ac9d1217a7d7d29e4f29210ed643fc83f Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Sun, 25 Aug 2019 04:37:49 +0530 Subject: [PATCH 020/141] convert --- scripts/run_tests.py | 68 ++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 68 insertions(+) create mode 100644 scripts/run_tests.py diff --git a/scripts/run_tests.py b/scripts/run_tests.py new file mode 100644 index 000000000000..531077bc0259 --- /dev/null +++ b/scripts/run_tests.py @@ -0,0 +1,68 @@ +# Copyright 2019 The Oppia Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS-IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""INSTRUCTIONS: + +Run this script from the oppia root folder: + python -m scripts.run_tests + +It runs all the tests, in this order: +- Frontend Karma unit tests +- Backend Python tests +- End-to-end Protractor tests + +If any of these tests result in errors, this script will terminate. + +Note: The test scripts are arranged in increasing order of time taken. This +enables a broken build to be detected as quickly as possible. +""" +from __future__ import absolute_import # pylint: disable=import-only-modules + +import python_utils + +from . import run_backend_tests +from . import run_e2e_tests +from . import run_frontend_tests +from . import setup +from . import setup_gae + + +def main(): + """Run all the tests.""" + setup.main() + setup_gae.main() + + # Run frontend unit tests. + python_utils.PRINT('Running frontend unit tests') + run_frontend_tests.main() + python_utils.PRINT('Frontend tests passed.') + python_utils.PRINT('') + + # Run backend tests. + python_utils.PRINT('Running backend tests') + run_backend_tests.main() + python_utils.PRINT('Backend tests passed.') + python_utils.PRINT('') + + # Run end-to-end tests. 
+ python_utils.PRINT('Running end-to-end tests') + run_e2e_tests.main() + + python_utils.PRINT('') + python_utils.PRINT( + 'SUCCESS All frontend, backend and end-to-end tests passed!') + + +if __name__ == '__main__': + main() From 845a94c550ab03258b4128160b04d79609bc16b7 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Sun, 25 Aug 2019 17:04:08 +0530 Subject: [PATCH 021/141] remove files --- scripts/create_expression_parser.sh | 57 ------- scripts/install_chrome_on_travis.sh | 31 ---- scripts/install_third_party.sh | 212 ----------------------- scripts/run_backend_tests.sh | 105 ------------ scripts/run_e2e_tests.sh | 222 ------------------------ scripts/run_frontend_tests.sh | 76 --------- scripts/run_tests.sh | 66 ------- scripts/setup.sh | 255 ---------------------------- scripts/setup_gae.sh | 69 -------- scripts/vagrant_lock.sh | 60 ------- 10 files changed, 1153 deletions(-) delete mode 100755 scripts/create_expression_parser.sh delete mode 100644 scripts/install_chrome_on_travis.sh delete mode 100755 scripts/install_third_party.sh delete mode 100755 scripts/run_backend_tests.sh delete mode 100755 scripts/run_e2e_tests.sh delete mode 100755 scripts/run_frontend_tests.sh delete mode 100755 scripts/run_tests.sh delete mode 100755 scripts/setup.sh delete mode 100644 scripts/setup_gae.sh delete mode 100644 scripts/vagrant_lock.sh diff --git a/scripts/create_expression_parser.sh b/scripts/create_expression_parser.sh deleted file mode 100755 index 6e306ea94ef4..000000000000 --- a/scripts/create_expression_parser.sh +++ /dev/null @@ -1,57 +0,0 @@ -#!/usr/bin/env bash - -# Copyright 2014 The Oppia Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS-IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -########################################################################## - -# INSTRUCTIONS: -# -# Run this script from the oppia root folder: -# bash scripts/create_expression_parser.sh -# The root folder MUST be named 'oppia'. -# It produces the expression parser. - -if [ -z "$BASH_VERSION" ] -then - echo "" - echo " Please run me using bash: " - echo "" - echo " bash $0" - echo "" - return 1 -fi - -set -e -source $(dirname $0)/setup.sh || exit 1 - - -EXPRESSION_PARSER_DEFINITION=core/templates/dev/head/expressions/parser.pegjs -EXPRESSION_PARSER_JS=core/templates/dev/head/expressions/ExpressionParserService.js - -# Install the basic environment, e.g. nodejs. -bash scripts/install_third_party.sh - -echo Checking whether pegjs is installed in $TOOLS_DIR -if [ ! -d "$NODE_MODULE_DIR/pegjs" ]; then - echo Installing pegjs - $NPM_INSTALL pegjs@0.8.0 -fi - -$NODE_MODULE_DIR/pegjs/bin/pegjs $EXPRESSION_PARSER_DEFINITION $EXPRESSION_PARSER_JS -sed -i "s/module\.exports.*$/angular.module('oppia').factory('ExpressionParserService', ['\$log', function(\$log) {/" $EXPRESSION_PARSER_JS -sed -i 's/^})();\s*$/}]);/' $EXPRESSION_PARSER_JS - - -echo Done! diff --git a/scripts/install_chrome_on_travis.sh b/scripts/install_chrome_on_travis.sh deleted file mode 100644 index d9cb55b31cf3..000000000000 --- a/scripts/install_chrome_on_travis.sh +++ /dev/null @@ -1,31 +0,0 @@ -# Copyright 2018 The Oppia Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS-IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# This script should only be ran by Travis to install and provide a constant -# version of Chrome. -# CHROME_SOURCE_URL is an environment variable set in Oppia's Travis repo settings. -# It can be found under 'Environment Variables' header here: https://travis-ci.org/oppia/oppia/settings - -if [ ! -f $HOME/.cache/TravisChrome/$(basename $CHROME_SOURCE_URL) ]; then - # Caching Chrome's Debian package after download to prevent connection problem. - mkdir -p $HOME/.cache/TravisChrome/ - cd $HOME/.cache/TravisChrome/ - # --remote-name : Write output to a file named as the remote file. - # --location : Follow re-directs. - curl --remote-name --location $CHROME_SOURCE_URL - cd - -fi - -echo Installing $HOME/.cache/TravisChrome/$(basename $CHROME_SOURCE_URL) -sudo dpkg -i $HOME/.cache/TravisChrome/$(basename $CHROME_SOURCE_URL) diff --git a/scripts/install_third_party.sh b/scripts/install_third_party.sh deleted file mode 100755 index d63c8018f45a..000000000000 --- a/scripts/install_third_party.sh +++ /dev/null @@ -1,212 +0,0 @@ -#!/usr/bin/env bash - -# Copyright 2014 The Oppia Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS-IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -set -e -source $(dirname $0)/setup.sh || exit 1 - -function pip_install { - # Attempt standard pip install, or pass in --system if the local environment requires it. - # See https://github.com/pypa/pip/issues/3826 for context on when this situation may occur. - pip install "$@" || pip install --system "$@" -} - -# Future is needed to be installed first before executing -# scripts/install_third_party.py. -echo Checking if Future is installed in $THIRD_PARTY_DIR -if [ ! -d "$THIRD_PARTY_DIR/future-0.17.1" ]; then - echo Installing Future - - pip_install future==0.17.1 --target="$THIRD_PARTY_DIR/future-0.17.1" -fi - -# Download and install required JS and zip files. -echo Installing third-party JS libraries and zip files. -$PYTHON_CMD -m scripts.install_third_party - -# Install third-party node modules needed for the build process. -$NPM_INSTALL --only=dev -# This line removes the "npm ERR! missing:" messages. For reference, see this -# thread: https://github.com/npm/npm/issues/19393#issuecomment-374076889 -$NPM_CMD dedupe - -# Download and install Skulpt. Skulpt is built using a Python script included -# within the Skulpt repository (skulpt.py). This script normally requires -# GitPython, however the patches to it below (with the sed operations) lead to -# it no longer being required. The Python script is used to avoid having to -# manually recreate the Skulpt dist build process in install_third_party.py. -# Note that skulpt.py will issue a warning saying its dist command will not -# work properly without GitPython, but it does actually work due to the -# patches. -echo Checking whether Skulpt is installed in third_party -if [ ! "$NO_SKULPT" -a ! -d "$THIRD_PARTY_DIR/static/skulpt-0.10.0" ]; then - if [ ! 
-d "$TOOLS_DIR/skulpt-0.10.0" ]; then - echo Downloading Skulpt - cd $TOOLS_DIR - mkdir skulpt-0.10.0 - cd skulpt-0.10.0 - git clone https://github.com/skulpt/skulpt - cd skulpt - - # Use a specific Skulpt release. - git checkout 0.10.0 - - # Add a temporary backup file so that this script works on both Linux and - # Mac. - TMP_FILE=`mktemp /tmp/backup.XXXXXXXXXX` - - echo Compiling Skulpt - - # The Skulpt setup function needs to be tweaked. It fails without certain - # third party commands. These are only used for unit tests and generating - # documentation and are not necessary when building Skulpt. - sed -e "s/ret = test()/ret = 0/" $TOOLS_DIR/skulpt-0.10.0/skulpt/skulpt.py |\ - sed -e "s/ doc()/ pass#doc()/" |\ - # This and the next command disable unit and compressed unit tests for the - # compressed distribution of Skulpt. These tests don't work on some - # Ubuntu environments and cause a libreadline dependency issue. - sed -e "s/ret = os.system(\"{0}/ret = 0 #os.system(\"{0}/" |\ - sed -e "s/ret = rununits(opt=True)/ret = 0/" > $TMP_FILE - mv $TMP_FILE $TOOLS_DIR/skulpt-0.10.0/skulpt/skulpt.py - $PYTHON_CMD $TOOLS_DIR/skulpt-0.10.0/skulpt/skulpt.py dist - - # Return to the Oppia root folder. - cd $OPPIA_DIR - fi - - # Move the build directory to the static resources folder. - mkdir -p $THIRD_PARTY_DIR/static/skulpt-0.10.0 - cp -r $TOOLS_DIR/skulpt-0.10.0/skulpt/dist/* $THIRD_PARTY_DIR/static/skulpt-0.10.0 -fi - -# Checking if pip is installed. If you are having -# trouble, please ensure that you have pip installed (see "Installing Oppia" -# on the Oppia developers' wiki page). -echo Checking if pip is installed on the local machine -if ! type pip > /dev/null 2>&1 ; then - echo "" - echo " Pip is required to install Oppia dependencies, but pip wasn't found" - echo " on your local machine." 
- echo "" - echo " Please see \"Installing Oppia\" on the Oppia developers' wiki page:" - - if [ "${OS}" == "Darwin" ] ; then - echo " https://github.com/oppia/oppia/wiki/Installing-Oppia-%28Mac-OS%29" - else - echo " https://github.com/oppia/oppia/wiki/Installing-Oppia-%28Linux%29" - fi - - # If pip is not installed, quit. - exit 1 -fi - -echo Checking if pylint is installed in $TOOLS_DIR -if [ ! -d "$TOOLS_DIR/pylint-1.9.4" ]; then - echo Installing Pylint - - pip_install pylint==1.9.4 --target="$TOOLS_DIR/pylint-1.9.4" -fi - -echo Checking if Pillow is installed in $TOOLS_DIR -if [ ! -d "$TOOLS_DIR/Pillow-6.0.0" ]; then - echo Installing Pillow - - pip_install Pillow==6.0.0 --target="$TOOLS_DIR/Pillow-6.0.0" - - if [[ $? != 0 && ${OS} == "Darwin" ]]; then - echo " Pillow install failed. See troubleshooting instructions at:" - echo " https://github.com/oppia/oppia/wiki/Troubleshooting#mac-os" - fi - -fi - -echo Checking if pylint-quotes is installed in $TOOLS_DIR -if [ ! -d "$TOOLS_DIR/pylint-quotes-0.2.1" ]; then - echo Installing pylint-quotes - # Note that the URL redirects, so we pass in -L to tell curl to follow the redirect. - curl -o pylint-quotes-0.2.1.tar.gz -L https://github.com/edaniszewski/pylint-quotes/archive/0.2.1.tar.gz - tar xzf pylint-quotes-0.2.1.tar.gz -C $TOOLS_DIR - rm pylint-quotes-0.2.1.tar.gz -fi - -# Install webtest. -echo Checking if webtest is installed in third_party -if [ ! -d "$TOOLS_DIR/webtest-2.0.33" ]; then - echo Installing webtest framework - # Note that the github URL redirects, so we pass in -L to tell curl to follow the redirect. - curl -o webtest-2.0.33.zip -L https://github.com/Pylons/webtest/archive/2.0.33.zip - unzip webtest-2.0.33.zip -d $TOOLS_DIR - rm webtest-2.0.33.zip -fi - -# Install isort. -echo Checking if isort is installed in third_party -if [ ! -d "$TOOLS_DIR/isort-4.3.20" ]; then - echo Installing isort - # Note that the URL redirects, so we pass in -L to tell curl to follow the redirect. 
- curl -o isort-4.3.20.tar.gz -L https://files.pythonhosted.org/packages/f1/84/5d66ddbe565e36682c336c841e51430384495b272c622ac229029f671be2/isort-4.3.20.tar.gz - tar xzf isort-4.3.20.tar.gz -C $TOOLS_DIR - rm isort-4.3.20.tar.gz -fi - -# Install pycodestyle. -echo Checking if pycodestyle is installed in third_party -if [ ! -d "$TOOLS_DIR/pycodestyle-2.5.0" ]; then - echo Installing pycodestyle - # Note that the URL redirects, so we pass in -L to tell curl to follow the redirect. - curl -o pycodestyle-2.5.0.tar.gz -L https://files.pythonhosted.org/packages/1c/d1/41294da5915f4cae7f4b388cea6c2cd0d6cd53039788635f6875dfe8c72f/pycodestyle-2.5.0.tar.gz - tar xzf pycodestyle-2.5.0.tar.gz -C $TOOLS_DIR - rm pycodestyle-2.5.0.tar.gz -fi - -# Install esprima. -echo Checking if esprima is installed in third_party -if [ ! -d "$TOOLS_DIR/esprima-4.0.1" ]; then - echo Installing esprima - # Note that the URL redirects, so we pass in -L to tell curl to follow the redirect. - curl -o esprima-4.0.1.tar.gz -L https://files.pythonhosted.org/packages/cc/a1/50fccd68a12bcfc27adfc9969c090286670a9109a0259f3f70943390b721/esprima-4.0.1.tar.gz - tar xzf esprima-4.0.1.tar.gz -C $TOOLS_DIR - rm esprima-4.0.1.tar.gz -fi - -# Python API for browsermob-proxy. -echo Checking if browsermob-proxy is installed in $TOOLS_DIR -if [ ! -d "$TOOLS_DIR/browsermob-proxy-0.8.0" ]; then - echo Installing browsermob-proxy - - pip_install browsermob-proxy==0.8.0 --target="$TOOLS_DIR/browsermob-proxy-0.8.0" -fi - -echo Checking if selenium is installed in $TOOLS_DIR -if [ ! -d "$TOOLS_DIR/selenium-3.13.0" ]; then - echo Installing selenium - - pip_install selenium==3.13.0 --target="$TOOLS_DIR/selenium-3.13.0" -fi - -echo Checking if PyGithub is installed in $TOOLS_DIR -if [ ! 
-d "$TOOLS_DIR/PyGithub-1.43.7" ]; then - echo Installing PyGithub - - pip_install PyGithub==1.43.7 --target="$TOOLS_DIR/PyGithub-1.43.7" -fi - -# install pre-commit script -echo Installing pre-commit hook for git -$PYTHON_CMD -m scripts.pre_commit_hook --install - -# install pre-push script -echo Installing pre-push hook for git -$PYTHON_CMD -m scripts.pre_push_hook --install diff --git a/scripts/run_backend_tests.sh b/scripts/run_backend_tests.sh deleted file mode 100755 index d3c950b0506b..000000000000 --- a/scripts/run_backend_tests.sh +++ /dev/null @@ -1,105 +0,0 @@ -#!/usr/bin/env bash - -# Copyright 2014 The Oppia Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS-IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -########################################################################## - -# INSTRUCTIONS: -# -# Run this script from the oppia root folder: -# bash scripts/run_backend_tests.sh -# -# It runs all the (Python) backend tests, in parallel. -# -# ===================== -# CUSTOMIZATION OPTIONS -# ===================== -# -# (1) Generate a coverage report by adding the argument -# -# --generate_coverage_report -# -# but note that this will slow down the tests by a factor of 1.5 or more. -# -# (2) Append a test target to make the script run all tests in a given module -# or class, or run a particular test. 
For example, appending -# -# --test_target='foo.bar.Baz' -# -# runs all tests in test class Baz in the foo/bar.py module, and appending -# -# --test_target='foo.bar.Baz.quux' -# -# runs the test method quux in the test class Baz in the foo/bar.py module. -# -# (3) Append a test path to make the script run all tests in a given -# subdirectory. For example, appending -# -# --test_path='core/controllers' -# -# runs all tests in the core/controllers/ directory. -# -# (4) Enable the verbose log by add the argument. It will display the outputs of -# the tests being run. -# -# --verbose or -v -# -# IMPORTANT: Only one of --test_path and --test_target should be specified. - -if [ -z "$BASH_VERSION" ] -then - echo "" - echo " Please run me using bash: " - echo "" - echo " bash $0" - echo "" - return 1 -fi - -set -e -source $(dirname $0)/setup.sh || exit 1 -source $(dirname $0)/setup_gae.sh || exit 1 - -# Install third party dependencies -bash scripts/install_third_party.sh - -for arg in "$@"; do - if [ "$arg" == "--generate_coverage_report" ]; then - echo Checking whether coverage is installed in $TOOLS_DIR - if [ ! -d "$TOOLS_DIR/coverage-4.5.4" ]; then - echo Installing coverage - pip install coverage==4.5.4 --target="$TOOLS_DIR/coverage-4.5.4" - fi - fi -done - -echo "Compiling webpack..." -$NODE_MODULE_DIR/webpack/bin/webpack.js --config webpack.prod.config.ts - -$PYTHON_CMD scripts/backend_tests.py $@ - -for arg in "$@"; do - if [ "$arg" == "--generate_coverage_report" ]; then - $PYTHON_CMD $COVERAGE_HOME/coverage combine - $PYTHON_CMD $COVERAGE_HOME/coverage report --omit="$TOOLS_DIR/*","$THIRD_PARTY_DIR/*","/usr/share/*" --show-missing - - echo "Generating xml coverage report..." - $PYTHON_CMD $COVERAGE_HOME/coverage xml - fi -done - -echo '' -echo 'Done!' 
-echo '' diff --git a/scripts/run_e2e_tests.sh b/scripts/run_e2e_tests.sh deleted file mode 100755 index 49b483292b9d..000000000000 --- a/scripts/run_e2e_tests.sh +++ /dev/null @@ -1,222 +0,0 @@ -#!/usr/bin/env bash - -# Copyright 2014 The Oppia Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS-IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -########################################################################## - -# INSTRUCTIONS: -# -# Run this script from the oppia root folder: -# bash scripts/run_e2e_tests.sh -# -# Optional arguments: -# --browserstack Run the tests on browserstack using the -# protractor-browserstack.conf.js file. -# --skip-install=true/false If true, skips installing dependencies. The -# default value is false. -# --sharding=true/false Disables/Enables parallelization of protractor tests. -# --sharding-instances=# Sets the number of parallel browsers to open while -# sharding. -# --prod_env Run the tests in prod mode. Static resources are served from -# build directory and use cache slugs. -# Sharding must be disabled (either by passing in false to --sharding or 1 to -# --sharding-instances) if running any tests in isolation (fit or fdescribe). -# --suite=suite_name Performs test for different suites, here suites are the -# name of the test files present in core/tests/protractor_desktop/ and -# core/test/protractor/ dirs. e.g. for the file -# core/tests/protractor/accessibility.js use --suite=accessibility. 
-# For performing a full test, no argument is required. -# -# The root folder MUST be named 'oppia'. -# -# Note: You can replace 'it' with 'fit' or 'describe' with 'fdescribe' to run a -# single test or test suite. - -function cleanup { - # Send a kill signal to the dev server and Selenium server. The awk command - # gets just the process ID from the grepped line. - kill `ps aux | grep "[Dd]ev_appserver.py --host=0.0.0.0 --port=9001" | awk '{print $2}'` - kill `ps aux | grep node_modules/webdriver-manager/selenium | awk '{print $2}'` - - # Wait for the servers to go down; suppress "connection refused" error output - # from nc since that is exactly what we are expecting to happen. - while ( nc -vz localhost 4444 >/dev/null 2>&1 ); do sleep 1; done - while ( nc -vz localhost 9001 >/dev/null 2>&1 ); do sleep 1; done - - if [ -d "../protractor-screenshots" ]; then - echo "" - echo " Note: If ADD_SCREENSHOT_REPORTER is set to true in" - echo " core/tests/protractor.conf.js, you can view screenshots" - echo " of the failed tests in ../protractor-screenshots/" - echo "" - fi - - echo Done! -} - -if [ -z "$BASH_VERSION" ] -then - echo "" - echo " Please run me using bash: " - echo "" - echo " bash $0" - echo "" - return 1 -fi - -set -e -source $(dirname $0)/setup.sh || exit 1 -source $(dirname $0)/setup_gae.sh || exit 1 -if [ "$TRAVIS" == 'true' ]; then - source $(dirname $0)/install_chrome_on_travis.sh || exit 1 -fi - -export DEFAULT_SKIP_INSTALLING_THIRD_PARTY_LIBS=false -export DEFAULT_RUN_MINIFIED_TESTS=false -maybeInstallDependencies "$@" - -if ( nc -vz localhost 8181 ); then - echo "" - echo " There is already a server running on localhost:8181." - echo " Please terminate it before running the end-to-end tests." - echo " Exiting." - echo "" - exit 1 -fi - - -# Forces the cleanup function to run on exit. -# Developers: note that at the end of this script, the cleanup() function at -# the top of the file is run. 
-trap cleanup EXIT - -# Argument passed to feconf.py to help choose production templates folder. -FORCE_PROD_MODE=False -RUN_ON_BROWSERSTACK=False -for arg in "$@"; do - # Used to emulate running Oppia in a production environment. - if [ "$arg" == "--prod_env" ]; then - FORCE_PROD_MODE=True - echo " Generating files for production mode..." - fi - - # Used to run the e2e tests on browserstack. - if [ "$arg" == "--browserstack" ]; then - RUN_ON_BROWSERSTACK=True - echo " Running the tests on browserstack..." - fi -done - -if [[ "$FORCE_PROD_MODE" == "True" ]]; then - constants_env_variable="\"DEV_MODE\": false" - sed -i.bak -e s/"\"DEV_MODE\": .*"/"$constants_env_variable"/ assets/constants.js - $PYTHON_CMD scripts/build.py --prod_env - APP_YAML_FILEPATH="app.yaml" -else - constants_env_variable="\"DEV_MODE\": true" - sed -i.bak -e s/"\"DEV_MODE\": .*"/"$constants_env_variable"/ assets/constants.js - $PYTHON_CMD scripts/build.py - APP_YAML_FILEPATH="app_dev.yaml" -fi - -# Delete the modified feconf.py file(-i.bak) -rm assets/constants.js.bak - -# Start a selenium server using chromedriver 2.41. -# The 'detach' option continues the flow once the server is up and runnning. -# The 'quiet' option prints only the necessary information about the server start-up -# process. -$NODE_MODULE_DIR/.bin/webdriver-manager update --versions.chrome 2.41 -$NODE_MODULE_DIR/.bin/webdriver-manager start --versions.chrome 2.41 --detach --quiet - -# Start a selenium process. The program sends thousands of lines of useless -# info logs to stderr so we discard them. -# TODO(jacob): Find a webdriver or selenium argument that controls log level. -($NODE_MODULE_DIR/.bin/webdriver-manager start 2>/dev/null)& -# Start a demo server. -($PYTHON_CMD $GOOGLE_APP_ENGINE_HOME/dev_appserver.py --host=0.0.0.0 --port=9001 --clear_datastore=yes --dev_appserver_log_level=critical --log_level=critical --skip_sdk_update_check=true $APP_YAML_FILEPATH)& - -# Wait for the servers to come up. -while ! 
nc -vz localhost 4444; do sleep 1; done -while ! nc -vz localhost 9001; do sleep 1; done - -# Delete outdated screenshots -if [ -d "../protractor-screenshots" ]; then - rm -r ../protractor-screenshots -fi - -# Parse additional command line arguments that may be passed to protractor. -# Credit: http://stackoverflow.com/questions/192249 -# Passing different suites and sharding parameters for tests. -SUITE="full" -SHARDING=true -SHARD_INSTANCES=3 -for j in "$@"; do - # Match each space-separated argument passed to the shell file to a separate - # case label, based on a pattern. E.g. Match to -suite=*, -sharding=*, where the - # asterisk refers to any characters following the equals sign, other than - # whitespace. - case $j in - --suite=*) - # Extract the value right of the equal sign by substringing the $i variable - # at the equal sign. - # http://tldp.org/LDP/abs/html/string-manipulation.html - SUITE="${j#*=}" - # Shifts the argument parameters over by one. E.g. $2 becomes $1, etc. - shift - ;; - - --sharding=*) - SHARDING="${j#*=}" - shift - ;; - - --sharding-instances=*) - SHARD_INSTANCES="${j#*=}" - shift - ;; - - --prod_env*) - shift - ;; - - --browserstack*) - shift - ;; - - *) - echo "Error: Unknown command line option: $j" - ;; - esac -done - -# Run the end-to-end tests. The conditional is used to run protractor without -# any sharding parameters if it is disabled. This helps with isolated tests. -# Isolated tests do not work properly unless no sharding parameters are passed -# in at all. -# TODO(bhenning): Figure out if this is a bug with protractor. 
-if [ "$RUN_ON_BROWSERSTACK" == "False" ]; then - if [ "$SHARDING" = "false" ] || [ "$SHARD_INSTANCES" = "1" ]; then - $NODE_MODULE_DIR/protractor/bin/protractor core/tests/protractor.conf.js --suite "$SUITE" - else - $NODE_MODULE_DIR/protractor/bin/protractor core/tests/protractor.conf.js --capabilities.shardTestFiles="$SHARDING" --capabilities.maxInstances=$SHARD_INSTANCES --suite "$SUITE" - fi -else - if [ "$SHARDING" = "false" ] || [ "$SHARD_INSTANCES" = "1" ]; then - $NODE_MODULE_DIR/protractor/bin/protractor core/tests/protractor-browserstack.conf.js --suite "$SUITE" - else - $NODE_MODULE_DIR/protractor/bin/protractor core/tests/protractor-browserstack.conf.js --capabilities.shardTestFiles="$SHARDING" --capabilities.maxInstances=$SHARD_INSTANCES --suite "$SUITE" - fi -fi diff --git a/scripts/run_frontend_tests.sh b/scripts/run_frontend_tests.sh deleted file mode 100755 index 3ff538f30ac5..000000000000 --- a/scripts/run_frontend_tests.sh +++ /dev/null @@ -1,76 +0,0 @@ -#!/usr/bin/env bash - -# Copyright 2014 The Oppia Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS-IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -########################################################################## - -# INSTRUCTIONS: -# -# Run this script from the oppia root folder: -# bash scripts/run_frontend_tests.sh -# -# Optional arguments: -# --skip-install=true/false If true, skips installing dependencies. The -# default value is false. 
-# --run-minified-tests=true/false Whether to run frontend karma tests on both -# minified and non-minified code. The default value is false. -# -# The root folder MUST be named 'oppia'. -# It runs unit tests for frontend JavaScript code (using Karma). -# -# Note: You can replace 'it' with 'fit' or 'describe' with 'fdescribe' to run a -# single test or test suite. - -if [ -z "$BASH_VERSION" ] -then - echo "" - echo " Please run me using bash: " - echo "" - echo " bash $0" - echo "" - return 1 -fi - -set -e -source $(dirname $0)/setup.sh || exit 1 -source $(dirname $0)/setup_gae.sh || exit 1 - -export DEFAULT_SKIP_INSTALLING_THIRD_PARTY_LIBS=false -export DEFAULT_RUN_MINIFIED_TESTS=false -maybeInstallDependencies "$@" - -echo "" -echo " View interactive frontend test coverage reports by navigating to" -echo "" -echo " ../karma_coverage_reports" -echo "" -echo " on your filesystem." -echo "" - -echo "" -echo " Running test in development environment" -echo "" -$PYTHON_CMD scripts/build.py -$XVFB_PREFIX $NODE_MODULE_DIR/karma/bin/karma start core/tests/karma.conf.ts - -if [ "$RUN_MINIFIED_TESTS" = "true" ]; then - echo "" - echo " Running test in production environment" - echo "" - $PYTHON_CMD scripts/build.py --prod_env --minify_third_party_libs_only - $XVFB_PREFIX $NODE_MODULE_DIR/karma/bin/karma start core/tests/karma.conf.ts --prodEnv -fi - -echo Done! diff --git a/scripts/run_tests.sh b/scripts/run_tests.sh deleted file mode 100755 index 4d2d7ce3013b..000000000000 --- a/scripts/run_tests.sh +++ /dev/null @@ -1,66 +0,0 @@ -#!/usr/bin/env bash - -# Copyright 2015 The Oppia Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS-IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -########################################################################## - -# INSTRUCTIONS: -# -# Run this script from the oppia root folder: -# bash scripts/run_tests.sh -# -# It runs all the tests, in this order: -# - Frontend Karma unit tests -# - Backend Python tests -# - End-to-end Protractor tests -# -# If any of these tests result in errors, this script will terminate. -# -# Note: The test scripts are arranged in increasing order of time taken. This -# enables a broken build to be detected as quickly as possible. - -if [ -z "$BASH_VERSION" ] -then - echo "" - echo " Please run me using bash: " - echo "" - echo " bash $0" - echo "" - return 1 -fi - -set -e -source $(dirname $0)/setup.sh || exit 1 -source $(dirname $0)/setup_gae.sh || exit 1 - -# Run frontend unit tests. -echo 'Running frontend unit tests' -source $(dirname $0)/run_frontend_tests.sh || exit 1 -echo 'Frontend tests passed.' -echo '' - -# Run backend tests. -echo 'Running backend tests' -source $(dirname $0)/run_backend_tests.sh || exit 1 -echo 'Backend tests passed.' -echo '' - -# Run end-to-end tests. -echo 'Running end-to-end tests' -source $(dirname $0)/run_e2e_tests.sh || exit 1 - -echo '' -echo 'SUCCESS All frontend, backend and end-to-end tests passed!' -echo '' diff --git a/scripts/setup.sh b/scripts/setup.sh deleted file mode 100755 index bc6ae4bcc9c8..000000000000 --- a/scripts/setup.sh +++ /dev/null @@ -1,255 +0,0 @@ -# Copyright 2014 The Oppia Authors. All Rights Reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS-IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -########################################################################## - -# This file should not be invoked directly, but sourced from other sh scripts. -# Bash execution environent set up for all scripts. - - -function maybeInstallDependencies { - # Parse additional command line arguments. - # Credit: https://stackoverflow.com/questions/192249 - export SKIP_INSTALLING_THIRD_PARTY_LIBS=$DEFAULT_SKIP_INSTALLING_THIRD_PARTY_LIBS - export RUN_MINIFIED_TESTS=$DEFAULT_RUN_MINIFIED_TESTS - for i in "$@"; do - # Match each space-separated argument passed to the shell file to a separate - # case label, based on a pattern. E.g. Match to --skip-install=*, where the - # asterisk refers to any characters following the equals sign, other than - # whitespace. - case $i in - --skip-install=*) - # Extract the value right of the equal sign by substringing the $i - # variable at the equal sign. - # http://tldp.org/LDP/abs/html/string-manipulation.html - SKIP_INSTALLING_THIRD_PARTY_LIBS="${i#*=}" - # Shifts the argument parameters over by one. E.g. $2 becomes $1, etc. - shift - ;; - - --run-minified-tests=*) - RUN_MINIFIED_TESTS="${i#*=}" - shift - ;; - - esac - done - - if [ "$SKIP_INSTALLING_THIRD_PARTY_LIBS" = "false" ]; then - # Install third party dependencies - # TODO(sll): Make this work with fewer third-party dependencies. 
- bash scripts/install_third_party.sh - - # Ensure that generated JS and CSS files are in place before running the - # tests. - echo "" - echo " Running build task with concatenation only " - echo "" - - $PYTHON_CMD scripts/build.py - fi - - if [ "$RUN_MINIFIED_TESTS" = "true" ]; then - echo "" - echo " Running build task with concatenation and minification" - echo "" - - $PYTHON_CMD scripts/build.py --prod_env - fi -} - -if [ "$SETUP_DONE" ]; then - echo 'Environment setup completed.' - return 0 -fi - -if [ -z "$BASH_VERSION" ] -then - echo "" - echo " Please run me using bash: " - echo "" - echo " bash scripts/$0" - echo "" - return 1 -fi - -# TODO(sll): Consider using getopts command. -declare -a remaining_params -for arg in "$@"; do - if [ "$arg" == "--nojsrepl" ] || [ "$arg" == "--noskulpt" ]; then - NO_SKULPT=true - else - remaining_params+=($arg) - fi -done -export NO_SKULPT -export remaining_params - -EXPECTED_PWD='oppia' -# The second option allows this script to also be run from deployment folders. -if [[ ${PWD##*/} != $EXPECTED_PWD ]] && [[ ${PWD##*/} != deploy-* ]]; then - echo "" - echo " WARNING This script should be run from the oppia/ root folder." - echo "" - return 1 -fi - -export OPPIA_DIR=`pwd` -# Set COMMON_DIR to the absolute path of the directory above OPPIA_DIR. This -# is necessary becaue COMMON_DIR (or subsequent variables which refer to it) -# may use it in a situation where relative paths won't work as expected (such -# as $PYTHONPATH). -export COMMON_DIR=$(cd $OPPIA_DIR/..; pwd) -export TOOLS_DIR=$COMMON_DIR/oppia_tools -export THIRD_PARTY_DIR=$OPPIA_DIR/third_party -export NODE_MODULE_DIR=$OPPIA_DIR/node_modules -export ME=$(whoami) - -mkdir -p $TOOLS_DIR -mkdir -p $THIRD_PARTY_DIR -mkdir -p $NODE_MODULE_DIR - -# Adjust the path to include a reference to node. -export NODE_PATH=$TOOLS_DIR/node-10.15.3 -export PATH=$NODE_PATH/bin:$PATH -export MACHINE_TYPE=`uname -m` -export OS=`uname` - -if [ ! "${OS}" == "Darwin" -a ! 
"${OS}" == "Linux" ]; then - # Node is a requirement for all installation scripts. Here, we check if the - # OS supports node.js installation; if not, we exit with an error. - echo "" - echo " WARNING: Unsupported OS for installation of node.js." - echo " If you are running this script on Windows, see the instructions" - echo " here regarding installation of node.js:" - echo "" - echo " https://github.com/oppia/oppia/wiki/Installing-Oppia-%28Windows%29" - echo "" - echo " STATUS: Installation completed except for node.js. Exiting." - echo "" - return 1 -else - # Otherwise, npm will be installed locally, in NODE_PATH. - export NPM_CMD=$NODE_PATH/bin/npm -fi - -export NPM_INSTALL="$NPM_CMD install" - -# Download and install node.js. -echo Checking if node.js is installed in $TOOLS_DIR -if [ ! -d "$NODE_PATH" ]; then - echo Installing Node.js - if [ ${OS} == "Darwin" ]; then - if [ ${MACHINE_TYPE} == 'x86_64' ]; then - NODE_FILE_NAME=node-v10.15.3-darwin-x64 - else - NODE_FILE_NAME=node-v10.15.3-darwin-x86 - fi - elif [ ${OS} == "Linux" ]; then - if [ ${MACHINE_TYPE} == 'x86_64' ]; then - NODE_FILE_NAME=node-v10.15.3-linux-x64 - else - NODE_FILE_NAME=node-v10.15.3-linux-x86 - fi - fi - - curl -o node-download.tgz https://nodejs.org/dist/v10.15.3/$NODE_FILE_NAME.tar.gz - tar xzf node-download.tgz --directory $TOOLS_DIR - mv $TOOLS_DIR/$NODE_FILE_NAME $NODE_PATH - rm node-download.tgz - - # Change ownership of $NODE_MODULE_DIR. - # Note: on some machines, these commands seem to take quite a long time. - chown -R $ME $NODE_MODULE_DIR - chmod -R 744 $NODE_MODULE_DIR -fi - -# Adjust path to support the default Chrome locations for Unix, Windows and Mac OS. 
-if [ "$TRAVIS" == true ]; then - export CHROME_BIN="/usr/bin/chromium-browser" -elif [ "$VAGRANT" == true ] || [ -f "/etc/is_vagrant_vm" ]; then - # XVFB is required for headless testing in Vagrant - sudo apt-get install xvfb chromium-browser - export CHROME_BIN="/usr/bin/chromium-browser" - # Used in frontend and e2e tests. Only gets set if using Vagrant VM. - export XVFB_PREFIX="/usr/bin/xvfb-run" - # Enforce proper ownership on oppia, oppia_tools, and node_modules or else NPM installs will fail. - sudo chown -R vagrant.vagrant /home/vagrant/oppia /home/vagrant/oppia_tools /home/vagrant/node_modules -elif [ -f "/usr/bin/google-chrome" ]; then - # Unix. - export CHROME_BIN="/usr/bin/google-chrome" -elif [ -f "/usr/bin/chromium-browser" ]; then - # Unix. - export CHROME_BIN="/usr/bin/chromium-browser" -elif [ -f "/c/Program Files (x86)/Google/Chrome/Application/chrome.exe" ]; then - # Windows. - export CHROME_BIN="/c/Program Files (x86)/Google/Chrome/Application/chrome.exe" -elif [ -f "/mnt/c/Program Files (x86)/Google/Chrome/Application/chrome.exe" ]; then - # WSL - export CHROME_BIN="/mnt/c/Program Files (x86)/Google/Chrome/Application/chrome.exe" -elif [ -f "/Applications/Google Chrome.app/Contents/MacOS/Google Chrome" ]; then - # Mac OS. - export CHROME_BIN="/Applications/Google Chrome.app/Contents/MacOS/Google Chrome" -else - echo "Chrome is not found, stopping ..." - exit 1 -fi - -# This function takes a command for python as its only input. -# It checks this input for a specific version of python and returns false -# if it does not match the expected prefix. -function test_python_version() { - EXPECTED_PYTHON_VERSION_PREFIX="2.7" - PYTHON_VERSION=$($1 --version 2>&1) - if [[ $PYTHON_VERSION =~ Python[[:space:]](.+) ]]; then - PYTHON_VERSION=${BASH_REMATCH[1]} - else - echo "Unrecognizable Python command output: ${PYTHON_VERSION}" - # Return a false condition if output of tested command is unrecognizable. 
- return 1 - fi - if [[ "${PYTHON_VERSION}" = ${EXPECTED_PYTHON_VERSION_PREFIX}* ]]; then - # Return 0 to indicate a successful match. - # Return 1 to indicate a failed match. - return 0 - else - return 1 - fi -} - -# First, check the default Python command (which should be found within the user's $PATH). -PYTHON_CMD="python" -# Test whether the 'python' or 'python2.7' commands exist and finally fails when -# no suitable python version 2.7 can be found. -if ! test_python_version $PYTHON_CMD; then - echo "Unable to find 'python'. Trying python2.7 instead..." - PYTHON_CMD="python2.7" - if ! test_python_version $PYTHON_CMD; then - echo "Could not find a suitable Python environment. Exiting." - # If OS is Windows, print helpful error message about adding Python to path. - if [ ! "${OS}" == "Darwin" -a ! "${OS}" == "Linux" ]; then - echo "It looks like you are using Windows. If you have Python installed," - echo "make sure it is in your PATH and that PYTHONPATH is set." - echo "If you have two versions of Python (ie, Python 2.7 and 3), specify 2.7 before other versions of Python when setting the PATH." - echo "Here are some helpful articles:" - echo "http://docs.python-guide.org/en/latest/starting/install/win/" - echo "https://stackoverflow.com/questions/3701646/how-to-add-to-the-pythonpath-in-windows-7" - fi - # Exit when no suitable Python environment can be found. - return 1 - fi -fi -export PYTHON_CMD - -export SETUP_DONE=true diff --git a/scripts/setup_gae.sh b/scripts/setup_gae.sh deleted file mode 100644 index d5a9b13da0c1..000000000000 --- a/scripts/setup_gae.sh +++ /dev/null @@ -1,69 +0,0 @@ -# Copyright 2014 The Oppia Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS-IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -########################################################################## - -# This file should not be invoked directly, but sourced from other sh scripts. -# Bash execution environment setup for scripts that require GAE. - - -if [ "$SETUP_GAE_DONE" ]; then - return 0 -fi - -export GOOGLE_APP_ENGINE_HOME=$TOOLS_DIR/google_appengine_1.9.67/google_appengine -export GOOGLE_CLOUD_SDK_HOME=$TOOLS_DIR/google-cloud-sdk-251.0.0/google-cloud-sdk -export COVERAGE_HOME=$TOOLS_DIR/coverage-4.5.4 - -# Note that if the following line is changed so that it uses webob_1_1_1, PUT requests from the frontend fail. -export PYTHONPATH=.:$COVERAGE_HOME:$GOOGLE_APP_ENGINE_HOME:$GOOGLE_APP_ENGINE_HOME/lib/webob_0_9:$TOOLS_DIR/webtest-2.0.33:$PYTHONPATH - -# Delete old *.pyc files -find . -iname "*.pyc" -exec rm -f {} \; - -echo Checking whether Google App Engine is installed in $GOOGLE_APP_ENGINE_HOME -if [ ! -d "$GOOGLE_APP_ENGINE_HOME" ]; then - echo "Downloading Google App Engine (this may take a little while)..." - mkdir -p $GOOGLE_APP_ENGINE_HOME - curl -o gae-download.zip https://storage.googleapis.com/appengine-sdks/featured/google_appengine_1.9.67.zip - # $? contains the (exit) status code of previous command. - # If curl was successful, $? will be 0 else non-zero. - if [ 0 -eq $? ]; then - echo "Download complete. Installing Google App Engine..." - else - echo "Error downloading Google App Engine. Exiting." 
- exit 1 - fi - unzip -q gae-download.zip -d $TOOLS_DIR/google_appengine_1.9.67/ - rm gae-download.zip -fi - -echo Checking whether google-cloud-sdk is installed in $GOOGLE_CLOUD_SDK_HOME -if [ ! -d "$GOOGLE_CLOUD_SDK_HOME" ]; then - echo "Downloading Google Cloud SDK (this may take a little while)..." - mkdir -p $GOOGLE_CLOUD_SDK_HOME - curl -o gcloud-sdk.tar.gz https://dl.google.com/dl/cloudsdk/channels/rapid/downloads/google-cloud-sdk-251.0.0-linux-x86_64.tar.gz - # $? contains the (exit) status code of previous command. - # If curl was successful, $? will be 0 else non-zero. - if [ 0 -eq $? ]; then - echo "Download complete. Installing Google Cloud SDK..." - else - echo "Error downloading Google Cloud SDK. Exiting." - exit 1 - fi - tar xzf gcloud-sdk.tar.gz -C $TOOLS_DIR/google-cloud-sdk-251.0.0/ - rm gcloud-sdk.tar.gz -fi - -export SETUP_GAE_DONE=true diff --git a/scripts/vagrant_lock.sh b/scripts/vagrant_lock.sh deleted file mode 100644 index 547c5bce979e..000000000000 --- a/scripts/vagrant_lock.sh +++ /dev/null @@ -1,60 +0,0 @@ -# Copyright 2016 The Oppia Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS-IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -########################################################################## - -# This file should not be invoked directly, but sourced from other sh scripts. - -# Creates a lockfile to help with new user confusion when launching a vagrant -# vm. See https://github.com/oppia/oppia/pull/2749 for details. 
-# -# It can be overridden by passing --nolock to start.sh - - -for arg in "$@"; do - case $arg in - --nolock) - NO_LOCK=true - ;; - esac -done - -if [ $NO_LOCK ]; then - return 0 -fi - -VAGRANT_LOCK_FILE="./.lock" - -function vagrant_lock_cleanup { - if [ ! $NO_CLEAN ]; then - rm -rf $VAGRANT_LOCK_FILE - fi -} - -trap vagrant_lock_cleanup EXIT - -if [ -e "$VAGRANT_LOCK_FILE" ] -then - echo "" - echo " Another setup instance is already running " - echo "" - echo " Please wait for that instance to complete or terminate it " - echo "" - echo " If you ran $0 twice on purpose, you can override this with --nolock " - echo "" - NO_CLEAN=1 - return 1 -else - touch $VAGRANT_LOCK_FILE -fi From 4df72476c0ca9d7a1c46ec0d1f6528ac0773a02a Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Sun, 25 Aug 2019 18:35:07 +0530 Subject: [PATCH 022/141] convert --- .github/CODEOWNERS | 2 +- scripts/common.py | 43 ++++++++ scripts/create_expression_parser.py | 29 +++--- scripts/install_third_party.py | 4 +- scripts/install_third_party_libs.py | 46 ++++---- scripts/pre_commit_linter.py | 2 +- scripts/run_backend_tests.py | 35 ++++--- scripts/run_e2e_tests.py | 54 ++-------- scripts/run_performance_tests.py | 156 ++++++++++++++++++++++++++++ scripts/run_performance_tests.sh | 123 ---------------------- scripts/setup.py | 83 ++++++++------- scripts/setup_gae.py | 61 +++++------ 12 files changed, 348 insertions(+), 290 deletions(-) create mode 100644 scripts/run_performance_tests.py delete mode 100644 scripts/run_performance_tests.sh diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index a374b989dc86..037e500f82cb 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -487,5 +487,5 @@ /export/ @seanlip /manifest.json @seanlip @vojtechjelinek /package*.json @seanlip @vojtechjelinek -/scripts/install_third_party.sh @seanlip @vojtechjelinek +/scripts/install_third_party_libs.py @seanlip @vojtechjelinek /.github/ @DubeySandeep diff --git a/scripts/common.py b/scripts/common.py index 
3d137be3f271..844d56d0bb9f 100644 --- a/scripts/common.py +++ b/scripts/common.py @@ -16,11 +16,26 @@ from __future__ import absolute_import # pylint: disable=import-only-modules import os +import signal +import socket import subprocess +import sys import python_utils +_PARENT_DIR = os.path.abspath(os.path.join(os.getcwd(), os.pardir)) +_PSUTIL_PATH = os.path.join(_PARENT_DIR, 'oppia_tools', 'psutil-5.6.3') +sys.path.insert(0, _PSUTIL_PATH) + +import psutil # isort:skip # pylint: disable=wrong-import-position + RELEASE_BRANCH_NAME_PREFIX = 'release-' +CURR_DIR = os.path.abspath(os.getcwd()) +OPPIA_TOOLS_DIR = os.path.join(CURR_DIR, '..', 'oppia_tools') +GOOGLE_APP_ENGINE_HOME = os.path.join( + OPPIA_TOOLS_DIR, 'google_appengine_1.9.67/google_appengine') +GOOGLE_CLOUD_SDK_HOME = os.path.join( + OPPIA_TOOLS_DIR, 'google-cloud-sdk-251.0.0/google-cloud-sdk') def ensure_directory_exists(d): @@ -164,6 +179,34 @@ def ensure_release_scripts_folder_exists_and_is_up_to_date(): subprocess.call(['git', 'pull', remote_alias]) +def is_port_open(port): + """Checks if no process is listening to the port. + + Args: + port: int. The port number. + + Return: + bool. True if port is open else False. + """ + sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + result = sock.connect_ex(('localhost', port)) + sock.close() + return bool(result) + + +# Credits: https://stackoverflow.com/a/20691431/11755830 +def kill_process(port): + """Kills a process that is listening to a specific port. + + Args: + port: int. The port number. 
+ """ + for process in psutil.process_iter(): + for conns in process.connections(kind='inet'): + if conns.laddr.port == port: + process.send_signal(signal.SIGTERM) + + class CD(python_utils.OBJECT): """Context manager for changing the current working directory.""" diff --git a/scripts/create_expression_parser.py b/scripts/create_expression_parser.py index 0298e2f626be..caba3c94eac3 100644 --- a/scripts/create_expression_parser.py +++ b/scripts/create_expression_parser.py @@ -18,12 +18,16 @@ The root folder MUST be named 'oppia'. It produces the expression parser. """ +from __future__ import absolute_import # pylint: disable=import-only-modules import fileinput import os import re import subprocess +import python_utils + +from . import common from . import install_third_party_libs from . import setup @@ -39,14 +43,12 @@ def main(): # Install the basic environment, e.g. nodejs. install_third_party_libs.main() + node_path = os.path.join(common.OPPIA_TOOLS_DIR, 'node-10.15.3') - curr_dir = os.path.abspath(os.getcwd()) - oppia_tools_dir = os.path.join(curr_dir, '..', 'oppia_tools') - node_path = os.path.join(oppia_tools_dir, 'node-10.15.3') - - print 'Checking whether pegjs is installed in %s' % oppia_tools_dir + python_utils.PRINT( + 'Checking whether pegjs is installed in %s' % common.OPPIA_TOOLS_DIR) if not os.path.exists('node_modules/pegjs'): - print 'Installing pegjs' + python_utils.PRINT('Installing pegjs') subprocess.call(('%s/bin/npm install pegjs@0.8.0' % node_path).split()) subprocess.call(( @@ -54,14 +56,17 @@ def main(): % (expression_parser_definition, expression_parser_js)).split()) for line in fileinput.input(files=expression_parser_js, inplace=True): - print re.sub( - r'module\.exports.*$', - 'angular.module(\'oppia\').factory(' - '\'ExpressionParserService\', [\'$log\', function($log) {', line), + python_utils.PRINT( + re.sub( + r'module\.exports.*$', + 'angular.module(\'oppia\').factory(' + '\'ExpressionParserService\', [\'$log\', function($log) {', + 
line), end='') - print re.sub(r'^})();\s*$', '}]);', line), + python_utils.PRINT( + re.sub(r'^})();\s*$', '}]);', line), end='') - print 'Done!' + python_utils.PRINT('Done!') if __name__ == '__main__': diff --git a/scripts/install_third_party.py b/scripts/install_third_party.py index e19c8b36de6f..94d0fa4a57ee 100644 --- a/scripts/install_third_party.py +++ b/scripts/install_third_party.py @@ -325,10 +325,10 @@ def download_manifest_files(filepath): dependency_tar_root_name, dependency_target_root_name) -def install_third_party_libs(): +def main(): """Installs all the third party libraries.""" download_manifest_files(MANIFEST_FILE_PATH) if __name__ == '__main__': - install_third_party_libs() + main() diff --git a/scripts/install_third_party_libs.py b/scripts/install_third_party_libs.py index c376a10296a0..3007e099e731 100644 --- a/scripts/install_third_party_libs.py +++ b/scripts/install_third_party_libs.py @@ -25,6 +25,7 @@ import python_utils +from . import common from . import install_third_party from . 
import setup @@ -106,24 +107,22 @@ def main(): help='optional; if specified, skips installation of skulpt.', action='store_true') - curr_dir = os.path.abspath(os.getcwd()) - oppia_tools_dir = os.path.join(curr_dir, '..', 'oppia_tools') - node_path = os.path.join(oppia_tools_dir, 'node-10.15.3') + node_path = os.path.join(common.OPPIA_TOOLS_DIR, 'node-10.15.3') third_party_dir = os.path.join('.', 'third_party') pip_dependencies = [ ('future', '0.17.1', third_party_dir), - ('pylint', '1.9.4', oppia_tools_dir), - ('Pillow', '6.0.0', oppia_tools_dir), - ('pylint-quotes', '0.2.1', oppia_tools_dir), - ('webtest', '2.0.33', oppia_tools_dir), - ('isort', '4.3.20', oppia_tools_dir), - ('pycodestyle', '2.5.0', oppia_tools_dir), - ('esprima', '4.0.1', oppia_tools_dir), - ('browsermob-proxy', '0.8.0', oppia_tools_dir), - ('selenium', '3.13.0', oppia_tools_dir), - ('PyGithub', '1.43.7', oppia_tools_dir), - ('psutil', '5.6.3', oppia_tools_dir), + ('pylint', '1.9.4', common.OPPIA_TOOLS_DIR), + ('Pillow', '6.0.0', common.OPPIA_TOOLS_DIR), + ('pylint-quotes', '0.2.1', common.OPPIA_TOOLS_DIR), + ('webtest', '2.0.33', common.OPPIA_TOOLS_DIR), + ('isort', '4.3.20', common.OPPIA_TOOLS_DIR), + ('pycodestyle', '2.5.0', common.OPPIA_TOOLS_DIR), + ('esprima', '4.0.1', common.OPPIA_TOOLS_DIR), + ('browsermob-proxy', '0.8.0', common.OPPIA_TOOLS_DIR), + ('selenium', '3.13.0', common.OPPIA_TOOLS_DIR), + ('PyGithub', '1.43.7', common.OPPIA_TOOLS_DIR), + ('psutil', '5.6.3', common.OPPIA_TOOLS_DIR), ] for package, version, path in pip_dependencies: @@ -139,7 +138,7 @@ def main(): # Download and install required JS and zip files. python_utils.PRINT('Installing third-party JS libraries and zip files.') - install_third_party.install_third_party_libs() + install_third_party.main() # Install third-party node modules needed for the build process. 
subprocess.call(('%s/bin/npm install --only=dev' % node_path).split()) @@ -166,9 +165,10 @@ def main(): if not os.path.exists( os.path.join( third_party_dir, 'static/skulpt-0.10.0')) and not no_skulpt: - if not os.path.exists(os.path.join(oppia_tools_dir, 'skulpt-0.10.0')): + if not os.path.exists( + os.path.join(common.OPPIA_TOOLS_DIR, 'skulpt-0.10.0')): python_utils.PRINT('Downloading Skulpt') - os.chdir(oppia_tools_dir) + os.chdir(common.OPPIA_TOOLS_DIR) os.mkdir('skulpt-0.10.0') os.chdir('skulpt-0.10.0') subprocess.call( @@ -190,7 +190,8 @@ def main(): # Skulpt. for line in fileinput.input( files=os.path.join( - oppia_tools_dir, 'skulpt-0.10.0/skulpt/skulpt.py')): + common.OPPIA_TOOLS_DIR, + 'skulpt-0.10.0/skulpt/skulpt.py')): # Inside this loop the STDOUT will be redirected to the file. # The comma after each python_utils.PRINT statement is needed to # avoid double line breaks. @@ -220,10 +221,10 @@ def main(): shutil.move( tmp_file, os.path.join( - oppia_tools_dir, 'skulpt-0.10.0/skulpt/skulpt.py')) + common.OPPIA_TOOLS_DIR, 'skulpt-0.10.0/skulpt/skulpt.py')) subprocess.call( - 'python $oppia_tools_dir/skulpt-0.10.0/skulpt/skulpt.py dist' - .split()) + 'python $common.OPPIA_TOOLS_DIR/skulpt-0.10.0/skulpt/skulpt.py ' + 'dist'.split()) # Return to the Oppia root folder. os.chdir(OPPIA_DIR) @@ -231,7 +232,8 @@ def main(): # Move the build directory to the static resources folder. os.makedirs(os.path.join(third_party_dir, 'static/skulpt-0.10.0')) shutil.copytree( - os.path.join(oppia_tools_dir, 'skulpt-0.10.0/skulpt/dist/'), + os.path.join( + common.OPPIA_TOOLS_DIR, 'skulpt-0.10.0/skulpt/dist/'), os.path.join(third_party_dir, 'static/skulpt-0.10.0')) # Install pre-commit script. 
diff --git a/scripts/pre_commit_linter.py b/scripts/pre_commit_linter.py index fb9f07bd5409..24fc26efcfe2 100644 --- a/scripts/pre_commit_linter.py +++ b/scripts/pre_commit_linter.py @@ -539,7 +539,7 @@ '/export/', '/manifest.json', '/package*.json', - '/scripts/install_third_party.sh', + '/scripts/install_third_party_libs.py', '/.github/'] if not os.getcwd().endswith('oppia'): diff --git a/scripts/run_backend_tests.py b/scripts/run_backend_tests.py index 23f56d3406c9..48938c1be9f6 100644 --- a/scripts/run_backend_tests.py +++ b/scripts/run_backend_tests.py @@ -54,15 +54,17 @@ IMPORTANT: Only one of --test_path and --test_target should be specified. """ +from __future__ import absolute_import # pylint: disable=import-only-modules import argparse import os import subprocess -import tarfile -import urllib + +import python_utils from . import backend_tests from . import build +from . import common from . import install_third_party_libs from . import setup from . import setup_gae @@ -82,25 +84,26 @@ def main(): # Install third party dependencies. 
subprocess.call('bash scripts/install_third_party.sh'.split()) - curr_dir = os.path.abspath(os.getcwd()) - oppia_tools_dir = os.path.join(curr_dir, '..', 'oppia_tools') - coverage_home = os.path.join(oppia_tools_dir, 'coverage-4.5.4') + coverage_home = os.path.join(common.OPPIA_TOOLS_DIR, 'coverage-4.5.4') coverage_path = os.path.join(coverage_home, 'coverage') parsed_args, _ = _PARSER.parse_known_args() if parsed_args.generate_coverage_report: - print 'Checking whether coverage is installed in %s' % oppia_tools_dir - if not os.path.exists(os.path.join(oppia_tools_dir, 'coverage-4.5.4')): - print 'Installing coverage' + python_utils.PRINT( + 'Checking whether coverage is installed in %s' + % common.OPPIA_TOOLS_DIR) + if not os.path.exists( + os.path.join(common.OPPIA_TOOLS_DIR, 'coverage-4.5.4')): + python_utils.PRINT('Installing coverage') install_third_party_libs.pip_install( 'coverage', '4.5.4', - os.path.join(oppia_tools_dir, 'coverage-4.5.4')) + os.path.join(common.OPPIA_TOOLS_DIR, 'coverage-4.5.4')) # Compile typescript files. - print 'Compiling typescript...' + python_utils.PRINT('Compiling typescript...') subprocess.call('node_modules/typescript/bin/tsc --project .'.split()) - print 'Compiling webpack...' + python_utils.PRINT('Compiling webpack...') subprocess.call( 'node_modules/webpack/bin/webpack.js --config webpack.prod.config.ts' .split()) @@ -112,14 +115,14 @@ def main(): subprocess.call(('python %s combine' % coverage_path).split()) subprocess.call( ('python %s report --omit="%s*","third_party/*","/usr/share/*" ' - '--show-missing' % (coverage_path, oppia_tools_dir)).split()) + '--show-missing' + % (coverage_path, common.OPPIA_TOOLS_DIR)).split()) - print 'Generating xml coverage report...' + python_utils.PRINT('Generating xml coverage report...') subprocess.call(('python %s xml' % coverage_path).split()) - print '' - print 'Done!' 
- print '' + python_utils.PRINT('') + python_utils.PRINT('Done!') if __name__ == '__main__': diff --git a/scripts/run_e2e_tests.py b/scripts/run_e2e_tests.py index 167c7f1b4cbe..c9c64a932609 100644 --- a/scripts/run_e2e_tests.py +++ b/scripts/run_e2e_tests.py @@ -47,8 +47,6 @@ import fileinput import os import shutil -import signal -import socket import subprocess import sys import time @@ -56,16 +54,11 @@ import python_utils from . import build +from . import common from . import install_chrome_on_travis from . import setup from . import setup_gae -_PARENT_DIR = os.path.abspath(os.path.join(os.getcwd(), os.pardir)) -_PSUTIL_PATH = os.path.join(_PARENT_DIR, 'oppia_tools', 'psutil-5.6.3') -sys.path.insert(0, _PSUTIL_PATH) - -import psutil # isort:skip # pylint: disable=wrong-import-position - _PARSER = argparse.ArgumentParser() _PARSER.add_argument( '--skip_install', @@ -99,27 +92,14 @@ default='3') -# Credits: https://stackoverflow.com/a/20691431/11755830 -def kill_process(port): - """Kills a process that is listening to a specific port. - - Args: - port: int. The port number. - """ - for process in psutil.process_iter(): - for conns in process.connections(kind='inet'): - if conns.laddr.port == port: - process.send_signal(signal.SIGTERM) - - def cleanup(): """Send a kill signal to the dev server and Selenium server.""" - kill_process(4444) - kill_process(9001) + common.kill_process(4444) + common.kill_process(9001) # Wait for the servers to go down; suppress 'connection refused' error # output from nc since that is exactly what we are expecting to happen. - while not is_port_open(4444) or not is_port_open(9001): + while not common.is_port_open(4444) or not common.is_port_open(9001): time.sleep(1) if os.path.isdir('../protractor-screenshots'): @@ -134,21 +114,6 @@ def cleanup(): python_utils.PRINT('Done!') -def is_port_open(port): - """Checks if no process is listening to the port. - - Args: - port: int. The port number. - - Return: - bool. 
True if port is open else False. - """ - sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - result = sock.connect_ex(('localhost', port)) - sock.close() - return bool(result) - - def main(): """Runs the end to end tests.""" setup.main() @@ -160,7 +125,7 @@ def main(): setup.maybe_install_dependencies( parsed_args.skip_install, parsed_args.run_minified_tests) - if not is_port_open(8181): + if not common.is_port_open(8181): python_utils.PRINT('') python_utils.PRINT( 'There is already a server running on localhost:8181.') @@ -174,7 +139,6 @@ def main(): # Developers: note that at the end of this script, the cleanup() function at # the top of the file is run. atexit.register(cleanup) - signal.signal(signal.SIGINT, cleanup) if parsed_args.prod_env: python_utils.PRINT('Generating files for production mode...') @@ -215,18 +179,14 @@ def main(): subprocess.call( 'node_modules/.bin/webdriver-manager start 2>/dev/null)&'.split()) # Start a demo server. - curr_dir = os.path.abspath(os.getcwd()) - oppia_tools_dir = os.path.join(curr_dir, '..', 'oppia_tools') - google_app_engine_home = os.path.join( - oppia_tools_dir, 'google_appengine_1.9.67/google_appengine') subprocess.call( ('python %s/dev_appserver.py --host=0.0.0.0 --port=9001 ' '--clear_datastore=yes --dev_appserver_log_level=critical ' '--log_level=critical --skip_sdk_update_check=true $%s)&' - % (google_app_engine_home, app_yaml_filepath)).split()) + % (common.GOOGLE_APP_ENGINE_HOME, app_yaml_filepath)).split()) # Wait for the servers to come up. - while is_port_open(4444) or is_port_open(9001): + while common.is_port_open(4444) or common.is_port_open(9001): time.sleep(1) # Delete outdated screenshots. diff --git a/scripts/run_performance_tests.py b/scripts/run_performance_tests.py new file mode 100644 index 000000000000..4597986fee0d --- /dev/null +++ b/scripts/run_performance_tests.py @@ -0,0 +1,156 @@ +# Copyright 2019 The Oppia Authors. All Rights Reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an 'AS-IS' BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""INSTRUCTIONS: + +The root folder MUST be named 'oppia'. + +Run all tests sequentially: +- run bash scripts/run_performance_tests.sh without args in order to run all + tests sequentially. + +Run test for a specific page: +- run bash scripts/run_performance_tests.sh --test_name=page_test + +page_test is the name of the file containing that test eg. splash_test. +""" +from __future__ import absolute_import # pylint: disable=import-only-modules + +import argparse +import atexit +import os +import subprocess +import sys +import time + +import python_utils + +from . import clean +from . import common +from . import setup +from . import setup_gae + +_PARSER = argparse.ArgumentParser() +_PARSER.add_argument( + '--skip_install', + help='optional; if specified, skips installing dependencies', + action='store_true') +_PARSER.add_argument( + '--run_minified_tests', + help='optional; if specified, runs frontend karma tests on both minified ' + 'and non-minified code', + action='store_true') +_PARSER.add_argument( + '--test_name', + help='If an argument is present then run test for that specific page. ' + 'Otherwise run tests for all the pages sequentially.') + + +def cleanup(): + """Send a kill signal to the dev server.""" + common.kill_process(9501) + + # Wait for the servers to go down; suppress 'connection refused' error + # output from nc since that is exactly what we are expecting to happen. 
+ while not common.is_port_open(9501): + time.sleep(1) + + python_utils.PRINT('Done!') + + +def run_performance_test(test_name, xvfb_prefix): + """Runs the performance tests. + + Args: + test_name: str. The test name to be run. + xvfb_prefix: str. The XVFB prefix. + """ + subprocess.call(( + '%s python scripts/backend_tests.py ' + '--test_target=core.tests.performance_tests.%s' + % (xvfb_prefix, test_name)).split()) + + +def main(): + """Main function to run the performance tests.""" + setup.main() + setup_gae.main() + + parsed_args = _PARSER.parse_args() + setup.maybe_install_dependencies( + parsed_args.skip_install, parsed_args.run_minified_tests) + + if not common.is_port_open(8181): + python_utils.PRINT( + 'There is already a server running on localhost:8181') + python_utils.PRINT( + 'Please terminate it before running the performance tests.') + python_utils.PRINT('Exiting.') + sys.exit(1) + + # Forces the cleanup function to run on exit. + # Developers: note that at the end of this script, the cleanup() function at + # the top of the file is run. + atexit.register(cleanup) + + browsermob_proxy_path = os.path.join( + common.OPPIA_TOOLS_DIR, 'browsermob-proxy-2.1.1/bin/browsermob-proxy') + + # Change execute status of browsermob-proxy. + os.chmod(browsermob_proxy_path, 744) + + # Start a demo server. + subprocess.call(( + 'python %s/dev_appserver.py --host=0.0.0.0 --port=9501 ' + '--clear_datastore=yes --dev_appserver_log_level=critical ' + '--log_level=critical --skip_sdk_update_check=true app_dev.yaml)&' + % common.GOOGLE_APP_ENGINE_HOME).split()) + + # Wait for the servers to come up. + while common.is_port_open(9501): + time.sleep(1) + + # Install xvfb if not on travis, Used in frontend, e2e tests and performance + # tests. + if os.environ.get('TRAVIS'): + xvfb_prefix = '' + else: + # This installs xvfb for systems with apt-get installer like Ubuntu, and + # will fail for other systems. + # TODO(gvishal): Install/provide xvfb for other systems. 
+ subprocess.call('sudo apt-get install xvfb'.split()) + xvfb_prefix = '/usr/bin/xvfb-run' + + # If an argument is present then run test for that specific page. Otherwise + # run tests for all the pages sequentially. + if parsed_args.test_name: + python_utils.PRINT( + 'Running performance test for: %s' % parsed_args.test_name) + run_performance_test(parsed_args.test_name, xvfb_prefix) + else: + run_performance_test('collection_player_test', xvfb_prefix) + run_performance_test('creator_dashboard_test', xvfb_prefix) + run_performance_test('exploration_editor_test', xvfb_prefix) + run_performance_test('exploration_player_test', xvfb_prefix) + run_performance_test('library_test', xvfb_prefix) + run_performance_test('profile_page_test', xvfb_prefix) + run_performance_test('splash_test', xvfb_prefix) + + os.chmod(browsermob_proxy_path, 644) + clean.delete_file('bmp.log') + clean.delete_file('server.log') + + +if __name__ == '__main__': + main() diff --git a/scripts/run_performance_tests.sh b/scripts/run_performance_tests.sh deleted file mode 100644 index ea30d2ce0993..000000000000 --- a/scripts/run_performance_tests.sh +++ /dev/null @@ -1,123 +0,0 @@ -#!/usr/bin/env bash - -# Copyright 2016 The Oppia Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS-IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -########################################################################## - -# INSTRUCTIONS: -# -# The root folder MUST be named 'oppia'. 
-# -# Run all tests sequentially: -# - run bash scripts/run_performance_tests.sh without args in order to run all tests sequentially -# -# Run test for a specific page: -# - run bash scripts/run_performance_tests.sh --test_name=page_test -# -# page_test is the name of the file containing that test eg. splash_test. - - -function cleanup { - # Send a kill signal to the dev server. - kill `ps aux | grep "[Dd]ev_appserver.py --host=0.0.0.0 --port=9501" | awk '{print $2}'` - - # Wait for the servers to go down; suppress "connection refused" error output - # from nc since that is exactly what we are expecting to happen. - while ( nc -vz localhost 9501 >/dev/null 2>&1 ); do sleep 1; done - - echo Done! -} - -if [ -z "$BASH_VERSION" ] -then - echo "" - echo " Please run me using bash: " - echo "" - echo " bash $0" - echo "" - return 1 -fi - -set -e -source $(dirname $0)/setup.sh || exit 1 -source $(dirname $0)/setup_gae.sh || exit 1 - -export DEFAULT_SKIP_INSTALLING_THIRD_PARTY_LIBS=false -export DEFAULT_RUN_MINIFIED_TESTS=false -maybeInstallDependencies "$@" - -if ( nc -vz localhost 8181 ); then - echo "" - echo " There is already a server running on localhost:8181." - echo " Please terminate it before running the performance tests." - echo " Exiting." - echo "" - exit 1 -fi - - -# Forces the cleanup function to run on exit. -# Developers: note that at the end of this script, the cleanup() function at -# the top of the file is run. -trap cleanup EXIT - -# Change execute status of browsermob-proxy -chmod 744 $TOOLS_DIR/browsermob-proxy-2.1.1/bin/browsermob-proxy - -# Start a demo server. -($PYTHON_CMD $GOOGLE_APP_ENGINE_HOME/dev_appserver.py --host=0.0.0.0 --port=9501 --clear_datastore=yes --dev_appserver_log_level=critical --log_level=critical --skip_sdk_update_check=true app_dev.yaml)& - -# Wait for the servers to come up. -while ! nc -vz localhost 9501; do sleep 1; done - -# Install xvfb if not on travis, Used in frontend, e2e tests and performance tests. 
-if [ "$TRAVIS" = true ]; then - export XVFB_PREFIX="" -else - # This installs xvfb for systems with apt-get installer like Ubuntu, and will fail for other systems. - # TODO(gvishal): Install/provide xvfb for other systems. - sudo apt-get install xvfb - export XVFB_PREFIX="/usr/bin/xvfb-run" -fi - -TEST_NAME="" -# Refer: http://stackoverflow.com/questions/192249/how-do-i-parse-command-line-arguments-in-bash -for i in "$@" -do -case $i in - --test_name=*) - TEST_NAME="${i#*=}" - ;; -esac -done - -# If an argument is present then run test for that specific page. Otherwise -# run tests for all the pages sequentially. -if [[ ! -z $TEST_NAME ]]; then - TEST_PATH="core.tests.performance_tests.$TEST_NAME" - echo "Running performance test for: $TEST_NAME, executing: $TEST_PATH" - $XVFB_PREFIX $PYTHON_CMD scripts/backend_tests.py --test_target=$TEST_PATH -else - $XVFB_PREFIX $PYTHON_CMD scripts/backend_tests.py --test_target=core.tests.performance_tests.collection_player_test - $XVFB_PREFIX $PYTHON_CMD scripts/backend_tests.py --test_target=core.tests.performance_tests.creator_dashboard_test - $XVFB_PREFIX $PYTHON_CMD scripts/backend_tests.py --test_target=core.tests.performance_tests.exploration_editor_test - $XVFB_PREFIX $PYTHON_CMD scripts/backend_tests.py --test_target=core.tests.performance_tests.exploration_player_test - $XVFB_PREFIX $PYTHON_CMD scripts/backend_tests.py --test_target=core.tests.performance_tests.library_test - $XVFB_PREFIX $PYTHON_CMD scripts/backend_tests.py --test_target=core.tests.performance_tests.profile_page_test - $XVFB_PREFIX $PYTHON_CMD scripts/backend_tests.py --test_target=core.tests.performance_tests.splash_test -fi - -chmod 644 $TOOLS_DIR/browsermob-proxy-2.1.1/bin/browsermob-proxy -rm bmp.log server.log diff --git a/scripts/setup.py b/scripts/setup.py index 01ea82a3e75a..a3707eb37cf2 100644 --- a/scripts/setup.py +++ b/scripts/setup.py @@ -15,15 +15,18 @@ """This file should not be invoked directly, but called from other Python scripts. 
Python execution environent set up for all scripts. """ +from __future__ import absolute_import # pylint: disable=import-only-modules import os import shutil import subprocess import sys import tarfile -import urllib + +import python_utils from . import build +from . import common def delete_directory_tree(directory_path): @@ -58,15 +61,16 @@ def maybe_install_dependencies( subprocess.call('bash scripts/install_third_party.sh'.split()) # Ensure that generated JS and CSS files are in place before running the # tests. - print '' - print 'Running build task with concatenation only ' - print '' + python_utils.PRINT('') + python_utils.PRINT('Running build task with concatenation only') + python_utils.PRINT('') build.build() if run_minified_tests is True: - print '' - print 'Running build task with concatenation and minification' - print '' + python_utils.PRINT('') + python_utils.PRINT( + 'Running build task with concatenation and minification') + python_utils.PRINT('') subprocess.call('python scripts/build.py --prod_env'.split()) @@ -76,22 +80,24 @@ def maybe_install_dependencies( def test_python_version(): running_python_version = '{0[0]}.{0[1]}'.format(sys.version_info) if running_python_version != '2.7': - print 'Please use Python2.7. Exiting...' + python_utils.PRINT('Please use Python2.7. Exiting...') # If OS is Windows, print helpful error message about adding Python to # path. os_info = os.uname() if os_info[0] != 'Darwin' and os_info[0] != 'Linux': - print ( + python_utils.PRINT( 'It looks like you are using Windows. If you have Python ' 'installed,') - print 'make sure it is in your PATH and that PYTHONPATH is set.' 
- print ( + python_utils.PRINT( + 'make sure it is in your PATH and that PYTHONPATH is set.') + python_utils.PRINT( 'If you have two versions of Python (ie, Python 2.7 and 3), ' 'specify 2.7 before other versions of Python when setting the ' 'PATH.') - print 'Here are some helpful articles:' - print 'http://docs.python-guide.org/en/latest/starting/install/win/' - print ( + python_utils.PRINT('Here are some helpful articles:') + python_utils.PRINT( + 'http://docs.python-guide.org/en/latest/starting/install/win/') + python_utils.PRINT( 'https://stackoverflow.com/questions/3701646/how-to-add-to-the-' 'pythonpath-in-windows-7') # Exit when no suitable Python environment can be found. @@ -106,46 +112,49 @@ def main(): # folders. if not os.getcwd().endswith('oppia') and not os.getcwd().endswith( 'deploy-'): - print '' - print 'WARNING This script should be run from the oppia/ root folder.' - print '' + python_utils.PRINT('') + python_utils.PRINT( + 'WARNING This script should be run from the oppia/ root folder.') + python_utils.PRINT('') sys.exit(1) # Set COMMON_DIR to the absolute path of the directory above OPPIA_DIR. This # is necessary becaue COMMON_DIR (or subsequent variables which refer to it) # may use it in a situation where relative paths won't work as expected(such # as $PYTHONPATH). - curr_dir = os.path.abspath(os.getcwd()) - oppia_tools_dir = os.path.join(curr_dir, '..', 'oppia_tools') - - create_directory(oppia_tools_dir) + create_directory(common.OPPIA_TOOLS_DIR) create_directory('third_party/') create_directory('node_modules/') # Adjust the path to include a reference to node. - node_path = os.path.join(oppia_tools_dir, 'node-10.15.3') + node_path = os.path.join(common.OPPIA_TOOLS_DIR, 'node-10.15.3') os_info = os.uname() if os_info[0] != 'Darwin' and os_info[0] != 'Linux': # Node is a requirement for all installation scripts. Here, we check if # the OS supports node.js installation; if not, we exit with an error. 
- print '' - print 'WARNING: Unsupported OS for installation of node.js.' - print 'If you are running this script on Windows, see the instructions' - print 'here regarding installation of node.js:' - print '' - print ( + python_utils.PRINT('') + python_utils.PRINT( + 'WARNING: Unsupported OS for installation of node.js.') + python_utils.PRINT( + 'If you are running this script on Windows, see the instructions') + python_utils.PRINT( + 'here regarding installation of node.js:') + python_utils.PRINT('') + python_utils.PRINT( 'https://github.com/oppia/oppia/wiki/Installing-Oppia-%28Windows' '%29') - print '' - print 'STATUS: Installation completed except for node.js. Exiting.' - print '' + python_utils.PRINT('') + python_utils.PRINT( + 'STATUS: Installation completed except for node.js. Exiting.') + python_utils.PRINT('') sys.exit(1) # Download and install node.js. - print 'Checking if node.js is installed in %s' % oppia_tools_dir + python_utils.PRINT( + 'Checking if node.js is installed in %s' % common.OPPIA_TOOLS_DIR) if not os.path.exists(node_path): - print 'Installing Node.js' + python_utils.PRINT('Installing Node.js') if os_info[0] == 'Darwin': if os_info[4] == 'x86_64': node_file_name = 'node-v10.15.3-darwin-x64' @@ -157,11 +166,11 @@ def main(): else: node_file_name = 'node-v10.15.3-linux-x86' - urllib.urlretrieve( + python_utils.url_retrieve( 'https://nodejs.org/dist/v10.15.3/%s.tar.gz' % node_file_name, filename='node-download.tgz') tar = tarfile.open(name='node-download.tgz') - tar.extractall(path=oppia_tools_dir) + tar.extractall(path=common.OPPIA_TOOLS_DIR) tar.close() os.remove('node-download.tgz') @@ -207,11 +216,11 @@ def main(): chrome_bin = ( '/Applications/Google Chrome.app/Contents/MacOS/Google Chrome') else: - print 'Chrome is not found, stopping ...' + python_utils.PRINT('Chrome is not found, stopping ...') sys.exit(1) os.environ['CHROME_BIN'] = chrome_bin - print 'Environment setup completed.' 
+ python_utils.PRINT('Environment setup completed.') if __name__ == '__main__': diff --git a/scripts/setup_gae.py b/scripts/setup_gae.py index 26218835f2bf..662a6c9c1185 100644 --- a/scripts/setup_gae.py +++ b/scripts/setup_gae.py @@ -15,31 +15,30 @@ """This file should not be invoked directly, but called from other Python scripts. Python execution environment setup for scripts that require GAE. """ +from __future__ import absolute_import # pylint: disable=import-only-modules import os import sys import tarfile -import urllib import zipfile +import python_utils + +from . import common + def main(): """Runs the script to setup GAE.""" - curr_dir = os.path.abspath(os.getcwd()) - oppia_tools_dir = os.path.join(curr_dir, '..', 'oppia_tools') - google_app_engine_home = os.path.join( - oppia_tools_dir, 'google_appengine_1.9.67/google_appengine') - google_cloud_sdk_home = os.path.join( - oppia_tools_dir, 'google-cloud-sdk-251.0.0/google-cloud-sdk') - coverage_home = os.path.join(oppia_tools_dir, 'coverage-4.5.4') + coverage_home = os.path.join(common.OPPIA_TOOLS_DIR, 'coverage-4.5.4') # Note that if the following line is changed so that it uses webob_1_1_1, # PUT requests from the frontend fail. sys.path.append('.') sys.path.append(coverage_home) - sys.path.append(google_app_engine_home) - sys.path.append(os.path.join(google_app_engine_home, 'lib/webob_0_9')) - sys.path.append(os.path.join(oppia_tools_dir, 'webtest-2.0.33')) + sys.path.append(common.GOOGLE_APP_ENGINE_HOME) + sys.path.append( + os.path.join(common.GOOGLE_APP_ENGINE_HOME, 'lib/webob_0_9')) + sys.path.append(os.path.join(common.OPPIA_TOOLS_DIR, 'webtest-2.0.33')) # Delete old *.pyc files. 
for directory, _, files in os.walk('.'): @@ -48,44 +47,48 @@ def main(): filepath = os.path.join(directory, file_name) os.remove(filepath) - print ( + python_utils.PRINT( 'Checking whether Google App Engine is installed in %s' - % google_app_engine_home) - if not os.path.exists(google_app_engine_home): - print 'Downloading Google App Engine (this may take a little while)...' - os.makedirs(google_app_engine_home) + % common.GOOGLE_APP_ENGINE_HOME) + if not os.path.exists(common.GOOGLE_APP_ENGINE_HOME): + python_utils.PRINT( + 'Downloading Google App Engine (this may take a little while)...') + os.makedirs(common.GOOGLE_APP_ENGINE_HOME) try: - urllib.urlretrieve( + python_utils.url_retrieve( 'https://storage.googleapis.com/appengine-sdks/featured/' 'google_appengine_1.9.67.zip', filename='gae-download.zip') except Exception: - print 'Error downloading Google App Engine. Exiting.' + python_utils.PRINT('Error downloading Google App Engine. Exiting.') sys.exit(1) - print 'Download complete. Installing Google App Engine...' + python_utils.PRINT('Download complete. Installing Google App Engine...') with zipfile.ZipFile('gae-download.zip', 'r') as zip_ref: zip_ref.extractall( - path=os.path.join(oppia_tools_dir, 'google_appengine_1.9.67/')) + path=os.path.join( + common.OPPIA_TOOLS_DIR, 'google_appengine_1.9.67/')) os.remove('gae-download.zip') - print ( + python_utils.PRINT( 'Checking whether google-cloud-sdk is installed in %s' - % google_cloud_sdk_home) - if not os.path.exists(google_cloud_sdk_home): - print 'Downloading Google Cloud SDK (this may take a little while)...' 
- os.makedirs(google_cloud_sdk_home) + % common.GOOGLE_CLOUD_SDK_HOME) + if not os.path.exists(common.GOOGLE_CLOUD_SDK_HOME): + python_utils.PRINT( + 'Downloading Google Cloud SDK (this may take a little while)...') + os.makedirs(common.GOOGLE_CLOUD_SDK_HOME) try: - urllib.urlretrieve( + python_utils.url_retrieve( 'https://dl.google.com/dl/cloudsdk/channels/rapid/downloads/' 'google-cloud-sdk-251.0.0-linux-x86_64.tar.gz', filename='gcloud-sdk.tar.gz') except Exception: - print 'Error downloading Google Cloud SDK. Exiting.' + python_utils.PRINT('Error downloading Google Cloud SDK. Exiting.') sys.exit(1) - print 'Download complete. Installing Google Cloud SDK...' + python_utils.PRINT('Download complete. Installing Google Cloud SDK...') tar = tarfile.open(name='gcloud-sdk.tar.gz') tar.extractall( - path=os.path.join(oppia_tools_dir, 'google-cloud-sdk-251.0.0/')) + path=os.path.join( + common.OPPIA_TOOLS_DIR, 'google-cloud-sdk-251.0.0/')) tar.close() os.remove('gcloud-sdk.tar.gz') From d3a24649b68e583854889fb862c6bce6b5634343 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Sun, 25 Aug 2019 18:37:53 +0530 Subject: [PATCH 023/141] fix comment --- scripts/run_performance_tests.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/run_performance_tests.py b/scripts/run_performance_tests.py index 4597986fee0d..b96b8395d3ec 100644 --- a/scripts/run_performance_tests.py +++ b/scripts/run_performance_tests.py @@ -17,11 +17,11 @@ The root folder MUST be named 'oppia'. Run all tests sequentially: -- run bash scripts/run_performance_tests.sh without args in order to run all +- run python -m scripts.run_performance_tests without args in order to run all tests sequentially. Run test for a specific page: -- run bash scripts/run_performance_tests.sh --test_name=page_test +- run python -m scripts.run_performance_tests --test_name=page_test page_test is the name of the file containing that test eg. splash_test. 
""" From 275d886fcf693bec932adf8b005efb4b65b57991 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Sun, 25 Aug 2019 22:10:40 +0530 Subject: [PATCH 024/141] convert --- scripts/common.py | 1 + scripts/create_expression_parser.py | 6 +- scripts/install_third_party_libs.py | 10 +- scripts/run_e2e_tests.py | 11 +- scripts/setup.py | 5 +- scripts/start.sh | 170 ---------------------------- 6 files changed, 17 insertions(+), 186 deletions(-) delete mode 100755 scripts/start.sh diff --git a/scripts/common.py b/scripts/common.py index 844d56d0bb9f..59d4a7c9ebf1 100644 --- a/scripts/common.py +++ b/scripts/common.py @@ -36,6 +36,7 @@ OPPIA_TOOLS_DIR, 'google_appengine_1.9.67/google_appengine') GOOGLE_CLOUD_SDK_HOME = os.path.join( OPPIA_TOOLS_DIR, 'google-cloud-sdk-251.0.0/google-cloud-sdk') +NODE_PATH = os.path.join(OPPIA_TOOLS_DIR, 'node-10.15.3') def ensure_directory_exists(d): diff --git a/scripts/create_expression_parser.py b/scripts/create_expression_parser.py index caba3c94eac3..0b7943a4a00e 100644 --- a/scripts/create_expression_parser.py +++ b/scripts/create_expression_parser.py @@ -43,19 +43,19 @@ def main(): # Install the basic environment, e.g. nodejs. 
install_third_party_libs.main() - node_path = os.path.join(common.OPPIA_TOOLS_DIR, 'node-10.15.3') python_utils.PRINT( 'Checking whether pegjs is installed in %s' % common.OPPIA_TOOLS_DIR) if not os.path.exists('node_modules/pegjs'): python_utils.PRINT('Installing pegjs') - subprocess.call(('%s/bin/npm install pegjs@0.8.0' % node_path).split()) + subprocess.call(( + '%s/bin/npm install pegjs@0.8.0' % common.NODE_PATH).split()) subprocess.call(( 'node_modules/pegjs/bin/pegjs %s %s' % (expression_parser_definition, expression_parser_js)).split()) - for line in fileinput.input(files=expression_parser_js, inplace=True): + for line in fileinput.input(files=[expression_parser_js], inplace=True): python_utils.PRINT( re.sub( r'module\.exports.*$', diff --git a/scripts/install_third_party_libs.py b/scripts/install_third_party_libs.py index 3007e099e731..a0d9c35e569c 100644 --- a/scripts/install_third_party_libs.py +++ b/scripts/install_third_party_libs.py @@ -107,7 +107,6 @@ def main(): help='optional; if specified, skips installation of skulpt.', action='store_true') - node_path = os.path.join(common.OPPIA_TOOLS_DIR, 'node-10.15.3') third_party_dir = os.path.join('.', 'third_party') pip_dependencies = [ @@ -141,11 +140,12 @@ def main(): install_third_party.main() # Install third-party node modules needed for the build process. - subprocess.call(('%s/bin/npm install --only=dev' % node_path).split()) + subprocess.call(( + '%s/bin/npm install --only=dev' % common.NODE_PATH).split()) # This line removes the 'npm ERR! missing:' messages. For reference, see # this thread: https://github.com/npm/npm/issues/19393#issuecomment- # 374076889. - subprocess.call(('%s/bin/npm dedupe' % node_path).split()) + subprocess.call(('%s/bin/npm dedupe' % common.NODE_PATH).split()) # Download and install Skulpt. Skulpt is built using a Python script # included within the Skulpt repository (skulpt.py). 
This script normally @@ -189,9 +189,9 @@ def main(): # and generating documentation and are not necessary when building # Skulpt. for line in fileinput.input( - files=os.path.join( + files=[os.path.join( common.OPPIA_TOOLS_DIR, - 'skulpt-0.10.0/skulpt/skulpt.py')): + 'skulpt-0.10.0/skulpt/skulpt.py')]): # Inside this loop the STDOUT will be redirected to the file. # The comma after each python_utils.PRINT statement is needed to # avoid double line breaks. diff --git a/scripts/run_e2e_tests.py b/scripts/run_e2e_tests.py index c9c64a932609..8ad0babc56e4 100644 --- a/scripts/run_e2e_tests.py +++ b/scripts/run_e2e_tests.py @@ -46,6 +46,7 @@ import atexit import fileinput import os +import re import shutil import subprocess import sys @@ -143,17 +144,19 @@ def main(): if parsed_args.prod_env: python_utils.PRINT('Generating files for production mode...') constants_env_variable = '\'DEV_MODE\': false' - for line in fileinput.input(files='assets/constants.js', inplace=True): + for line in fileinput.input( + files=['assets/constants.js'], inplace=True): python_utils.PRINT( - line.replace('\'DEV_MODE\': .*', constants_env_variable), + re.sub(r'\'DEV_MODE\': .*', constants_env_variable, line), end='') subprocess.call('python scripts/build.py --prod_env'.split()) app_yaml_filepath = 'app.yaml' else: constants_env_variable = '\'DEV_MODE\': true' - for line in fileinput.input(files='assets/constants.js', inplace=True): + for line in fileinput.input( + files=['assets/constants.js'], inplace=True): python_utils.PRINT( - line.replace('\'DEV_MODE\': .*', constants_env_variable), + re.sub('\'DEV_MODE\': .*', constants_env_variable, line), end='') build.build() app_yaml_filepath = 'app_dev.yaml' diff --git a/scripts/setup.py b/scripts/setup.py index a3707eb37cf2..fe73744de9b1 100644 --- a/scripts/setup.py +++ b/scripts/setup.py @@ -126,9 +126,6 @@ def main(): create_directory('third_party/') create_directory('node_modules/') - # Adjust the path to include a reference to node. 
- node_path = os.path.join(common.OPPIA_TOOLS_DIR, 'node-10.15.3') - os_info = os.uname() if os_info[0] != 'Darwin' and os_info[0] != 'Linux': # Node is a requirement for all installation scripts. Here, we check if @@ -153,7 +150,7 @@ def main(): # Download and install node.js. python_utils.PRINT( 'Checking if node.js is installed in %s' % common.OPPIA_TOOLS_DIR) - if not os.path.exists(node_path): + if not os.path.exists(common.NODE_PATH): python_utils.PRINT('Installing Node.js') if os_info[0] == 'Darwin': if os_info[4] == 'x86_64': diff --git a/scripts/start.sh b/scripts/start.sh deleted file mode 100755 index 07028f85ed13..000000000000 --- a/scripts/start.sh +++ /dev/null @@ -1,170 +0,0 @@ -#!/usr/bin/env bash - -# Copyright 2014 The Oppia Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS-IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -########################################################################## - -# INSTRUCTIONS: -# -# This script starts up a development server running Oppia. It installs any -# missing third-party dependencies and starts up a local GAE development -# server. -# -# Run the script from the oppia root folder: -# -# bash scripts/start.sh -# -# Note that the root folder MUST be named 'oppia'. 
- -if [ -z "$BASH_VERSION" ] -then - echo "" - echo " Please run me using bash: " - echo "" - echo " bash $0" - echo "" - exit 1 -fi - -if [ -e "/etc/is_vagrant_vm" ] -then - source $(dirname $0)/vagrant_lock.sh || exit 1 -fi - -set -e -source $(dirname $0)/setup.sh || exit 1 -source $(dirname $0)/setup_gae.sh || exit 1 -set -- "${remaining_params[@]}" - -# Install third party dependencies. -bash scripts/install_third_party.sh - -echo 'Oppia setup complete!' - -# Check that there isn't a server already running. -if ( nc -vz localhost 8181 >/dev/null 2>&1 ); then - echo "" - echo " WARNING" - echo " Could not start new server. There is already an existing server" - echo " running at port 8181." - echo "" - exit 1 -fi - -# Argument passed to dev_appserver.py to indicate whether or not to -# clear the datastore. -CLEAR_DATASTORE_ARG="--clear_datastore=true" -ENABLE_CONSOLE_ARG="" - -# Argument passed to feconf.py to help choose production templates folder. -FORCE_PROD_MODE=False -START_BROWSER=True -for arg in "$@"; do - if [ "$arg" == "--save_datastore" ]; then - CLEAR_DATASTORE_ARG="" - fi - if [ "$arg" == "--enable_console" ]; then - ENABLE_CONSOLE_ARG="--enable_console=true" - fi - # Used to emulate running Oppia in a production environment. 
- if [ "$arg" == "--prod_env" ]; then - FORCE_PROD_MODE=True - fi - if [ "$arg" == "--no-browser" ]; then - START_BROWSER=False - fi -done - -if [[ "$FORCE_PROD_MODE" == "True" ]]; then - constants_env_variable="\"DEV_MODE\": false" - sed -i.bak -e s/"\"DEV_MODE\": .*"/"$constants_env_variable"/ assets/constants.js - $PYTHON_CMD scripts/build.py --prod_env --enable_watcher - APP_YAML_FILEPATH="app.yaml" -else - constants_env_variable="\"DEV_MODE\": true" - sed -i.bak -e s/"\"DEV_MODE\": .*"/"$constants_env_variable"/ assets/constants.js - $PYTHON_CMD scripts/build.py --enable_watcher - APP_YAML_FILEPATH="app_dev.yaml" -fi - -# Delete the modified feconf.py file(-i.bak) -rm assets/constants.js.bak - -# Set up a local dev instance. -# TODO(sll): do this in a new shell. -# To turn emailing on, add the option '--enable_sendmail=yes' and change the relevant -# settings in feconf.py. Be careful with this -- you do not want to spam people -# accidentally! - -if ! [[ "$FORCE_PROD_MODE" == "True" ]]; then - ($NODE_PATH/bin/node $NODE_MODULE_DIR/gulp/bin/gulp.js watch)& - # In prod mode webpack is launched through scripts/build.py - echo Compiling webpack... - $NODE_MODULE_DIR/webpack/bin/webpack.js --config webpack.dev.config.ts - ($NODE_MODULE_DIR/webpack/bin/webpack.js --config webpack.dev.config.ts --watch)& -fi -echo Starting GAE development server -(python $GOOGLE_APP_ENGINE_HOME/dev_appserver.py $CLEAR_DATASTORE_ARG $ENABLE_CONSOLE_ARG --admin_host 0.0.0.0 --admin_port 8000 --host 0.0.0.0 --port 8181 --skip_sdk_update_check true $APP_YAML_FILEPATH)& - -# Wait for the servers to come up. -while ! nc -vz localhost 8181 >/dev/null 2>&1; do sleep 1; done - -# Launch a browser window. -if [ ${OS} == "Linux" ] && [ "$START_BROWSER" == "True" ]; then - detect_virtualbox="$(ls -1 /dev/disk/by-id/)" - if [[ $detect_virtualbox = *"VBOX"* ]]; then - echo "" - echo " INFORMATION" - echo " Setting up a local development server. 
You can access this server" - echo " by navigating to localhost:8181 in a browser window." - echo "" - else - echo "" - echo " INFORMATION" - echo " Setting up a local development server at localhost:8181. Opening a" - echo " default browser window pointing to this server." - echo "" - (sleep 5; xdg-open http://localhost:8181/ )& - fi -elif [ ${OS} == "Darwin" ] && [ "$START_BROWSER" == "True" ]; then - echo "" - echo " INFORMATION" - echo " Setting up a local development server at localhost:8181. Opening a" - echo " default browser window pointing to this server." - echo "" - (sleep 5; open http://localhost:8181/ )& -else - echo "" - echo " INFORMATION" - echo " Setting up a local development server. You can access this server" - echo " by navigating to localhost:8181 in a browser window." - echo "" -fi - -echo Done! - -# Function for waiting for the servers to go down. -function cleanup { - echo "" - echo " INFORMATION" - echo " Cleaning up the servers." - echo "" - while ( nc -vz localhost 8181 >/dev/null 2>&1 ); do sleep 1; done -} - -# Runs cleanup function on exit. -trap cleanup Exit - -wait From 34828649393c1a5b3232152b09d123a3dfa55a60 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Sun, 25 Aug 2019 22:10:48 +0530 Subject: [PATCH 025/141] convert --- scripts/start.py | 206 +++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 206 insertions(+) create mode 100644 scripts/start.py diff --git a/scripts/start.py b/scripts/start.py new file mode 100644 index 000000000000..cb209b6aec60 --- /dev/null +++ b/scripts/start.py @@ -0,0 +1,206 @@ +# Copyright 2019 The Oppia Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an 'AS-IS' BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""INSTRUCTIONS: + +This script starts up a development server running Oppia. It installs any +missing third-party dependencies and starts up a local GAE development +server. + +Run the script from the oppia root folder: + + python -m scripts.start + +Note that the root folder MUST be named 'oppia'. +""" +from __future__ import absolute_import # pylint: disable=import-only-modules + +import argparse +import atexit +import fileinput +import os +import re +import subprocess +import time + +import python_utils + +from . import common +from . import install_third_party_libs +from . import setup +from . import setup_gae +from . 
import vagrant_lock + +_PARSER = argparse.ArgumentParser() +_PARSER.add_argument( + '--save_datastore', + help='optional; if specified, does not clear the datastore.', + action='store_true') +_PARSER.add_argument( + '--enable_console', + help='optional; if specified, enables console.', + action='store_true') +_PARSER.add_argument( + '--prod_env', + help='optional; if specified, runs Oppia in a production environment.', + action='store_true') +_PARSER.add_argument( + '--no_browser', + help='optional; if specified, does not open a browser.', + action='store_true') + + +def cleanup(): + """Function for waiting for the servers to go down.""" + python_utils.PRINT('') + python_utils.PRINT('INFORMATION') + python_utils.PRINT('Cleaning up the servers.') + python_utils.PRINT('') + while not common.is_port_open(8181): + time.sleep(1) + + +def main(): + """Starts up a development server running Oppia.""" + if os.path.isfile('/etc/is_vagrant_vm'): + vagrant_lock.main() + + setup.main() + setup_gae.main() + + # Runs cleanup function on exit. + atexit.register(cleanup) + + # Install third party dependencies. + install_third_party_libs.main() + + python_utils.PRINT('Oppia setup complete!') + + # Check that there isn't a server already running. + if not common.is_port_open(8181): + python_utils.PRINT('') + python_utils.PRINT('WARNING') + python_utils.PRINT( + 'Could not start new server. 
There is already an existing server') + python_utils.PRINT('running at port 8181.') + python_utils.PRINT('') + + parsed_args = _PARSER.parse_args() + clear_datastore_arg = ( + '' if parsed_args.save_datastore else '--clear_datastore=true') + enable_console_arg = ( + '--enable_console=true' if parsed_args.enable_console else '') + + if parsed_args.prod_env: + constants_env_variable = '\'DEV_MODE\': false' + for line in fileinput.input( + files=['assets/constants.js'], inplace=True): + python_utils.PRINT( + re.sub( + '\'DEV_MODE\': .*', constants_env_variable, line), end='') + subprocess.call( + 'python scripts/build.py --prod_env --enable_watcher'.split()) + app_yaml_filepath = 'app.yaml' + else: + constants_env_variable = '\'DEV_MODE\': true' + for line in fileinput.input( + files=['assets/constants.js'], inplace=True): + python_utils.PRINT( + re.sub( + '\'DEV_MODE\': .*', constants_env_variable, line), end='') + subprocess.call('python scripts/build.py --enable_watcher'.split()) + app_yaml_filepath = 'app_dev.yaml' + + # Delete the modified feconf.py file(-i.bak) + os.remove('assets/constants.js.bak') + + # Set up a local dev instance. + # TODO(sll): do this in a new shell. + # To turn emailing on, add the option '--enable_sendmail=yes' and change the + # relevant settings in feconf.py. Be careful with this -- you do not want to + # spam people accidentally. 
+ if not parsed_args.prod_env: + subprocess.call(( + '%s/bin/node node_modules/gulp/bin/gulp.js watch)&' + % common.NODE_PATH).split()) + # In prod mode webpack is launched through scripts/build.py + python_utils.PRINT('Compiling webpack...') + subprocess.call( + 'node_modules/webpack/bin/webpack.js --config webpack.dev.config.ts' + .split()) + subprocess.call( + 'node_modules/webpack/bin/webpack.js --config webpack.dev.config.ts' + ' --watch)&'.split()) + + python_utils.PRINT('Starting GAE development server') + subprocess.call(( + 'python %s/dev_appserver.py %s %s --admin_host 0.0.0.0 --admin_port ' + '8000 --host 0.0.0.0 --port 8181 --skip_sdk_update_check true %s)&' + % ( + common.GOOGLE_APP_ENGINE_HOME, clear_datastore_arg, + enable_console_arg, app_yaml_filepath)).split()) + + # Wait for the servers to come up. + while common.is_port_open(8181): + time.sleep(1) + + os_info = os.uname() + # Launch a browser window. + if os_info[0] == 'Linux' and not parsed_args.no_browser: + detect_virtualbox_pattern = re.compile('.*VBOX.*') + if list(filter( + detect_virtualbox_pattern.match, + os.listdir('/dev/disk/by-id/'))): + python_utils.PRINT('') + python_utils.PRINT('INFORMATION') + python_utils.PRINT( + 'Setting up a local development server. You can access this ' + 'server') + python_utils.PRINT( + 'by navigating to localhost:8181 in a browser window.') + python_utils.PRINT('') + else: + python_utils.PRINT('') + python_utils.PRINT('INFORMATION') + python_utils.PRINT( + 'Setting up a local development server at localhost:8181. ' + 'Opening a') + python_utils.PRINT('default browser window pointing to this server') + python_utils.PRINT('') + time.sleep(5) + subprocess.call('xdg-open http://localhost:8181/ )&'.split()) + elif os_info[0] == 'Darwin' and not parsed_args.no_browser: + python_utils.PRINT('') + python_utils.PRINT('INFORMATION') + python_utils.PRINT( + 'Setting up a local development server at localhost:8181. 
' + 'Opening a') + python_utils.PRINT('default browser window pointing to this server.') + python_utils.PRINT('') + time.sleep(5) + subprocess.call('open http://localhost:8181/ )&'.split()) + else: + python_utils.PRINT('') + python_utils.PRINT('INFORMATION') + python_utils.PRINT( + 'Setting up a local development server. You can access this server') + python_utils.PRINT( + 'by navigating to localhost:8181 in a browser window.') + python_utils.PRINT('') + + python_utils.PRINT('Done!') + + +if __name__ == '__main__': + main() From 0ab00ec10b81eab90575c2bbfe148dcbb979ab6d Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Sun, 25 Aug 2019 23:41:36 +0530 Subject: [PATCH 026/141] fix --- scripts/install_third_party_libs.py | 4 ++-- scripts/run_e2e_tests.py | 19 ++++++++-------- scripts/run_frontend_tests.py | 34 ++++++++++++++++------------- scripts/run_performance_tests.py | 8 ++++--- scripts/setup.py | 2 +- scripts/start.py | 33 +++++++++++++++------------- 6 files changed, 55 insertions(+), 45 deletions(-) diff --git a/scripts/install_third_party_libs.py b/scripts/install_third_party_libs.py index a0d9c35e569c..ebd5c117f5e3 100644 --- a/scripts/install_third_party_libs.py +++ b/scripts/install_third_party_libs.py @@ -238,11 +238,11 @@ def main(): # Install pre-commit script. python_utils.PRINT('Installing pre-commit hook for git') - subprocess.call('python scripts/pre_commit_hook.py --install'.split()) + subprocess.call('python -m scripts.pre_commit_hook --install'.split()) # Install pre-push script. 
python_utils.PRINT('Installing pre-push hook for git') - subprocess.call('python scripts/pre_push_hook.py --install'.split()) + subprocess.call('python -m scripts.pre_push_hook --install'.split()) if __name__ == '__main__': diff --git a/scripts/run_e2e_tests.py b/scripts/run_e2e_tests.py index 8ad0babc56e4..85a6287f6415 100644 --- a/scripts/run_e2e_tests.py +++ b/scripts/run_e2e_tests.py @@ -149,7 +149,7 @@ def main(): python_utils.PRINT( re.sub(r'\'DEV_MODE\': .*', constants_env_variable, line), end='') - subprocess.call('python scripts/build.py --prod_env'.split()) + subprocess.call('python -m scripts.build --prod_env'.split()) app_yaml_filepath = 'app.yaml' else: constants_env_variable = '\'DEV_MODE\': true' @@ -161,9 +161,6 @@ def main(): build.build() app_yaml_filepath = 'app_dev.yaml' - # Delete the modified feconf.py file(-i.bak) - os.remove('assets/constants.js.bak') - # Start a selenium server using chromedriver 2.41. # The 'detach' option continues the flow once the server is up and runnning. # The 'quiet' option prints only the necessary information about the server @@ -179,14 +176,15 @@ def main(): # info logs to stderr so we discard them. # TODO(jacob): Find a webdriver or selenium argument that controls log # level. - subprocess.call( - 'node_modules/.bin/webdriver-manager start 2>/dev/null)&'.split()) + background_processes = [] + background_processes.append(subprocess.Popen( + 'node_modules/.bin/webdriver-manager start 2>/dev/null'.split())) # Start a demo server. - subprocess.call( + background_processes.append(subprocess.Popen( ('python %s/dev_appserver.py --host=0.0.0.0 --port=9001 ' '--clear_datastore=yes --dev_appserver_log_level=critical ' - '--log_level=critical --skip_sdk_update_check=true $%s)&' - % (common.GOOGLE_APP_ENGINE_HOME, app_yaml_filepath)).split()) + '--log_level=critical --skip_sdk_update_check=true $%s' + % (common.GOOGLE_APP_ENGINE_HOME, app_yaml_filepath)).split())) # Wait for the servers to come up. 
while common.is_port_open(4444) or common.is_port_open(9001): @@ -232,6 +230,9 @@ def main(): parsed_args.sharding, parsed_args.sharding_instances, parsed_args.suite)).split()) + for process in background_processes: + process.wait() + if __name__ == '__main__': main() diff --git a/scripts/run_frontend_tests.py b/scripts/run_frontend_tests.py index 5fca47587a7b..2d7e0259771d 100644 --- a/scripts/run_frontend_tests.py +++ b/scripts/run_frontend_tests.py @@ -27,11 +27,14 @@ Note: You can replace 'it' with 'fit' or 'describe' with 'fdescribe' to run a single test or test suite. """ +from __future__ import absolute_import # pylint: disable=import-only-modules import argparse import os import subprocess +import python_utils + from . import build from . import setup from . import setup_gae @@ -58,16 +61,17 @@ def main(): parsed_args = _PARSER.parse_args() setup.maybe_install_dependencies( parsed_args.skip_install, parsed_args.run_minified_tests) - print '' - print ' View interactive frontend test coverage reports by navigating to' - print '' - print ' ../karma_coverage_reports' - print '' - print ' on your filesystem.' 
- print '' - print '' - print ' Running test in development environment' - print '' + python_utils.PRINT('') + python_utils.PRINT( + 'View interactive frontend test coverage reports by navigating to') + python_utils.PRINT('') + python_utils.PRINT(' ../karma_coverage_reports') + python_utils.PRINT('') + python_utils.PRINT(' on your filesystem.') + python_utils.PRINT('') + python_utils.PRINT('') + python_utils.PRINT('Running test in development environment') + python_utils.PRINT('') build.build() @@ -77,12 +81,12 @@ def main(): subprocess.call(start_tests_cmd.split()) if parsed_args.run_minified_tests is True: - print '' - print ' Running test in production environment' - print '' + python_utils.PRINT('') + python_utils.PRINT('Running test in production environment') + python_utils.PRINT('') subprocess.call( - 'python scripts/build.py --prod_env --minify_third_party_libs_only' + 'python -m scripts.build --prod_env --minify_third_party_libs_only' .split()) start_tests_cmd = ( @@ -90,7 +94,7 @@ def main(): 'core/tests/karma.conf.ts --prodEnv' % xvfb_prefix) subprocess.call(start_tests_cmd.split()) - print 'Done!' + python_utils.PRINT('Done!') if __name__ == '__main__': diff --git a/scripts/run_performance_tests.py b/scripts/run_performance_tests.py index b96b8395d3ec..e239c1a5efad 100644 --- a/scripts/run_performance_tests.py +++ b/scripts/run_performance_tests.py @@ -77,7 +77,7 @@ def run_performance_test(test_name, xvfb_prefix): xvfb_prefix: str. The XVFB prefix. """ subprocess.call(( - '%s python scripts/backend_tests.py ' + '%s python -m scripts.backend_tests ' '--test_target=core.tests.performance_tests.%s' % (xvfb_prefix, test_name)).split()) @@ -111,10 +111,10 @@ def main(): os.chmod(browsermob_proxy_path, 744) # Start a demo server. 
- subprocess.call(( + background_process = subprocess.Popen(( 'python %s/dev_appserver.py --host=0.0.0.0 --port=9501 ' '--clear_datastore=yes --dev_appserver_log_level=critical ' - '--log_level=critical --skip_sdk_update_check=true app_dev.yaml)&' + '--log_level=critical --skip_sdk_update_check=true app_dev.yaml' % common.GOOGLE_APP_ENGINE_HOME).split()) # Wait for the servers to come up. @@ -151,6 +151,8 @@ def main(): clean.delete_file('bmp.log') clean.delete_file('server.log') + background_process.wait() + if __name__ == '__main__': main() diff --git a/scripts/setup.py b/scripts/setup.py index fe73744de9b1..30b2c3e2d950 100644 --- a/scripts/setup.py +++ b/scripts/setup.py @@ -71,7 +71,7 @@ def maybe_install_dependencies( python_utils.PRINT( 'Running build task with concatenation and minification') python_utils.PRINT('') - subprocess.call('python scripts/build.py --prod_env'.split()) + subprocess.call('python -m scripts.build --prod_env'.split()) # This function takes a command for python as its only input. diff --git a/scripts/start.py b/scripts/start.py index cb209b6aec60..36d10ba76a48 100644 --- a/scripts/start.py +++ b/scripts/start.py @@ -110,7 +110,7 @@ def main(): re.sub( '\'DEV_MODE\': .*', constants_env_variable, line), end='') subprocess.call( - 'python scripts/build.py --prod_env --enable_watcher'.split()) + 'python -m scripts.build --prod_env --enable_watcher'.split()) app_yaml_filepath = 'app.yaml' else: constants_env_variable = '\'DEV_MODE\': true' @@ -119,37 +119,35 @@ def main(): python_utils.PRINT( re.sub( '\'DEV_MODE\': .*', constants_env_variable, line), end='') - subprocess.call('python scripts/build.py --enable_watcher'.split()) + subprocess.call('python -m scripts.build --enable_watcher'.split()) app_yaml_filepath = 'app_dev.yaml' - # Delete the modified feconf.py file(-i.bak) - os.remove('assets/constants.js.bak') - # Set up a local dev instance. # TODO(sll): do this in a new shell. 
# To turn emailing on, add the option '--enable_sendmail=yes' and change the # relevant settings in feconf.py. Be careful with this -- you do not want to # spam people accidentally. + background_processes = [] if not parsed_args.prod_env: - subprocess.call(( - '%s/bin/node node_modules/gulp/bin/gulp.js watch)&' - % common.NODE_PATH).split()) + background_processes.append(subprocess.Popen(( + '%s/bin/node node_modules/gulp/bin/gulp.js watch' + % common.NODE_PATH).split())) # In prod mode webpack is launched through scripts/build.py python_utils.PRINT('Compiling webpack...') subprocess.call( 'node_modules/webpack/bin/webpack.js --config webpack.dev.config.ts' .split()) - subprocess.call( + background_processes.append(subprocess.Popen( 'node_modules/webpack/bin/webpack.js --config webpack.dev.config.ts' - ' --watch)&'.split()) + ' --watch'.split())) python_utils.PRINT('Starting GAE development server') - subprocess.call(( + background_processes.append(subprocess.Popen(( 'python %s/dev_appserver.py %s %s --admin_host 0.0.0.0 --admin_port ' - '8000 --host 0.0.0.0 --port 8181 --skip_sdk_update_check true %s)&' + '8000 --host 0.0.0.0 --port 8181 --skip_sdk_update_check true %s' % ( common.GOOGLE_APP_ENGINE_HOME, clear_datastore_arg, - enable_console_arg, app_yaml_filepath)).split()) + enable_console_arg, app_yaml_filepath)).split())) # Wait for the servers to come up. 
while common.is_port_open(8181): @@ -179,7 +177,8 @@ def main(): python_utils.PRINT('default browser window pointing to this server') python_utils.PRINT('') time.sleep(5) - subprocess.call('xdg-open http://localhost:8181/ )&'.split()) + background_processes.append( + subprocess.Popen('xdg-open http://localhost:8181/'.split())) elif os_info[0] == 'Darwin' and not parsed_args.no_browser: python_utils.PRINT('') python_utils.PRINT('INFORMATION') @@ -189,7 +188,8 @@ def main(): python_utils.PRINT('default browser window pointing to this server.') python_utils.PRINT('') time.sleep(5) - subprocess.call('open http://localhost:8181/ )&'.split()) + background_processes.append( + subprocess.Popen('open http://localhost:8181/'.split())) else: python_utils.PRINT('') python_utils.PRINT('INFORMATION') @@ -201,6 +201,9 @@ def main(): python_utils.PRINT('Done!') + for process in background_processes: + process.wait() + if __name__ == '__main__': main() From cbb09a7f8eed894b4716ac5a29e6d00fe702f7de Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Mon, 26 Aug 2019 00:05:57 +0530 Subject: [PATCH 027/141] address comments --- scripts/common.py | 1 + scripts/install_third_party_libs.py | 135 ++++++++++++++-------------- 2 files changed, 69 insertions(+), 67 deletions(-) diff --git a/scripts/common.py b/scripts/common.py index 59d4a7c9ebf1..a2f60dae9b75 100644 --- a/scripts/common.py +++ b/scripts/common.py @@ -32,6 +32,7 @@ RELEASE_BRANCH_NAME_PREFIX = 'release-' CURR_DIR = os.path.abspath(os.getcwd()) OPPIA_TOOLS_DIR = os.path.join(CURR_DIR, '..', 'oppia_tools') +THIRD_PARTY_DIR = os.path.join('.', 'third_party') GOOGLE_APP_ENGINE_HOME = os.path.join( OPPIA_TOOLS_DIR, 'google_appengine_1.9.67/google_appengine') GOOGLE_CLOUD_SDK_HOME = os.path.join( diff --git a/scripts/install_third_party_libs.py b/scripts/install_third_party_libs.py index ebd5c117f5e3..bae65b6044d0 100644 --- a/scripts/install_third_party_libs.py +++ b/scripts/install_third_party_libs.py @@ -29,7 +29,15 @@ from 
. import install_third_party from . import setup -OPPIA_DIR = os.getcwd() +_PARSER = argparse.ArgumentParser() +_PARSER.add_argument( + '--nojsrepl', + help='optional; if specified, skips installation of skulpt.', + action='store_true') +_PARSER.add_argument( + '--noskulpt', + help='optional; if specified, skips installation of skulpt.', + action='store_true') @contextlib.contextmanager @@ -94,77 +102,27 @@ def pip_install(package, version, install_path): 'install', '%s==%s' % (package, version), '--target', install_path]) -def main(): - """Install third-party libraries for Oppia.""" - - _parser = argparse.ArgumentParser() - _parser.add_argument( - '--nojsrepl', - help='optional; if specified, skips installation of skulpt.', - action='store_true') - _parser.add_argument( - '--noskulpt', - help='optional; if specified, skips installation of skulpt.', - action='store_true') - - third_party_dir = os.path.join('.', 'third_party') - - pip_dependencies = [ - ('future', '0.17.1', third_party_dir), - ('pylint', '1.9.4', common.OPPIA_TOOLS_DIR), - ('Pillow', '6.0.0', common.OPPIA_TOOLS_DIR), - ('pylint-quotes', '0.2.1', common.OPPIA_TOOLS_DIR), - ('webtest', '2.0.33', common.OPPIA_TOOLS_DIR), - ('isort', '4.3.20', common.OPPIA_TOOLS_DIR), - ('pycodestyle', '2.5.0', common.OPPIA_TOOLS_DIR), - ('esprima', '4.0.1', common.OPPIA_TOOLS_DIR), - ('browsermob-proxy', '0.8.0', common.OPPIA_TOOLS_DIR), - ('selenium', '3.13.0', common.OPPIA_TOOLS_DIR), - ('PyGithub', '1.43.7', common.OPPIA_TOOLS_DIR), - ('psutil', '5.6.3', common.OPPIA_TOOLS_DIR), - ] - - for package, version, path in pip_dependencies: - python_utils.PRINT( - 'Checking if %s is installed in %s' % (package, path)) - - exact_lib_path = os.path.join(path, '%s-%s' % (package, version)) - if not os.path.exists(exact_lib_path): - python_utils.PRINT('Installing %s' % package) - pip_install(package, version, exact_lib_path) - - setup.main() - - # Download and install required JS and zip files. 
- python_utils.PRINT('Installing third-party JS libraries and zip files.') - install_third_party.main() - - # Install third-party node modules needed for the build process. - subprocess.call(( - '%s/bin/npm install --only=dev' % common.NODE_PATH).split()) - # This line removes the 'npm ERR! missing:' messages. For reference, see - # this thread: https://github.com/npm/npm/issues/19393#issuecomment- - # 374076889. - subprocess.call(('%s/bin/npm dedupe' % common.NODE_PATH).split()) - - # Download and install Skulpt. Skulpt is built using a Python script - # included within the Skulpt repository (skulpt.py). This script normally - # requires GitPython, however the patches to it below - # (with the sed operations) lead to it no longer being required. The Python - # script is used to avoid having to manually recreate the Skulpt dist build - # process in install_third_party.py. Note that skulpt.py will issue a - # warning saying its dist command will not work properly without GitPython, - # but it does actually work due to the patches. +def install_skulpt(): + """Download and install Skulpt. Skulpt is built using a Python script + included within the Skulpt repository (skulpt.py). This script normally + requires GitPython, however the patches to it below + (with the sed operations) lead to it no longer being required. The Python + script is used to avoid having to manually recreate the Skulpt dist build + process in install_third_party.py. Note that skulpt.py will issue a + warning saying its dist command will not work properly without GitPython, + but it does actually work due to the patches. + """ # We use parse_known_args() to ignore the extra arguments which maybe used # while calling this method from other Python scripts. 
- parsed_args, _ = _parser.parse_known_args() + parsed_args, _ = _PARSER.parse_known_args() no_skulpt = parsed_args.nojsrepl or parsed_args.noskulpt python_utils.PRINT('Checking whether Skulpt is installed in third_party') if not os.path.exists( os.path.join( - third_party_dir, 'static/skulpt-0.10.0')) and not no_skulpt: + common.THIRD_PARTY_DIR, + 'static/skulpt-0.10.0')) and not no_skulpt: if not os.path.exists( os.path.join(common.OPPIA_TOOLS_DIR, 'skulpt-0.10.0')): python_utils.PRINT('Downloading Skulpt') @@ -227,15 +185,58 @@ def main(): 'dist'.split()) # Return to the Oppia root folder. - os.chdir(OPPIA_DIR) + os.chdir(common.CURR_DIR) # Move the build directory to the static resources folder. - os.makedirs(os.path.join(third_party_dir, 'static/skulpt-0.10.0')) + os.makedirs( + os.path.join(common.THIRD_PARTY_DIR, 'static/skulpt-0.10.0')) shutil.copytree( os.path.join( common.OPPIA_TOOLS_DIR, 'skulpt-0.10.0/skulpt/dist/'), - os.path.join(third_party_dir, 'static/skulpt-0.10.0')) + os.path.join(common.THIRD_PARTY_DIR, 'static/skulpt-0.10.0')) + + +def main(): + """Install third-party libraries for Oppia.""" + pip_dependencies = [ + ('future', '0.17.1', common.THIRD_PARTY_DIR), + ('pylint', '1.9.4', common.OPPIA_TOOLS_DIR), + ('Pillow', '6.0.0', common.OPPIA_TOOLS_DIR), + ('pylint-quotes', '0.2.1', common.OPPIA_TOOLS_DIR), + ('webtest', '2.0.33', common.OPPIA_TOOLS_DIR), + ('isort', '4.3.20', common.OPPIA_TOOLS_DIR), + ('pycodestyle', '2.5.0', common.OPPIA_TOOLS_DIR), + ('esprima', '4.0.1', common.OPPIA_TOOLS_DIR), + ('browsermob-proxy', '0.8.0', common.OPPIA_TOOLS_DIR), + ('selenium', '3.13.0', common.OPPIA_TOOLS_DIR), + ('PyGithub', '1.43.7', common.OPPIA_TOOLS_DIR), + ('psutil', '5.6.3', common.OPPIA_TOOLS_DIR), + ] + + for package, version, path in pip_dependencies: + python_utils.PRINT( + 'Checking if %s is installed in %s' % (package, path)) + + exact_lib_path = os.path.join(path, '%s-%s' % (package, version)) + if not os.path.exists(exact_lib_path): + 
python_utils.PRINT('Installing %s' % package) + pip_install(package, version, exact_lib_path) + + setup.main() + + # Download and install required JS and zip files. + python_utils.PRINT('Installing third-party JS libraries and zip files.') + install_third_party.main() + + # Install third-party node modules needed for the build process. + subprocess.call(( + '%s/bin/npm install --only=dev' % common.NODE_PATH).split()) + # This line removes the 'npm ERR! missing:' messages. For reference, see + # this thread: https://github.com/npm/npm/issues/19393#issuecomment- + # 374076889. + subprocess.call(('%s/bin/npm dedupe' % common.NODE_PATH).split()) + install_skulpt() # Install pre-commit script. python_utils.PRINT('Installing pre-commit hook for git') subprocess.call('python -m scripts.pre_commit_hook --install'.split()) From d2ed087c9c9ebf323e7ec2f37c3184ece9c07ff9 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Mon, 26 Aug 2019 02:30:33 +0530 Subject: [PATCH 028/141] convert --- scripts/common.py | 16 +++++ scripts/run_backend_tests.py | 7 +- scripts/run_frontend_tests.py | 7 +- scripts/run_presubmit_checks.py | 110 ++++++++++++++++++++++++++++++++ 4 files changed, 134 insertions(+), 6 deletions(-) create mode 100644 scripts/run_presubmit_checks.py diff --git a/scripts/common.py b/scripts/common.py index a2f60dae9b75..7a78b9cf45e5 100644 --- a/scripts/common.py +++ b/scripts/common.py @@ -38,6 +38,7 @@ GOOGLE_CLOUD_SDK_HOME = os.path.join( OPPIA_TOOLS_DIR, 'google-cloud-sdk-251.0.0/google-cloud-sdk') NODE_PATH = os.path.join(OPPIA_TOOLS_DIR, 'node-10.15.3') +FRONTEND_DIR = 'core/templates/dev/head' def ensure_directory_exists(d): @@ -209,6 +210,21 @@ def kill_process(port): process.send_signal(signal.SIGTERM) +def run_command(command): + """Runs a subprocess command. + + Args: + command: str. The command to be run. + + Returns: + str. The command output. + + Raises: + CalledProcessError. Raised when the command fails. 
+ """ + return subprocess.check_output(command.split()) + + class CD(python_utils.OBJECT): """Context manager for changing the current working directory.""" diff --git a/scripts/run_backend_tests.py b/scripts/run_backend_tests.py index 48938c1be9f6..de68ada3b9ea 100644 --- a/scripts/run_backend_tests.py +++ b/scripts/run_backend_tests.py @@ -59,6 +59,7 @@ import argparse import os import subprocess +import sys import python_utils @@ -76,7 +77,7 @@ action='store_true') -def main(): +def main(argv): """Runs the backend tests.""" setup.main() setup_gae.main() @@ -87,7 +88,7 @@ def main(): coverage_home = os.path.join(common.OPPIA_TOOLS_DIR, 'coverage-4.5.4') coverage_path = os.path.join(coverage_home, 'coverage') - parsed_args, _ = _PARSER.parse_known_args() + parsed_args, _ = _PARSER.parse_known_args(args=argv) if parsed_args.generate_coverage_report: python_utils.PRINT( 'Checking whether coverage is installed in %s' @@ -126,4 +127,4 @@ def main(): if __name__ == '__main__': - main() + main(sys.argv) diff --git a/scripts/run_frontend_tests.py b/scripts/run_frontend_tests.py index 2d7e0259771d..8391787f764a 100644 --- a/scripts/run_frontend_tests.py +++ b/scripts/run_frontend_tests.py @@ -32,6 +32,7 @@ import argparse import os import subprocess +import sys import python_utils @@ -51,14 +52,14 @@ action='store_true') -def main(): +def main(argv): """Runs the frontend tests.""" setup.main() setup_gae.main() xvfb_prefix = '' if os.environ.get('VAGRANT') or os.path.isfile('/etc/is_vagrant_vm'): xvfb_prefix = '/usr/bin/xvfb-run' - parsed_args = _PARSER.parse_args() + parsed_args = _PARSER.parse_args(args=argv) setup.maybe_install_dependencies( parsed_args.skip_install, parsed_args.run_minified_tests) python_utils.PRINT('') @@ -98,4 +99,4 @@ def main(): if __name__ == '__main__': - main() + main(sys.argv) diff --git a/scripts/run_presubmit_checks.py b/scripts/run_presubmit_checks.py new file mode 100644 index 000000000000..5b648931bd90 --- /dev/null +++ 
b/scripts/run_presubmit_checks.py @@ -0,0 +1,110 @@ +# Copyright 2019 The Oppia Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an 'AS-IS' BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""INSTRUCTIONS: + +Run this script from the oppia root folder prior to opening a PR: + python -m scripts.run_presubmit_checks + +It runs the following tests in all cases. +- Javascript and Python Linting +- Backend Python tests + +Only when frontend files are changed will it run Frontend Karma unit tests. + +If any of these tests result in errors, this script will terminate. + +Note: The test scripts are arranged in increasing order of time taken. This +enables a broken build to be detected as quickly as possible. + +===================== +CUSTOMIZATION OPTIONS +===================== + +Set the origin branch to compare against by adding + + --branch=your_branch or -b=your_branch + +By default, if the current branch tip exists on remote origin, +the current branch is compared against its tip on GitHub. +Otherwise it's compared against 'develop'. +""" +from __future__ import absolute_import # pylint: disable=import-only-modules + +import argparse +import sys + +import python_utils + +from . import common +from . import pre_commit_linter +from . import run_backend_tests +from . 
import run_frontend_tests + +_PARSER = argparse.ArgumentParser() +_PARSER.add_argument( + '--branch', '-b', + help='optional; if specified, the origin branch to compare against.') + + +def main(argv): + """Run the presubmit checks.""" + + # Run Javascript and Python linters. + python_utils.PRINT('Linting files since the last commit') + pre_commit_linter.main() + python_utils.PRINT('Linting passed.') + python_utils.PRINT('') + + current_branch = common.run_command('git rev-parse --abbrev-ref HEAD') + + # If the current branch exists on remote origin, matched_branch_num=1 + # else matched_branch_num=0. + matched_branch_num = common.run_command( + 'git ls-remote --heads origin %s | wc -l' % current_branch) + + # Set the origin branch to develop if it's not specified. + parsed_args, _ = _PARSER.parse_known_args(args=argv) + if parsed_args.branch: + branch = parsed_args.branch + elif matched_branch_num == '1': + branch = 'origin/%s' % current_branch + else: + branch = 'develop' + + python_utils.PRINT('Comparing the current branch with %s' % branch) + + all_changed_files = common.run_command( + 'git diff --cached --name-only --diff-filter=ACM %s' % branch) + + if common.FRONTEND_DIR in all_changed_files: + # Run frontend unit tests. + python_utils.PRINT('Running frontend unit tests') + run_frontend_tests.main(['--run_minified_tests']) + python_utils.PRINT('Frontend tests passed.') + python_utils.PRINT('') + else: + # If files in common.FRONTEND_DIR were not changed, skip the tests. + python_utils.PRINT('No frontend files were changed.') + python_utils.PRINT('Skipped frontend tests') + + # Run backend tests. 
+ python_utils.PRINT('Running backend tests') + run_backend_tests.main([]) + python_utils.PRINT('Backend tests passed.') + python_utils.PRINT('') + + +if __name__ == '__main__': + main(sys.argv) From 1ced71cb26ea5e78347de28ef115d34fb93ecf86 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Mon, 26 Aug 2019 02:32:08 +0530 Subject: [PATCH 029/141] convert --- scripts/run_presubmit_checks.sh | 110 -------------------------------- 1 file changed, 110 deletions(-) delete mode 100644 scripts/run_presubmit_checks.sh diff --git a/scripts/run_presubmit_checks.sh b/scripts/run_presubmit_checks.sh deleted file mode 100644 index 32e5dd800122..000000000000 --- a/scripts/run_presubmit_checks.sh +++ /dev/null @@ -1,110 +0,0 @@ -#!/usr/bin/env bash - -# Copyright 2015 The Oppia Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS-IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -########################################################################## - -# INSTRUCTIONS: -# -# Run this script from the oppia root folder prior to opening a PR: -# bash scripts/run_presubmit_checks.sh -# -# It runs the following tests in all cases. -# - Javascript and Python Linting -# - Backend Python tests -# -# Only when frontend files are changed will it run Frontend Karma unit tests. -# -# If any of these tests result in errors, this script will terminate. -# -# Note: The test scripts are arranged in increasing order of time taken. This -# enables a broken build to be detected as quickly as possible. 
-# -# ===================== -# CUSTOMIZATION OPTIONS -# ===================== -# -# Set the origin branch to compare against by adding -# -# --branch=your_branch or -b=your_branch -# -# By default, if the current branch tip exists on remote origin, -# the current branch is compared against its tip on GitHub. -# Otherwise it's compared against 'develop'. - -if [ -z "$BASH_VERSION" ] -then - echo "" - echo " Please run me using bash: " - echo "" - echo " bash $0" - echo "" - return 1 -fi - -set -e -# Run Javascript and Python linters. -echo 'Linting files since the last commit' -python -m $(dirname $0).pre_commit_linter || exit 1 -echo 'Linting passed.' -echo '' - -# Read arguments from the command line. -for i in "$@" -do -case $i in - -b=*|--branch=*) - ORIGIN_BRANCH=${i#*=} - shift - ;; -esac -done - -CURRENT_BRANCH=$(git rev-parse --abbrev-ref HEAD) -# If the current branch exists on remote origin, MATCHED_BRANCH_NUM=1 -# else MATCHED_BRANCH_NUM=0 -MATCHED_BRANCH_NUM=$(git ls-remote --heads origin $CURRENT_BRANCH | wc -l) -# Set the origin branch to develop if it's not specified. -if [ -n "$ORIGIN_BRANCH" ]; then - BRANCH=$ORIGIN_BRANCH -elif [ $MATCHED_BRANCH_NUM == 1 ]; then - BRANCH=origin/$CURRENT_BRANCH -else - BRANCH=develop -fi - -FRONTEND_DIR='core/templates/dev/head' - -echo "Comparing the current branch with $BRANCH" - -if [ -n "$(git diff --cached --name-only --diff-filter=ACM ${BRANCH} | grep ${FRONTEND_DIR})" ] -then - # Run frontend unit tests. - echo 'Running frontend unit tests' - source $(dirname $0)/run_frontend_tests.sh --run-minified-tests=true || exit 1 - echo 'Frontend tests passed.' - echo '' -else - # If files in FRONTEND_DIR were not changed, skip the tests. - echo 'No frontend files were changed.' - echo 'Skipped frontend tests' -fi - - -# Run backend tests. -echo 'Running backend tests' -source $(dirname $0)/run_backend_tests.sh || exit 1 -echo 'Backend tests passed.' 
-echo '' From 60510aea717650026b477d034b8f83e7de0e9d5c Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Mon, 26 Aug 2019 03:26:08 +0530 Subject: [PATCH 030/141] fix lint --- scripts/backend_tests.py | 6 +++--- scripts/build.py | 7 ++++--- scripts/build_test.py | 6 +++--- scripts/create_expression_parser.py | 2 +- scripts/install_chrome_on_travis.py | 8 +++++--- scripts/install_third_party_libs.py | 16 +++++++++------- scripts/pre_commit_hook.py | 6 +++--- scripts/pre_push_hook.py | 6 +++--- scripts/run_backend_tests.py | 4 ++-- scripts/run_e2e_tests.py | 10 +++++----- scripts/run_frontend_tests.py | 6 ++---- scripts/run_performance_tests.py | 6 +++--- scripts/run_tests.py | 6 +++--- scripts/setup.py | 4 ++-- scripts/start.py | 17 +++++++++-------- scripts/vagrant_lock.py | 6 +++--- 16 files changed, 60 insertions(+), 56 deletions(-) diff --git a/scripts/backend_tests.py b/scripts/backend_tests.py index b70e74513224..d1928a459f2d 100644 --- a/scripts/backend_tests.py +++ b/scripts/backend_tests.py @@ -297,7 +297,7 @@ def _get_test_target_classes(path): return result -def main(): +def main(argv): """Run the tests.""" for directory in DIRS_TO_ADD_TO_SYS_PATH: if not os.path.exists(os.path.dirname(directory)): @@ -307,7 +307,7 @@ def main(): import dev_appserver dev_appserver.fix_sys_path() - parsed_args = _PARSER.parse_args() + parsed_args = _PARSER.parse_args(args=argv) if parsed_args.test_target and parsed_args.test_path: raise Exception('At most one of test_path and test_target ' 'should be specified.') @@ -448,4 +448,4 @@ def main(): if __name__ == '__main__': - main() + main(sys.argv) diff --git a/scripts/build.py b/scripts/build.py index d06bbeeab29f..a55785d7cc81 100644 --- a/scripts/build.py +++ b/scripts/build.py @@ -25,6 +25,7 @@ import re import shutil import subprocess +import sys import threading import python_utils @@ -1329,7 +1330,7 @@ def compile_typescript_files_continuously(project_dir): return -def build(): +def build(argv): """The main 
method of this script. Creates a third-party directory where all the JS and CSS dependencies are @@ -1346,7 +1347,7 @@ def build(): '--enable_watcher', action='store_true', default=False) # We use parse_known_args() to ignore the extra arguments which maybe used # while calling this method from other Python scripts. - options, _ = parser.parse_known_args() + options, _ = parser.parse_known_args(args=argv) # Regenerate /third_party/generated from scratch. safe_delete_directory_tree(THIRD_PARTY_GENERATED_DEV_DIR) build_third_party_libs(THIRD_PARTY_GENERATED_DEV_DIR) @@ -1372,4 +1373,4 @@ def build(): # The 'no coverage' pragma is used as this line is un-testable. This is because # it will only be called when build.py is used as a script. if __name__ == '__main__': # pragma: no cover - build() + build(sys.argv) diff --git a/scripts/build_test.py b/scripts/build_test.py index df9cdef6a0c8..bedf5eca92ca 100644 --- a/scripts/build_test.py +++ b/scripts/build_test.py @@ -909,7 +909,7 @@ def mock_compare_file_count(unused_first_dir, unused_second_dir): with ensure_files_exist_swap, build_using_webpack_swap, ( compile_typescript_files_swap), compare_file_count_swap, args_swap: - build.build() + build.build([]) self.assertEqual(check_function_calls, expected_check_function_calls) @@ -939,7 +939,7 @@ def mock_compile_typescript_files_continuously(unused_project_dir): with ensure_files_exist_swap, ( compile_typescript_files_continuously_swap), args_swap: - build.build() + build.build([]) self.assertEqual(check_function_calls, expected_check_function_calls) @@ -971,7 +971,7 @@ def mock_compile_typescript_files(unused_project_dir): with ensure_files_exist_swap, compile_typescript_files_swap, ( assert_raises_regexp_context_manager), args_swap: - build.build() + build.build([]) self.assertEqual(check_function_calls, expected_check_function_calls) diff --git a/scripts/create_expression_parser.py b/scripts/create_expression_parser.py index 0b7943a4a00e..2cb1eb0ed145 100644 --- 
a/scripts/create_expression_parser.py +++ b/scripts/create_expression_parser.py @@ -42,7 +42,7 @@ def main(): 'core/templates/dev/head/expressions/ExpressionParserService.js') # Install the basic environment, e.g. nodejs. - install_third_party_libs.main() + install_third_party_libs.main([]) python_utils.PRINT( 'Checking whether pegjs is installed in %s' % common.OPPIA_TOOLS_DIR) diff --git a/scripts/install_chrome_on_travis.py b/scripts/install_chrome_on_travis.py index 578a43a61aa6..d9d47e69acbf 100644 --- a/scripts/install_chrome_on_travis.py +++ b/scripts/install_chrome_on_travis.py @@ -18,10 +18,12 @@ settings. It can be found under 'Environment Variables' header here: https://travis-ci.org/oppia/oppia/settings. """ +from __future__ import absolute_import # pylint: disable=import-only-modules import os import subprocess -import urllib + +import python_utils def main(): @@ -38,11 +40,11 @@ def main(): # problem. os.makedirs(os.path.join(home_directory, '.cache/TravisChrome/')) os.chdir(os.path.join(home_directory, '.cache/TravisChrome/')) - urllib.urlretrieve( + python_utils.url_retrieve( chrome_source_url, filename=os.path.basename(chrome_source_url)) os.chdir(oppia_dir) - print 'Installing %s' % travis_chrome_path + python_utils.PRINT('Installing %s' % travis_chrome_path) subprocess.call(('sudo dpkg -i %s' % travis_chrome_path).split()) diff --git a/scripts/install_third_party_libs.py b/scripts/install_third_party_libs.py index bae65b6044d0..759602b8daf6 100644 --- a/scripts/install_third_party_libs.py +++ b/scripts/install_third_party_libs.py @@ -27,6 +27,8 @@ from . import common from . import install_third_party +from . import pre_commit_hook +from . import pre_push_hook from . import setup _PARSER = argparse.ArgumentParser() @@ -102,7 +104,7 @@ def pip_install(package, version, install_path): 'install', '%s==%s' % (package, version), '--target', install_path]) -def install_skulpt(): +def install_skulpt(argv): """Download and install Skulpt. 
Skulpt is built using a Python script included within the Skulpt repository (skulpt.py). This script normally requires GitPython, however the patches to it below @@ -115,7 +117,7 @@ def install_skulpt(): # We use parse_known_args() to ignore the extra arguments which maybe used # while calling this method from other Python scripts. - parsed_args, _ = _PARSER.parse_known_args() + parsed_args, _ = _PARSER.parse_known_args(args=argv) no_skulpt = parsed_args.nojsrepl or parsed_args.noskulpt python_utils.PRINT('Checking whether Skulpt is installed in third_party') @@ -196,7 +198,7 @@ def install_skulpt(): os.path.join(common.THIRD_PARTY_DIR, 'static/skulpt-0.10.0')) -def main(): +def main(argv): """Install third-party libraries for Oppia.""" pip_dependencies = [ ('future', '0.17.1', common.THIRD_PARTY_DIR), @@ -236,15 +238,15 @@ def main(): # 374076889. subprocess.call(('%s/bin/npm dedupe' % common.NODE_PATH).split()) - install_skulpt() + install_skulpt(argv) # Install pre-commit script. python_utils.PRINT('Installing pre-commit hook for git') - subprocess.call('python -m scripts.pre_commit_hook --install'.split()) + pre_commit_hook.main(['--install']) # Install pre-push script. python_utils.PRINT('Installing pre-push hook for git') - subprocess.call('python -m scripts.pre_push_hook --install'.split()) + pre_push_hook.main(['--install']) if __name__ == '__main__': - main() + main(sys.argv) diff --git a/scripts/pre_commit_hook.py b/scripts/pre_commit_hook.py index f1aa322ef2ac..dd585fcaaa2f 100755 --- a/scripts/pre_commit_hook.py +++ b/scripts/pre_commit_hook.py @@ -125,14 +125,14 @@ def _revert_changes_in_package_lock_file(): raise ValueError(err_unstage_cmd) -def main(): +def main(argv): """Main method for pre-commit hook that checks files added/modified in a commit. 
""" parser = argparse.ArgumentParser() parser.add_argument('--install', action='store_true', default=False, help='Install pre_commit_hook to the .git/hooks dir') - args = parser.parse_args() + args, _ = parser.parse_known_args(args=argv) if args.install: _install_hook() sys.exit(0) @@ -149,4 +149,4 @@ def main(): if __name__ == '__main__': - main() + main(sys.argv) diff --git a/scripts/pre_push_hook.py b/scripts/pre_push_hook.py index 004f1d39cbc5..77beb267c3ad 100755 --- a/scripts/pre_push_hook.py +++ b/scripts/pre_push_hook.py @@ -355,7 +355,7 @@ def _does_diff_include_package_json(files_to_lint): return False -def main(): +def main(argv): """Main method for pre-push hook that executes the Python/JS linters on all files that deviate from develop. """ @@ -364,7 +364,7 @@ def main(): parser.add_argument('url', nargs='?', help='provided by git before push') parser.add_argument('--install', action='store_true', default=False, help='Install pre_push_hook to the .git/hooks dir') - args = parser.parse_args() + args, _ = parser.parse_known_args(args=argv) if args.install: _install_hook() sys.exit(0) @@ -407,4 +407,4 @@ def main(): if __name__ == '__main__': - main() + main(sys.argv) diff --git a/scripts/run_backend_tests.py b/scripts/run_backend_tests.py index de68ada3b9ea..5886540b5b6c 100644 --- a/scripts/run_backend_tests.py +++ b/scripts/run_backend_tests.py @@ -109,8 +109,8 @@ def main(argv): 'node_modules/webpack/bin/webpack.js --config webpack.prod.config.ts' .split()) - build.build() - backend_tests.main() + build.build([]) + backend_tests.main([]) if parsed_args.generate_coverage_report: subprocess.call(('python %s combine' % coverage_path).split()) diff --git a/scripts/run_e2e_tests.py b/scripts/run_e2e_tests.py index 85a6287f6415..59591dcc10ae 100644 --- a/scripts/run_e2e_tests.py +++ b/scripts/run_e2e_tests.py @@ -115,14 +115,14 @@ def cleanup(): python_utils.PRINT('Done!') -def main(): +def main(argv): """Runs the end to end tests.""" setup.main() 
setup_gae.main() if os.environ.get('TRAVIS'): install_chrome_on_travis.main() - parsed_args = _PARSER.parse_args() + parsed_args = _PARSER.parse_args(args=argv) setup.maybe_install_dependencies( parsed_args.skip_install, parsed_args.run_minified_tests) @@ -149,7 +149,7 @@ def main(): python_utils.PRINT( re.sub(r'\'DEV_MODE\': .*', constants_env_variable, line), end='') - subprocess.call('python -m scripts.build --prod_env'.split()) + build.build(['--prod_env']) app_yaml_filepath = 'app.yaml' else: constants_env_variable = '\'DEV_MODE\': true' @@ -158,7 +158,7 @@ def main(): python_utils.PRINT( re.sub('\'DEV_MODE\': .*', constants_env_variable, line), end='') - build.build() + build.build([]) app_yaml_filepath = 'app_dev.yaml' # Start a selenium server using chromedriver 2.41. @@ -235,4 +235,4 @@ def main(): if __name__ == '__main__': - main() + main(sys.argv) diff --git a/scripts/run_frontend_tests.py b/scripts/run_frontend_tests.py index 8391787f764a..b4349ccad45e 100644 --- a/scripts/run_frontend_tests.py +++ b/scripts/run_frontend_tests.py @@ -74,7 +74,7 @@ def main(argv): python_utils.PRINT('Running test in development environment') python_utils.PRINT('') - build.build() + build.build([]) start_tests_cmd = ( '%s node_modules/karma/bin/karma start core/tests/karma.conf.ts' @@ -86,9 +86,7 @@ def main(argv): python_utils.PRINT('Running test in production environment') python_utils.PRINT('') - subprocess.call( - 'python -m scripts.build --prod_env --minify_third_party_libs_only' - .split()) + build.build(['--prod_env', '--minify_third_party_libs_only']) start_tests_cmd = ( '%s node_modules/karma/bin/karma start ' diff --git a/scripts/run_performance_tests.py b/scripts/run_performance_tests.py index e239c1a5efad..0e64188cc9ac 100644 --- a/scripts/run_performance_tests.py +++ b/scripts/run_performance_tests.py @@ -82,12 +82,12 @@ def run_performance_test(test_name, xvfb_prefix): % (xvfb_prefix, test_name)).split()) -def main(): +def main(argv): """Main function to 
run the performance tests.""" setup.main() setup_gae.main() - parsed_args = _PARSER.parse_args() + parsed_args = _PARSER.parse_args(args=argv) setup.maybe_install_dependencies( parsed_args.skip_install, parsed_args.run_minified_tests) @@ -155,4 +155,4 @@ def main(): if __name__ == '__main__': - main() + main(sys.argv) diff --git a/scripts/run_tests.py b/scripts/run_tests.py index 531077bc0259..edc25a84d18a 100644 --- a/scripts/run_tests.py +++ b/scripts/run_tests.py @@ -45,19 +45,19 @@ def main(): # Run frontend unit tests. python_utils.PRINT('Running frontend unit tests') - run_frontend_tests.main() + run_frontend_tests.main([]) python_utils.PRINT('Frontend tests passed.') python_utils.PRINT('') # Run backend tests. python_utils.PRINT('Running backend tests') - run_backend_tests.main() + run_backend_tests.main([]) python_utils.PRINT('Backend tests passed.') python_utils.PRINT('') # Run end-to-end tests. python_utils.PRINT('Running end-to-end tests') - run_e2e_tests.main() + run_e2e_tests.main([]) python_utils.PRINT('') python_utils.PRINT( diff --git a/scripts/setup.py b/scripts/setup.py index 30b2c3e2d950..ab4de5668c37 100644 --- a/scripts/setup.py +++ b/scripts/setup.py @@ -64,14 +64,14 @@ def maybe_install_dependencies( python_utils.PRINT('') python_utils.PRINT('Running build task with concatenation only') python_utils.PRINT('') - build.build() + build.build([]) if run_minified_tests is True: python_utils.PRINT('') python_utils.PRINT( 'Running build task with concatenation and minification') python_utils.PRINT('') - subprocess.call('python -m scripts.build --prod_env'.split()) + build.build(['--prod_env']) # This function takes a command for python as its only input. diff --git a/scripts/start.py b/scripts/start.py index 36d10ba76a48..85605b22306a 100644 --- a/scripts/start.py +++ b/scripts/start.py @@ -32,10 +32,12 @@ import os import re import subprocess +import sys import time import python_utils +from . import build from . import common from . 
import install_third_party_libs from . import setup @@ -71,10 +73,10 @@ def cleanup(): time.sleep(1) -def main(): +def main(argv): """Starts up a development server running Oppia.""" if os.path.isfile('/etc/is_vagrant_vm'): - vagrant_lock.main() + vagrant_lock.main([]) setup.main() setup_gae.main() @@ -83,7 +85,7 @@ def main(): atexit.register(cleanup) # Install third party dependencies. - install_third_party_libs.main() + install_third_party_libs.main([]) python_utils.PRINT('Oppia setup complete!') @@ -96,7 +98,7 @@ def main(): python_utils.PRINT('running at port 8181.') python_utils.PRINT('') - parsed_args = _PARSER.parse_args() + parsed_args = _PARSER.parse_args(args=argv) clear_datastore_arg = ( '' if parsed_args.save_datastore else '--clear_datastore=true') enable_console_arg = ( @@ -109,8 +111,7 @@ def main(): python_utils.PRINT( re.sub( '\'DEV_MODE\': .*', constants_env_variable, line), end='') - subprocess.call( - 'python -m scripts.build --prod_env --enable_watcher'.split()) + build.build(['--prod_env', '--enable_watcher']) app_yaml_filepath = 'app.yaml' else: constants_env_variable = '\'DEV_MODE\': true' @@ -119,7 +120,7 @@ def main(): python_utils.PRINT( re.sub( '\'DEV_MODE\': .*', constants_env_variable, line), end='') - subprocess.call('python -m scripts.build --enable_watcher'.split()) + build.build(['--enable_watcher']) app_yaml_filepath = 'app_dev.yaml' # Set up a local dev instance. @@ -206,4 +207,4 @@ def main(): if __name__ == '__main__': - main() + main(sys.argv) diff --git a/scripts/vagrant_lock.py b/scripts/vagrant_lock.py index fed9b7c02388..b60d9071e0f2 100644 --- a/scripts/vagrant_lock.py +++ b/scripts/vagrant_lock.py @@ -30,7 +30,7 @@ from . 
import clean -def main(): +def main(argv): """Creates a lockfile.""" vagrant_lock_file = './.lock' @@ -39,7 +39,7 @@ def main(): '--nolock', help='optional; if specified, skips creation of lockfile', action='store_true') - parsed_args = _parser.parse_args() + parsed_args = _parser.parse_args(args=argv) if parsed_args.nolock: clean.delete_file(vagrant_lock_file) sys.exit(0) @@ -62,4 +62,4 @@ def main(): if __name__ == '__main__': - main() + main(sys.argv) From f83f917a25255b5dabb4dd2d718c29a49601f660 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Mon, 26 Aug 2019 03:50:59 +0530 Subject: [PATCH 031/141] update circleci --- .circleci/config.yml | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 164e6ab7af66..f8722942be4f 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -15,8 +15,8 @@ anchor_for_job_defaults: &job_defaults anchor_for_installing_dependencies: &install_dependencies name: Install dependencies command: | - source scripts/setup.sh || exit 1 - source scripts/setup_gae.sh || exit 1 + python -m scripts.setup + python -m scripts.setup_gae anchor_for_installing_cc_test_reporter: &install_cc name: Install codeclimate test reporter @@ -60,7 +60,7 @@ jobs: # pre_commit_linter and third_party_size_check need to import other # Python files and that is only possible if we treat it as a module. command: | - bash scripts/install_third_party.sh + python -m scripts.install_third_party_libs python -m scripts.third_party_size_check python -m scripts.pre_commit_linter --path=. 
--verbose @@ -74,7 +74,7 @@ jobs: - run: name: Run typescript tests command: | - bash scripts/install_third_party.sh + python -m scripts.install_third_party_libs python -m scripts.typescript_checks frontend_tests: @@ -88,7 +88,8 @@ jobs: - run: name: Run frontend tests command: | - bash -x scripts/run_frontend_tests.sh --run-minified-tests=true + python -m scripts.run_frontend_tests --run_minified_tests + - run: name: Generate frontend coverage report command: | @@ -107,7 +108,7 @@ jobs: - run: name: Run backend tests command: | - bash scripts/run_backend_tests.sh --generate_coverage_report --exclude_load_tests + python -m scripts.run_backend_tests --generate_coverage_report --exclude_load_tests - run: <<: *install_cc - run: From fb263969664ad7888e87bf4f2bc5fa9102179014 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Mon, 26 Aug 2019 03:51:50 +0530 Subject: [PATCH 032/141] fix --- .circleci/config.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index f8722942be4f..5b69ec8ae059 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -89,7 +89,6 @@ jobs: name: Run frontend tests command: | python -m scripts.run_frontend_tests --run_minified_tests - - run: name: Generate frontend coverage report command: | From d0a9f565449a0e82509227ba3fd68d126b480b00 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Mon, 26 Aug 2019 03:55:28 +0530 Subject: [PATCH 033/141] convert --- .travis.yml | 50 +++++++++++++++++++++++++------------------------- 1 file changed, 25 insertions(+), 25 deletions(-) diff --git a/.travis.yml b/.travis.yml index dd48769a1c5e..e110b2607883 100644 --- a/.travis.yml +++ b/.travis.yml @@ -85,9 +85,9 @@ before_install: install: - pushd $TRAVIS_BUILD_DIR -- source scripts/setup.sh || exit 1 -- source scripts/setup_gae.sh || exit 1 -- bash scripts/install_third_party.sh +- python -m scripts.setup +- python -m scripts.setup_gae +- python -m scripts.install_third_party_libs script: # These 
lines are commented out because these checks are being run on CircleCI @@ -101,28 +101,28 @@ script: # - if [ "$RUN_BACKEND_TESTS" == 'true' ] && [ "$REPORT_BACKEND_COVERAGE" == 'true' ] && [ "$EXCLUDE_LOAD_TESTS" == 'false' ]; then bash scripts/run_backend_tests.sh --generate_coverage_report; fi # - if [ "$RUN_BACKEND_TESTS" == 'true' ] && [ "$REPORT_BACKEND_COVERAGE" == 'false' ] && [ "$EXCLUDE_LOAD_TESTS" == 'false' ]; then bash scripts/run_backend_tests.sh; fi # Run the e2e tests in the production environment (using --prod_env). -- if [ "$RUN_E2E_TESTS_ACCESSIBILITY" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="accessibility" --prod_env; fi -- if [ "$RUN_E2E_TESTS_ADDITIONAL_EDITOR_AND_PLAYER_FEATURES" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="additionalEditorAndPlayerFeatures" --prod_env; fi -- if [ "$RUN_E2E_TESTS_COLLECTIONS" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="collections" --prod_env; fi -- if [ "$RUN_E2E_TESTS_CORE_EDITOR_AND_PLAYER_FEATURES" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="coreEditorAndPlayerFeatures" --prod_env; fi -- if [ "$RUN_E2E_TESTS_EMBEDDING" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="embedding" --prod_env; fi -- if [ "$RUN_E2E_TESTS_EXPLORATION_FEEDBACK_TAB" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="explorationFeedbackTab" --prod_env; fi -- if [ "$RUN_E2E_TESTS_EXPLORATION_HISTORY_TAB" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="explorationHistoryTab" --prod_env; fi -- if [ "$RUN_E2E_TESTS_EXPLORATION_STATISTICS_TAB" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="explorationStatisticsTab" --prod_env; fi -- if [ "$RUN_E2E_TESTS_EXPLORATION_TRANSLATION_TAB" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="explorationTranslationTab" --prod_env; fi -- if [ "$RUN_E2E_TESTS_EXTENSIONS" == 'true' ]; then 
travis_retry bash scripts/run_e2e_tests.sh --suite="extensions" --prod_env; fi -- if [ "$RUN_E2E_TESTS_LEARNER_DASHBOARD" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="learnerDashboard" --prod_env; fi -- if [ "$RUN_E2E_TESTS_LEARNER" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="learner" --prod_env; fi -- if [ "$RUN_E2E_TESTS_LIBRARY" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="library" --prod_env; fi -- if [ "$RUN_E2E_TESTS_NAVIGATION" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="navigation" --prod_env; fi -- if [ "$RUN_E2E_TESTS_PREFERENCES" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="preferences" --prod_env; fi -- if [ "$RUN_E2E_TESTS_PROFILE_MENU" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="profileMenu" --prod_env; fi -- if [ "$RUN_E2E_TESTS_PUBLICATION" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="publication" --prod_env; fi -- if [ "$RUN_E2E_TESTS_SKILL_EDITOR" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="skillEditor" --prod_env; fi -- if [ "$RUN_E2E_TESTS_SUBSCRIPTIONS" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="subscriptions" --prod_env; fi -- if [ "$RUN_E2E_TESTS_TOPICS_AND_SKILLS_DASHBOARD" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="topicsAndSkillsDashboard" --prod_env; fi -- if [ "$RUN_E2E_TESTS_TOPIC_AND_STORY_EDITOR" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="topicAndStoryEditor" --prod_env; fi -- if [ "$RUN_E2E_TESTS_USERS" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="users" --prod_env; fi +- if [ "$RUN_E2E_TESTS_ACCESSIBILITY" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="accessibility" --prod_env; fi +- if [ "$RUN_E2E_TESTS_ADDITIONAL_EDITOR_AND_PLAYER_FEATURES" == 'true' ]; then travis_retry python -m 
scripts.run_e2e_tests --suite="additionalEditorAndPlayerFeatures" --prod_env; fi +- if [ "$RUN_E2E_TESTS_COLLECTIONS" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="collections" --prod_env; fi +- if [ "$RUN_E2E_TESTS_CORE_EDITOR_AND_PLAYER_FEATURES" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="coreEditorAndPlayerFeatures" --prod_env; fi +- if [ "$RUN_E2E_TESTS_EMBEDDING" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="embedding" --prod_env; fi +- if [ "$RUN_E2E_TESTS_EXPLORATION_FEEDBACK_TAB" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="explorationFeedbackTab" --prod_env; fi +- if [ "$RUN_E2E_TESTS_EXPLORATION_HISTORY_TAB" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="explorationHistoryTab" --prod_env; fi +- if [ "$RUN_E2E_TESTS_EXPLORATION_STATISTICS_TAB" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="explorationStatisticsTab" --prod_env; fi +- if [ "$RUN_E2E_TESTS_EXPLORATION_TRANSLATION_TAB" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="explorationTranslationTab" --prod_env; fi +- if [ "$RUN_E2E_TESTS_EXTENSIONS" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="extensions" --prod_env; fi +- if [ "$RUN_E2E_TESTS_LEARNER_DASHBOARD" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="learnerDashboard" --prod_env; fi +- if [ "$RUN_E2E_TESTS_LEARNER" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="learner" --prod_env; fi +- if [ "$RUN_E2E_TESTS_LIBRARY" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="library" --prod_env; fi +- if [ "$RUN_E2E_TESTS_NAVIGATION" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="navigation" --prod_env; fi +- if [ "$RUN_E2E_TESTS_PREFERENCES" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="preferences" --prod_env; fi +- if [ 
"$RUN_E2E_TESTS_PROFILE_MENU" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="profileMenu" --prod_env; fi +- if [ "$RUN_E2E_TESTS_PUBLICATION" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="publication" --prod_env; fi +- if [ "$RUN_E2E_TESTS_SKILL_EDITOR" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="skillEditor" --prod_env; fi +- if [ "$RUN_E2E_TESTS_SUBSCRIPTIONS" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="subscriptions" --prod_env; fi +- if [ "$RUN_E2E_TESTS_TOPICS_AND_SKILLS_DASHBOARD" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="topicsAndSkillsDashboard" --prod_env; fi +- if [ "$RUN_E2E_TESTS_TOPIC_AND_STORY_EDITOR" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="topicAndStoryEditor" --prod_env; fi +- if [ "$RUN_E2E_TESTS_USERS" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="users" --prod_env; fi # These lines are commented out because these checks are being run on CircleCI # here: https://circleci.com/gh/oppia/oppia From df0c8800d0d084bfec926ad37b7e57fe2d6815fa Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Mon, 26 Aug 2019 04:03:45 +0530 Subject: [PATCH 034/141] fix --- .circleci/config.yml | 1 + .travis.yml | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 5b69ec8ae059..ccba0415cd92 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -15,6 +15,7 @@ anchor_for_job_defaults: &job_defaults anchor_for_installing_dependencies: &install_dependencies name: Install dependencies command: | + python -m scripts.install_third_party_libs python -m scripts.setup python -m scripts.setup_gae diff --git a/.travis.yml b/.travis.yml index e110b2607883..c73bd28b277c 100644 --- a/.travis.yml +++ b/.travis.yml @@ -85,9 +85,9 @@ before_install: install: - pushd $TRAVIS_BUILD_DIR +- python -m 
scripts.install_third_party_libs - python -m scripts.setup - python -m scripts.setup_gae -- python -m scripts.install_third_party_libs script: # These lines are commented out because these checks are being run on CircleCI From af66d9559de274eb81574b534103c720daf1b88f Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Mon, 26 Aug 2019 04:08:30 +0530 Subject: [PATCH 035/141] fix --- .circleci/config.yml | 2 +- .travis.yml | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index ccba0415cd92..b42f1092c50f 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -15,7 +15,7 @@ anchor_for_job_defaults: &job_defaults anchor_for_installing_dependencies: &install_dependencies name: Install dependencies command: | - python -m scripts.install_third_party_libs + sudo pip install future python -m scripts.setup python -m scripts.setup_gae diff --git a/.travis.yml b/.travis.yml index c73bd28b277c..2d59506e19c1 100644 --- a/.travis.yml +++ b/.travis.yml @@ -82,12 +82,13 @@ before_install: - export CHROME_BIN=/usr/bin/google-chrome-stable - export DISPLAY=:99.0 - bash -e /etc/init.d/xvfb start +- pip install future install: - pushd $TRAVIS_BUILD_DIR -- python -m scripts.install_third_party_libs - python -m scripts.setup - python -m scripts.setup_gae +- python -m scripts.install_third_party_libs script: # These lines are commented out because these checks are being run on CircleCI From 48c8ce890fe37e6e82c305c4a74d11c2037c1842 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Mon, 26 Aug 2019 04:10:53 +0530 Subject: [PATCH 036/141] fix --- .circleci/config.yml | 1 + .travis.yml | 1 + 2 files changed, 2 insertions(+) diff --git a/.circleci/config.yml b/.circleci/config.yml index b42f1092c50f..c552c8319cbe 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -16,6 +16,7 @@ anchor_for_installing_dependencies: &install_dependencies name: Install dependencies command: | sudo pip install future + 
sudo pip install psutil python -m scripts.setup python -m scripts.setup_gae diff --git a/.travis.yml b/.travis.yml index 2d59506e19c1..00289f04bc1b 100644 --- a/.travis.yml +++ b/.travis.yml @@ -83,6 +83,7 @@ before_install: - export DISPLAY=:99.0 - bash -e /etc/init.d/xvfb start - pip install future +- pip install psutil install: - pushd $TRAVIS_BUILD_DIR From fcbbbb03c4cdc4291ba19b1b168742622918b22a Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Mon, 26 Aug 2019 04:19:07 +0530 Subject: [PATCH 037/141] fix --- .circleci/config.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.circleci/config.yml b/.circleci/config.yml index c552c8319cbe..67bd074327f7 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -70,6 +70,8 @@ jobs: <<: *job_defaults steps: - checkout + - run: + <<: *install_dependencies - run: date +%F > date - restore_cache: <<: *restore_cache @@ -83,6 +85,8 @@ jobs: <<: *job_defaults steps: - checkout + - run: + <<: *install_dependencies - run: date +%F > date - restore_cache: <<: *restore_cache @@ -102,6 +106,8 @@ jobs: <<: *job_defaults steps: - checkout + - run: + <<: *install_dependencies - run: date +%F > date - restore_cache: <<: *restore_cache From d3f80a33de40ba4afcb5136049cc1869189e4d66 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Mon, 26 Aug 2019 05:01:54 +0530 Subject: [PATCH 038/141] sync with develop --- scripts/run_backend_tests.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/scripts/run_backend_tests.py b/scripts/run_backend_tests.py index 5886540b5b6c..0bbd67b259a9 100644 --- a/scripts/run_backend_tests.py +++ b/scripts/run_backend_tests.py @@ -100,16 +100,11 @@ def main(argv): 'coverage', '4.5.4', os.path.join(common.OPPIA_TOOLS_DIR, 'coverage-4.5.4')) - # Compile typescript files. 
- python_utils.PRINT('Compiling typescript...') - subprocess.call('node_modules/typescript/bin/tsc --project .'.split()) - python_utils.PRINT('Compiling webpack...') subprocess.call( 'node_modules/webpack/bin/webpack.js --config webpack.prod.config.ts' .split()) - build.build([]) backend_tests.main([]) if parsed_args.generate_coverage_report: From 740eb7aa53855824bda5132d0761f938a18641f7 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Mon, 26 Aug 2019 05:27:10 +0530 Subject: [PATCH 039/141] fix --- scripts/install_third_party_libs.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/scripts/install_third_party_libs.py b/scripts/install_third_party_libs.py index 759602b8daf6..2c2f34ad6d65 100644 --- a/scripts/install_third_party_libs.py +++ b/scripts/install_third_party_libs.py @@ -101,7 +101,8 @@ def pip_install(package, version, install_path): else: import pip._internal pip._internal.main(args=[ # pylint: disable=protected-access - 'install', '%s==%s' % (package, version), '--target', install_path]) + 'install', '%s==%s' % (package, version), '--target', install_path, + '--user']) def install_skulpt(argv): @@ -177,7 +178,7 @@ def install_skulpt(argv): temp_file_content = target_stdout.getvalue() with python_utils.open_file(tmp_file, 'w') as f: - f.write(temp_file_content) + f.write(python_utils.STR(temp_file_content)) shutil.move( tmp_file, os.path.join( From 2834e827a41476ca2cf230e8251b1d6e31a424af Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Mon, 26 Aug 2019 23:44:51 +0530 Subject: [PATCH 040/141] use parse known args --- scripts/backend_tests.py | 2 +- scripts/run_e2e_tests.py | 2 +- scripts/run_frontend_tests.py | 2 +- scripts/run_performance_tests.py | 2 +- scripts/start.py | 2 +- scripts/vagrant_lock.py | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/scripts/backend_tests.py b/scripts/backend_tests.py index d1928a459f2d..789e95294d1a 100644 --- a/scripts/backend_tests.py +++ 
b/scripts/backend_tests.py @@ -307,7 +307,7 @@ def main(argv): import dev_appserver dev_appserver.fix_sys_path() - parsed_args = _PARSER.parse_args(args=argv) + parsed_args, _ = _PARSER.parse_known_args(args=argv) if parsed_args.test_target and parsed_args.test_path: raise Exception('At most one of test_path and test_target ' 'should be specified.') diff --git a/scripts/run_e2e_tests.py b/scripts/run_e2e_tests.py index 76e85afd20b4..03786ff881e8 100644 --- a/scripts/run_e2e_tests.py +++ b/scripts/run_e2e_tests.py @@ -122,7 +122,7 @@ def main(argv): if os.environ.get('TRAVIS'): install_chrome_on_travis.main() - parsed_args = _PARSER.parse_args(args=argv) + parsed_args, _ = _PARSER.parse_known_args(args=argv) setup.maybe_install_dependencies( parsed_args.skip_install, parsed_args.run_minified_tests) diff --git a/scripts/run_frontend_tests.py b/scripts/run_frontend_tests.py index b4349ccad45e..1b74fbc7c69c 100644 --- a/scripts/run_frontend_tests.py +++ b/scripts/run_frontend_tests.py @@ -59,7 +59,7 @@ def main(argv): xvfb_prefix = '' if os.environ.get('VAGRANT') or os.path.isfile('/etc/is_vagrant_vm'): xvfb_prefix = '/usr/bin/xvfb-run' - parsed_args = _PARSER.parse_args(args=argv) + parsed_args, _ = _PARSER.parse_known_args(args=argv) setup.maybe_install_dependencies( parsed_args.skip_install, parsed_args.run_minified_tests) python_utils.PRINT('') diff --git a/scripts/run_performance_tests.py b/scripts/run_performance_tests.py index 0e64188cc9ac..54c156de7772 100644 --- a/scripts/run_performance_tests.py +++ b/scripts/run_performance_tests.py @@ -87,7 +87,7 @@ def main(argv): setup.main() setup_gae.main() - parsed_args = _PARSER.parse_args(args=argv) + parsed_args, _ = _PARSER.parse_known_args(args=argv) setup.maybe_install_dependencies( parsed_args.skip_install, parsed_args.run_minified_tests) diff --git a/scripts/start.py b/scripts/start.py index 92814e85aa08..0c51508e5a5d 100644 --- a/scripts/start.py +++ b/scripts/start.py @@ -98,7 +98,7 @@ def main(argv): 
python_utils.PRINT('running at port 8181.') python_utils.PRINT('') - parsed_args = _PARSER.parse_args(args=argv) + parsed_args, _ = _PARSER.parse_known_args(args=argv) clear_datastore_arg = ( '' if parsed_args.save_datastore else '--clear_datastore=true') enable_console_arg = ( diff --git a/scripts/vagrant_lock.py b/scripts/vagrant_lock.py index b60d9071e0f2..7a81600cbe9f 100644 --- a/scripts/vagrant_lock.py +++ b/scripts/vagrant_lock.py @@ -39,7 +39,7 @@ def main(argv): '--nolock', help='optional; if specified, skips creation of lockfile', action='store_true') - parsed_args = _parser.parse_args(args=argv) + parsed_args, _ = _parser.parse_known_args(args=argv) if parsed_args.nolock: clean.delete_file(vagrant_lock_file) sys.exit(0) From 8bbda9674c75a97ccd30ab444c13c1a17466bb9d Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Tue, 27 Aug 2019 00:25:39 +0530 Subject: [PATCH 041/141] remove user --- scripts/install_third_party_libs.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/scripts/install_third_party_libs.py b/scripts/install_third_party_libs.py index 2c2f34ad6d65..dc3a08201f75 100644 --- a/scripts/install_third_party_libs.py +++ b/scripts/install_third_party_libs.py @@ -101,8 +101,7 @@ def pip_install(package, version, install_path): else: import pip._internal pip._internal.main(args=[ # pylint: disable=protected-access - 'install', '%s==%s' % (package, version), '--target', install_path, - '--user']) + 'install', '%s==%s' % (package, version), '--target', install_path]) def install_skulpt(argv): From cd84f0b0af430daabdd8b72342e62a2a1e465466 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Tue, 27 Aug 2019 06:12:10 +0530 Subject: [PATCH 042/141] fix pip --- scripts/install_third_party_libs.py | 6 ++---- scripts/run_backend_tests.py | 4 ++-- scripts/setup.py | 3 ++- 3 files changed, 6 insertions(+), 7 deletions(-) diff --git a/scripts/install_third_party_libs.py b/scripts/install_third_party_libs.py index 
dc3a08201f75..124fd93dfec7 100644 --- a/scripts/install_third_party_libs.py +++ b/scripts/install_third_party_libs.py @@ -29,7 +29,6 @@ from . import install_third_party from . import pre_commit_hook from . import pre_push_hook -from . import setup _PARSER = argparse.ArgumentParser() _PARSER.add_argument( @@ -101,7 +100,8 @@ def pip_install(package, version, install_path): else: import pip._internal pip._internal.main(args=[ # pylint: disable=protected-access - 'install', '%s==%s' % (package, version), '--target', install_path]) + 'install', '%s==%s' % (package, version), '--target', install_path, + '--system']) def install_skulpt(argv): @@ -224,8 +224,6 @@ def main(argv): python_utils.PRINT('Installing %s' % package) pip_install(package, version, exact_lib_path) - setup.main() - # Download and install required JS and zip files. python_utils.PRINT('Installing third-party JS libraries and zip files.') install_third_party.main() diff --git a/scripts/run_backend_tests.py b/scripts/run_backend_tests.py index f3dc3b0661ea..fbf8b4bbbe09 100644 --- a/scripts/run_backend_tests.py +++ b/scripts/run_backend_tests.py @@ -83,7 +83,7 @@ def main(argv): setup_gae.main() # Install third party dependencies. - subprocess.call('bash scripts/install_third_party.sh'.split()) + install_third_party_libs.main([]) coverage_home = os.path.join(common.OPPIA_TOOLS_DIR, 'coverage-4.5.4') coverage_path = os.path.join(coverage_home, 'coverage') @@ -102,7 +102,7 @@ def main(argv): build.build([]) - # Compile typescript files + # Compile typescript files. python_utils.PRINT('Compiling typescript...') subprocess.call( 'node_modules/typescript/bin/tsc --project .'.split()) diff --git a/scripts/setup.py b/scripts/setup.py index ab4de5668c37..536ee2bd2e7d 100644 --- a/scripts/setup.py +++ b/scripts/setup.py @@ -27,6 +27,7 @@ from . import build from . import common +from . 
import install_third_party_libs def delete_directory_tree(directory_path): @@ -58,7 +59,7 @@ def maybe_install_dependencies( """Parse additional command line arguments.""" if skip_installing_third_party_libs is False: # Install third party dependencies. - subprocess.call('bash scripts/install_third_party.sh'.split()) + install_third_party_libs.main([]) # Ensure that generated JS and CSS files are in place before running the # tests. python_utils.PRINT('') From 2c03e242a20b6725c32fccbf3374fd8f8d388eff Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Tue, 27 Aug 2019 17:41:24 +0530 Subject: [PATCH 043/141] try subprocess on pip install --- scripts/install_third_party_libs.py | 16 +++++++++------- scripts/pre_commit_hook.py | 4 ++-- scripts/pre_push_hook.py | 4 ++-- 3 files changed, 13 insertions(+), 11 deletions(-) diff --git a/scripts/install_third_party_libs.py b/scripts/install_third_party_libs.py index 124fd93dfec7..e41e84cd4b1f 100644 --- a/scripts/install_third_party_libs.py +++ b/scripts/install_third_party_libs.py @@ -95,13 +95,15 @@ def pip_install(package, version, install_path): 'Windows%29') sys.exit(1) - if hasattr(pip, 'main'): - pip.main(['install', package]) - else: - import pip._internal - pip._internal.main(args=[ # pylint: disable=protected-access - 'install', '%s==%s' % (package, version), '--target', install_path, - '--system']) + subprocess.call(( + 'pip install %s==%s --target=%s' + % (package, version, install_path)).split()) + # if hasattr(pip, 'main'): + # pip.main(['install', package]) + # else: + # import pip._internal + # pip._internal.main(args=[ # pylint: disable=protected-access + # 'install', '%s==%s' % (package, version), '--target', install_path]) def install_skulpt(argv): diff --git a/scripts/pre_commit_hook.py b/scripts/pre_commit_hook.py index dd585fcaaa2f..061ad6ad1038 100755 --- a/scripts/pre_commit_hook.py +++ b/scripts/pre_commit_hook.py @@ -135,7 +135,7 @@ def main(argv): args, _ = parser.parse_known_args(args=argv) 
if args.install: _install_hook() - sys.exit(0) + return python_utils.PRINT('Running pre-commit check for package-lock.json ...') if _does_diff_include_package_lock_file_and_no_package_file(): @@ -145,7 +145,7 @@ def main(argv): 'package-lock.json will be automatically reverted.') python_utils.PRINT('Reverting changes in package-lock.json ...') _revert_changes_in_package_lock_file() - sys.exit(0) + return if __name__ == '__main__': diff --git a/scripts/pre_push_hook.py b/scripts/pre_push_hook.py index 77beb267c3ad..42864814aec1 100755 --- a/scripts/pre_push_hook.py +++ b/scripts/pre_push_hook.py @@ -367,7 +367,7 @@ def main(argv): args, _ = parser.parse_known_args(args=argv) if args.install: _install_hook() - sys.exit(0) + return remote = _get_remote_name() remote = remote if remote else args.remote @@ -403,7 +403,7 @@ def main(argv): python_utils.PRINT( 'Push aborted due to failing frontend tests.') sys.exit(1) - sys.exit(0) + return if __name__ == '__main__': From cd4a8246462f21150dd73dc75fa750fe2f858423 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Tue, 27 Aug 2019 17:53:32 +0530 Subject: [PATCH 044/141] fix --- scripts/install_third_party_libs.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/scripts/install_third_party_libs.py b/scripts/install_third_party_libs.py index e41e84cd4b1f..2bc95e9c835d 100644 --- a/scripts/install_third_party_libs.py +++ b/scripts/install_third_party_libs.py @@ -95,15 +95,15 @@ def pip_install(package, version, install_path): 'Windows%29') sys.exit(1) - subprocess.call(( - 'pip install %s==%s --target=%s' - % (package, version, install_path)).split()) - # if hasattr(pip, 'main'): - # pip.main(['install', package]) - # else: - # import pip._internal - # pip._internal.main(args=[ # pylint: disable=protected-access - # 'install', '%s==%s' % (package, version), '--target', install_path]) + # For pip version < 10. 
+ if hasattr(pip, 'main'): + pip.main(args=[ + 'install', '%s==%s' % (package, version), '--target', install_path]) + # For pip version > 10. + else: + import pip._internal + pip._internal.main(args=[ # pylint: disable=protected-access + 'install', '%s==%s' % (package, version), '--target', install_path]) def install_skulpt(argv): From ae51254d589c0fc1ba65ec631d793e8b918ef068 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Tue, 27 Aug 2019 18:30:07 +0530 Subject: [PATCH 045/141] fix --- scripts/install_third_party_libs.py | 88 +++++++++-------------------- scripts/pre_commit_linter.py | 2 +- 2 files changed, 29 insertions(+), 61 deletions(-) diff --git a/scripts/install_third_party_libs.py b/scripts/install_third_party_libs.py index 2bc95e9c835d..2bdf0a2202e4 100644 --- a/scripts/install_third_party_libs.py +++ b/scripts/install_third_party_libs.py @@ -16,7 +16,6 @@ from __future__ import absolute_import # pylint: disable=import-only-modules import argparse -import contextlib import fileinput import os import shutil @@ -41,24 +40,6 @@ action='store_true') -@contextlib.contextmanager -def _redirect_stdout(new_target): - """Redirect stdout to the new target. - - Args: - new_target: TextIOWrapper. The new target to which stdout is redirected. - - Yields: - TextIOWrapper. The new target. - """ - old_target = sys.stdout - sys.stdout = new_target - try: - yield new_target - finally: - sys.stdout = old_target - - def pip_install(package, version, install_path): """Installs third party libraries with pip. @@ -140,12 +121,7 @@ def install_skulpt(argv): # Use a specific Skulpt release. subprocess.call('git checkout 0.10.0'.split()) - # Add a temporary backup file so that this script works on both - # Linux and Mac. - tmp_file = '/tmp/backup.XXXXXXXXXX' - python_utils.PRINT('Compiling Skulpt') - target_stdout = python_utils.string_io() # The Skulpt setup function needs to be tweaked. It fails without # certain third party commands. 
These are only used for unit tests # and generating documentation and are not necessary when building @@ -153,37 +129,29 @@ def install_skulpt(argv): for line in fileinput.input( files=[os.path.join( common.OPPIA_TOOLS_DIR, - 'skulpt-0.10.0/skulpt/skulpt.py')]): + 'skulpt-0.10.0/skulpt/skulpt.py')], inplace=True): # Inside this loop the STDOUT will be redirected to the file. # The comma after each python_utils.PRINT statement is needed to # avoid double line breaks. - with _redirect_stdout(target_stdout): - python_utils.PRINT( - line.replace('ret = test()', 'ret = 0'), - end='') - python_utils.PRINT( - line.replace(' doc()', ' pass#doc()'), - end='') - # This and the next command disable unit and compressed unit - # tests for the compressed distribution of Skulpt. These - # tests don't work on some Ubuntu environments and cause a - # libreadline dependency issue. - python_utils.PRINT( - line.replace( - 'ret = os.system(\'{0}', - 'ret = 0 #os.system(\'{0}'), - end='') - python_utils.PRINT( - line.replace('ret = rununits(opt=True)', 'ret = 0'), - end='') - - temp_file_content = target_stdout.getvalue() - with python_utils.open_file(tmp_file, 'w') as f: - f.write(python_utils.STR(temp_file_content)) - - shutil.move( - tmp_file, os.path.join( - common.OPPIA_TOOLS_DIR, 'skulpt-0.10.0/skulpt/skulpt.py')) + python_utils.PRINT( + line.replace('ret = test()', 'ret = 0'), + end='') + python_utils.PRINT( + line.replace(' doc()', ' pass#doc()'), + end='') + # This and the next command disable unit and compressed unit + # tests for the compressed distribution of Skulpt. These + # tests don't work on some Ubuntu environments and cause a + # libreadline dependency issue. 
+ python_utils.PRINT( + line.replace( + 'ret = os.system(\'{0}', + 'ret = 0 #os.system(\'{0}'), + end='') + python_utils.PRINT( + line.replace('ret = rununits(opt=True)', 'ret = 0'), + end='') + subprocess.call( 'python $common.OPPIA_TOOLS_DIR/skulpt-0.10.0/skulpt/skulpt.py ' 'dist'.split()) @@ -191,13 +159,13 @@ def install_skulpt(argv): # Return to the Oppia root folder. os.chdir(common.CURR_DIR) - # Move the build directory to the static resources folder. - os.makedirs( - os.path.join(common.THIRD_PARTY_DIR, 'static/skulpt-0.10.0')) - shutil.copytree( - os.path.join( - common.OPPIA_TOOLS_DIR, 'skulpt-0.10.0/skulpt/dist/'), - os.path.join(common.THIRD_PARTY_DIR, 'static/skulpt-0.10.0')) + # Move the build directory to the static resources folder. + os.makedirs( + os.path.join(common.THIRD_PARTY_DIR, 'static/skulpt-0.10.0')) + shutil.copytree( + os.path.join( + common.OPPIA_TOOLS_DIR, 'skulpt-0.10.0/skulpt/dist/'), + os.path.join(common.THIRD_PARTY_DIR, 'static/skulpt-0.10.0')) def main(argv): @@ -206,7 +174,7 @@ def main(argv): ('future', '0.17.1', common.THIRD_PARTY_DIR), ('pylint', '1.9.4', common.OPPIA_TOOLS_DIR), ('Pillow', '6.0.0', common.OPPIA_TOOLS_DIR), - ('pylint-quotes', '0.2.1', common.OPPIA_TOOLS_DIR), + ('pylint-quotes', '0.1.8', common.OPPIA_TOOLS_DIR), ('webtest', '2.0.33', common.OPPIA_TOOLS_DIR), ('isort', '4.3.20', common.OPPIA_TOOLS_DIR), ('pycodestyle', '2.5.0', common.OPPIA_TOOLS_DIR), diff --git a/scripts/pre_commit_linter.py b/scripts/pre_commit_linter.py index 83fda7b78162..65c6c4d35c1b 100644 --- a/scripts/pre_commit_linter.py +++ b/scripts/pre_commit_linter.py @@ -574,7 +574,7 @@ os.path.join(_PARENT_DIR, 'oppia_tools', 'browsermob-proxy-0.8.0'), os.path.join(_PARENT_DIR, 'oppia_tools', 'esprima-4.0.1'), os.path.join(_PARENT_DIR, 'oppia_tools', 'pycodestyle-2.5.0'), - os.path.join(_PARENT_DIR, 'oppia_tools', 'pylint-quotes-0.2.1'), + os.path.join(_PARENT_DIR, 'oppia_tools', 'pylint-quotes-0.1.8'), os.path.join(_PARENT_DIR, 'oppia_tools', 
'selenium-3.13.0'), os.path.join(_PARENT_DIR, 'oppia_tools', 'PyGithub-1.43.7'), os.path.join(_PARENT_DIR, 'oppia_tools', 'psutil-5.6.3'), From a7e21c5912e4ccdf1a9ec229f5ff4051021bc3a4 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Tue, 27 Aug 2019 19:46:26 +0530 Subject: [PATCH 046/141] fix --- scripts/create_expression_parser.py | 1 + scripts/install_third_party_libs.py | 24 +++++++++++++++++++----- 2 files changed, 20 insertions(+), 5 deletions(-) diff --git a/scripts/create_expression_parser.py b/scripts/create_expression_parser.py index 2cb1eb0ed145..355cd666371c 100644 --- a/scripts/create_expression_parser.py +++ b/scripts/create_expression_parser.py @@ -63,6 +63,7 @@ def main(): '\'ExpressionParserService\', [\'$log\', function($log) {', line), end='') + for line in fileinput.input(files=[expression_parser_js], inplace=True): python_utils.PRINT( re.sub(r'^})();\s*$', '}]);', line), end='') diff --git a/scripts/install_third_party_libs.py b/scripts/install_third_party_libs.py index 2bdf0a2202e4..c333f6e96121 100644 --- a/scripts/install_third_party_libs.py +++ b/scripts/install_third_party_libs.py @@ -136,9 +136,19 @@ def install_skulpt(argv): python_utils.PRINT( line.replace('ret = test()', 'ret = 0'), end='') + + for line in fileinput.input( + files=[os.path.join( + common.OPPIA_TOOLS_DIR, + 'skulpt-0.10.0/skulpt/skulpt.py')], inplace=True): python_utils.PRINT( line.replace(' doc()', ' pass#doc()'), end='') + + for line in fileinput.input( + files=[os.path.join( + common.OPPIA_TOOLS_DIR, + 'skulpt-0.10.0/skulpt/skulpt.py')], inplace=True): # This and the next command disable unit and compressed unit # tests for the compressed distribution of Skulpt. 
These # tests don't work on some Ubuntu environments and cause a @@ -148,20 +158,24 @@ def install_skulpt(argv): 'ret = os.system(\'{0}', 'ret = 0 #os.system(\'{0}'), end='') + + for line in fileinput.input( + files=[os.path.join( + common.OPPIA_TOOLS_DIR, + 'skulpt-0.10.0/skulpt/skulpt.py')], inplace=True): python_utils.PRINT( line.replace('ret = rununits(opt=True)', 'ret = 0'), end='') - subprocess.call( - 'python $common.OPPIA_TOOLS_DIR/skulpt-0.10.0/skulpt/skulpt.py ' - 'dist'.split()) + subprocess.call(( + 'python %s dist' % os.path.join( + common.OPPIA_TOOLS_DIR, 'skulpt-0.10.0/skulpt/skulpt.py')) + .split()) # Return to the Oppia root folder. os.chdir(common.CURR_DIR) # Move the build directory to the static resources folder. - os.makedirs( - os.path.join(common.THIRD_PARTY_DIR, 'static/skulpt-0.10.0')) shutil.copytree( os.path.join( common.OPPIA_TOOLS_DIR, 'skulpt-0.10.0/skulpt/dist/'), From e9fd69de690cbd0cefe876bfbde551ab3ade39dc Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Tue, 27 Aug 2019 20:06:41 +0530 Subject: [PATCH 047/141] fix --- scripts/install_third_party_libs.py | 24 ++++++++++++++++++++++++ scripts/run_e2e_tests.py | 3 ++- scripts/run_frontend_tests.py | 3 ++- scripts/run_performance_tests.py | 3 ++- scripts/setup.py | 23 ----------------------- 5 files changed, 30 insertions(+), 26 deletions(-) diff --git a/scripts/install_third_party_libs.py b/scripts/install_third_party_libs.py index c333f6e96121..ffecc906e76a 100644 --- a/scripts/install_third_party_libs.py +++ b/scripts/install_third_party_libs.py @@ -24,10 +24,12 @@ import python_utils +from . import build from . import common from . import install_third_party from . import pre_commit_hook from . import pre_push_hook +from . 
import setup _PARSER = argparse.ArgumentParser() _PARSER.add_argument( @@ -182,8 +184,30 @@ def install_skulpt(argv): os.path.join(common.THIRD_PARTY_DIR, 'static/skulpt-0.10.0')) +def maybe_install_dependencies( + skip_installing_third_party_libs, run_minified_tests): + """Parse additional command line arguments.""" + if skip_installing_third_party_libs is False: + # Install third party dependencies. + main([]) + # Ensure that generated JS and CSS files are in place before running the + # tests. + python_utils.PRINT('') + python_utils.PRINT('Running build task with concatenation only') + python_utils.PRINT('') + build.build([]) + + if run_minified_tests is True: + python_utils.PRINT('') + python_utils.PRINT( + 'Running build task with concatenation and minification') + python_utils.PRINT('') + build.build(['--prod_env']) + + def main(argv): """Install third-party libraries for Oppia.""" + setup.main() pip_dependencies = [ ('future', '0.17.1', common.THIRD_PARTY_DIR), ('pylint', '1.9.4', common.OPPIA_TOOLS_DIR), diff --git a/scripts/run_e2e_tests.py b/scripts/run_e2e_tests.py index 03786ff881e8..d73f66607ae3 100644 --- a/scripts/run_e2e_tests.py +++ b/scripts/run_e2e_tests.py @@ -57,6 +57,7 @@ from . import build from . import common from . import install_chrome_on_travis +from . import install_third_party_libs from . import setup from . import setup_gae @@ -123,7 +124,7 @@ def main(argv): install_chrome_on_travis.main() parsed_args, _ = _PARSER.parse_known_args(args=argv) - setup.maybe_install_dependencies( + install_third_party_libs.maybe_install_dependencies( parsed_args.skip_install, parsed_args.run_minified_tests) if not common.is_port_open(8181): diff --git a/scripts/run_frontend_tests.py b/scripts/run_frontend_tests.py index 1b74fbc7c69c..94401d48e75d 100644 --- a/scripts/run_frontend_tests.py +++ b/scripts/run_frontend_tests.py @@ -37,6 +37,7 @@ import python_utils from . import build +from . import install_third_party_libs from . import setup from . 
import setup_gae @@ -60,7 +61,7 @@ def main(argv): if os.environ.get('VAGRANT') or os.path.isfile('/etc/is_vagrant_vm'): xvfb_prefix = '/usr/bin/xvfb-run' parsed_args, _ = _PARSER.parse_known_args(args=argv) - setup.maybe_install_dependencies( + install_third_party_libs.maybe_install_dependencies( parsed_args.skip_install, parsed_args.run_minified_tests) python_utils.PRINT('') python_utils.PRINT( diff --git a/scripts/run_performance_tests.py b/scripts/run_performance_tests.py index 54c156de7772..5478f7cddf99 100644 --- a/scripts/run_performance_tests.py +++ b/scripts/run_performance_tests.py @@ -38,6 +38,7 @@ from . import clean from . import common +from . import install_third_party_libs from . import setup from . import setup_gae @@ -88,7 +89,7 @@ def main(argv): setup_gae.main() parsed_args, _ = _PARSER.parse_known_args(args=argv) - setup.maybe_install_dependencies( + install_third_party_libs.maybe_install_dependencies( parsed_args.skip_install, parsed_args.run_minified_tests) if not common.is_port_open(8181): diff --git a/scripts/setup.py b/scripts/setup.py index 536ee2bd2e7d..e6781b94e4c9 100644 --- a/scripts/setup.py +++ b/scripts/setup.py @@ -25,9 +25,7 @@ import python_utils -from . import build from . import common -from . import install_third_party_libs def delete_directory_tree(directory_path): @@ -54,27 +52,6 @@ def create_directory(directory_path): os.makedirs(directory_path) -def maybe_install_dependencies( - skip_installing_third_party_libs, run_minified_tests): - """Parse additional command line arguments.""" - if skip_installing_third_party_libs is False: - # Install third party dependencies. - install_third_party_libs.main([]) - # Ensure that generated JS and CSS files are in place before running the - # tests. 
- python_utils.PRINT('') - python_utils.PRINT('Running build task with concatenation only') - python_utils.PRINT('') - build.build([]) - - if run_minified_tests is True: - python_utils.PRINT('') - python_utils.PRINT( - 'Running build task with concatenation and minification') - python_utils.PRINT('') - build.build(['--prod_env']) - - # This function takes a command for python as its only input. # It checks this input for a specific version of python and returns false # if it does not match the expected prefix. From b26d7dc42ca9da84043f25a2d72b85e86a7dc647 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Tue, 27 Aug 2019 20:31:30 +0530 Subject: [PATCH 048/141] fix --- scripts/setup.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/scripts/setup.py b/scripts/setup.py index e6781b94e4c9..5e4cbbe6b242 100644 --- a/scripts/setup.py +++ b/scripts/setup.py @@ -148,6 +148,9 @@ def main(): tar.extractall(path=common.OPPIA_TOOLS_DIR) tar.close() os.remove('node-download.tgz') + os.rename( + os.path.join(common.OPPIA_TOOLS_DIR, node_file_name), + common.NODE_PATH) # Change ownership of $NODE_MODULE_DIR. # Note: on some machines, these commands seem to take quite a long time. 
From 33a10cec24c88d8dbaaf4898e0d049f7a9eb7a85 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Tue, 27 Aug 2019 21:13:42 +0530 Subject: [PATCH 049/141] add node path --- scripts/common.py | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/common.py b/scripts/common.py index 7a78b9cf45e5..52df9b295812 100644 --- a/scripts/common.py +++ b/scripts/common.py @@ -39,6 +39,7 @@ OPPIA_TOOLS_DIR, 'google-cloud-sdk-251.0.0/google-cloud-sdk') NODE_PATH = os.path.join(OPPIA_TOOLS_DIR, 'node-10.15.3') FRONTEND_DIR = 'core/templates/dev/head' +os.environ['PATH'] = '%s/bin:' % NODE_PATH + os.environ['PATH'] def ensure_directory_exists(d): From 8ba23a541df0061501e6168192efba7310b01653 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Tue, 27 Aug 2019 22:25:56 +0530 Subject: [PATCH 050/141] fix --- scripts/common.py | 30 ++++++++++++++++++++++++++++++ scripts/run_performance_tests.py | 4 ++-- scripts/setup.py | 4 ++-- 3 files changed, 34 insertions(+), 4 deletions(-) diff --git a/scripts/common.py b/scripts/common.py index 52df9b295812..294a963deb2c 100644 --- a/scripts/common.py +++ b/scripts/common.py @@ -226,6 +226,36 @@ def run_command(command): return subprocess.check_output(command.split()) +def recursive_chown(path, uid, gid): + """Changes the owner and group id of all files in a path to the numeric + uid and gid. + + Args: + path: str. The path for which owner id and group id need to be setup. + uid: int. Owner ID to be set. + gid: int. Group ID to be set. + """ + for root, dirs, files in os.walk(path): + for directory in dirs: + os.chown(os.path.join(root, directory), uid, gid) + for filename in files: + os.chown(os.path.join(root, filename), uid, gid) + + +def recursive_chmod(path, mode): + """Changes the mode of path to the passed numeric mode. + + Args: + path: str. The path for which mode would be set. + mode: int. The mode to be set. 
+ """ + for root, dirs, files in os.walk(path): + for directory in dirs: + os.chmod(os.path.join(root, directory), mode) + for filename in files: + os.chmod(os.path.join(root, filename), mode) + + class CD(python_utils.OBJECT): """Context manager for changing the current working directory.""" diff --git a/scripts/run_performance_tests.py b/scripts/run_performance_tests.py index 5478f7cddf99..cc0bb2b75ecd 100644 --- a/scripts/run_performance_tests.py +++ b/scripts/run_performance_tests.py @@ -109,7 +109,7 @@ def main(argv): common.OPPIA_TOOLS_DIR, 'browsermob-proxy-2.1.1/bin/browsermob-proxy') # Change execute status of browsermob-proxy. - os.chmod(browsermob_proxy_path, 744) + common.recursive_chmod(browsermob_proxy_path, 744) # Start a demo server. background_process = subprocess.Popen(( @@ -148,7 +148,7 @@ def main(argv): run_performance_test('profile_page_test', xvfb_prefix) run_performance_test('splash_test', xvfb_prefix) - os.chmod(browsermob_proxy_path, 644) + common.recursive_chmod(browsermob_proxy_path, 644) clean.delete_file('bmp.log') clean.delete_file('server.log') diff --git a/scripts/setup.py b/scripts/setup.py index 5e4cbbe6b242..c37dea320418 100644 --- a/scripts/setup.py +++ b/scripts/setup.py @@ -154,8 +154,8 @@ def main(): # Change ownership of $NODE_MODULE_DIR. # Note: on some machines, these commands seem to take quite a long time. - os.chown('node_modules/', os.getuid(), -1) - os.chmod('node_modules/', 744) + common.recursive_chown('node_modules/', os.getuid(), -1) + common.recursive_chmod('node_modules/', 744) # Adjust path to support the default Chrome locations for Unix, Windows and # Mac OS. 
From db7a0031971c321452ab5ea8db4d23179b49d37d Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Tue, 27 Aug 2019 22:47:51 +0530 Subject: [PATCH 051/141] fix lint --- scripts/install_third_party_libs.py | 2 +- scripts/start.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/install_third_party_libs.py b/scripts/install_third_party_libs.py index ffecc906e76a..a4b12287d7ba 100644 --- a/scripts/install_third_party_libs.py +++ b/scripts/install_third_party_libs.py @@ -84,7 +84,7 @@ def pip_install(package, version, install_path): 'install', '%s==%s' % (package, version), '--target', install_path]) # For pip version > 10. else: - import pip._internal + import pip._internal # pylint: disable=no-name-in-module # pylint: disable=import-error pip._internal.main(args=[ # pylint: disable=protected-access 'install', '%s==%s' % (package, version), '--target', install_path]) diff --git a/scripts/start.py b/scripts/start.py index 0c51508e5a5d..bfdf9f727d94 100644 --- a/scripts/start.py +++ b/scripts/start.py @@ -138,7 +138,7 @@ def main(argv): background_processes.append(subprocess.Popen( 'node_modules/webpack/bin/webpack.js --config webpack.dev.config.ts' ' --watch'.split())) - # Give webpack few seconds to do the initíal compilation. + # Give webpack few seconds to do the initial compilation. 
time.sleep(10) python_utils.PRINT('Starting GAE development server') From f521c7ab8dadb78dff7cefb80ef750f294ad92ed Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Tue, 27 Aug 2019 23:08:06 +0530 Subject: [PATCH 052/141] add args --- scripts/run_backend_tests.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/run_backend_tests.py b/scripts/run_backend_tests.py index fbf8b4bbbe09..a0c48da7c489 100644 --- a/scripts/run_backend_tests.py +++ b/scripts/run_backend_tests.py @@ -112,7 +112,7 @@ def main(argv): 'node_modules/webpack/bin/webpack.js --config webpack.dev.config.ts' .split()) - backend_tests.main([]) + backend_tests.main(argv) if parsed_args.generate_coverage_report: subprocess.call(('python %s combine' % coverage_path).split()) From b6d5878423108c9ab4de28ccfb4b4811ba33a189 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Wed, 28 Aug 2019 00:00:00 +0530 Subject: [PATCH 053/141] fix lint --- scripts/install_third_party_libs.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/scripts/install_third_party_libs.py b/scripts/install_third_party_libs.py index a4b12287d7ba..62ede89ccbcc 100644 --- a/scripts/install_third_party_libs.py +++ b/scripts/install_third_party_libs.py @@ -84,10 +84,13 @@ def pip_install(package, version, install_path): 'install', '%s==%s' % (package, version), '--target', install_path]) # For pip version > 10. else: - import pip._internal # pylint: disable=no-name-in-module # pylint: disable=import-error + # pylint: disable=no-name-in-module + # pylint: disable=import-error + import pip._internal pip._internal.main(args=[ # pylint: disable=protected-access 'install', '%s==%s' % (package, version), '--target', install_path]) - + # pylint: enable=no-name-in-module + # pylint: enable=import-error def install_skulpt(argv): """Download and install Skulpt. 
Skulpt is built using a Python script From b912d6dcf5283ec2706faa5e7e504ce5cd26f4b3 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Wed, 28 Aug 2019 00:17:01 +0530 Subject: [PATCH 054/141] add path --- scripts/run_backend_tests.py | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/run_backend_tests.py b/scripts/run_backend_tests.py index a0c48da7c489..7ef94a3137e9 100644 --- a/scripts/run_backend_tests.py +++ b/scripts/run_backend_tests.py @@ -112,6 +112,7 @@ def main(argv): 'node_modules/webpack/bin/webpack.js --config webpack.dev.config.ts' .split()) + sys.path.append(os.path.join(common.OPPIA_TOOLS_DIR, 'webtest-2.0.33')) backend_tests.main(argv) if parsed_args.generate_coverage_report: From efabda0b54138c53304667d392a0fcf2a7ea0f4f Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Wed, 28 Aug 2019 00:45:55 +0530 Subject: [PATCH 055/141] address comments --- scripts/backend_tests.py | 2 +- scripts/build.py | 4 +-- scripts/build_test.py | 8 ++--- scripts/common.py | 16 +++++----- scripts/create_expression_parser.py | 9 ++---- scripts/install_third_party_libs.py | 10 +++--- scripts/pre_commit_hook.py | 2 +- scripts/pre_push_hook.py | 2 +- scripts/run_backend_tests.py | 49 +++-------------------------- scripts/run_e2e_tests.py | 41 +++++------------------- scripts/run_frontend_tests.py | 22 +++---------- scripts/run_performance_tests.py | 22 +++---------- scripts/run_presubmit_checks.py | 25 +++------------ scripts/run_tests.py | 13 +++----- scripts/start.py | 26 ++++++--------- scripts/vagrant_lock.py | 2 +- 16 files changed, 62 insertions(+), 191 deletions(-) diff --git a/scripts/backend_tests.py b/scripts/backend_tests.py index 789e95294d1a..a7f346879302 100644 --- a/scripts/backend_tests.py +++ b/scripts/backend_tests.py @@ -297,7 +297,7 @@ def _get_test_target_classes(path): return result -def main(argv): +def main(argv=None): """Run the tests.""" for directory in DIRS_TO_ADD_TO_SYS_PATH: if not os.path.exists(os.path.dirname(directory)): 
diff --git a/scripts/build.py b/scripts/build.py index 0b751250d032..e2ada3d9187b 100644 --- a/scripts/build.py +++ b/scripts/build.py @@ -1347,7 +1347,7 @@ def compile_typescript_files_continuously(project_dir): return -def build(argv): +def main(argv=None): """The main method of this script. Creates a third-party directory where all the JS and CSS dependencies are @@ -1393,4 +1393,4 @@ def build(argv): # The 'no coverage' pragma is used as this line is un-testable. This is because # it will only be called when build.py is used as a script. if __name__ == '__main__': # pragma: no cover - build(sys.argv) + main(sys.argv) diff --git a/scripts/build_test.py b/scripts/build_test.py index caa198848308..893d41546fac 100644 --- a/scripts/build_test.py +++ b/scripts/build_test.py @@ -919,7 +919,7 @@ def mock_compare_file_count(unused_first_dir, unused_second_dir): with ensure_files_exist_swap, build_using_webpack_swap, ( compile_typescript_files_swap), compare_file_count_swap, args_swap: - build.build([]) + build.main() self.assertEqual(check_function_calls, expected_check_function_calls) @@ -949,7 +949,7 @@ def mock_compile_typescript_files_continuously(unused_project_dir): with ensure_files_exist_swap, ( compile_typescript_files_continuously_swap), args_swap: - build.build([]) + build.main() self.assertEqual(check_function_calls, expected_check_function_calls) @@ -981,7 +981,7 @@ def mock_compile_typescript_files(unused_project_dir): with ensure_files_exist_swap, compile_typescript_files_swap, ( assert_raises_regexp_context_manager), args_swap: - build.build([]) + build.main() self.assertEqual(check_function_calls, expected_check_function_calls) @@ -993,6 +993,6 @@ def mock_check_call(cmd, **unused_kwargs): % (build.WEBPACK_FILE, build.WEBPACK_PROD_CONFIG)) with self.swap(subprocess, 'check_call', mock_check_call): - build.build_using_webpack() + build.main_using_webpack() # pylint: enable=protected-access diff --git a/scripts/common.py b/scripts/common.py index 
294a963deb2c..bc5e0baf2442 100644 --- a/scripts/common.py +++ b/scripts/common.py @@ -15,6 +15,7 @@ """Common utility functions and classes used by multiple Python scripts.""" from __future__ import absolute_import # pylint: disable=import-only-modules +import contextlib import os import signal import socket @@ -23,9 +24,9 @@ import python_utils -_PARENT_DIR = os.path.abspath(os.path.join(os.getcwd(), os.pardir)) -_PSUTIL_PATH = os.path.join(_PARENT_DIR, 'oppia_tools', 'psutil-5.6.3') -sys.path.insert(0, _PSUTIL_PATH) +PARENT_DIR = os.path.abspath(os.path.join(os.getcwd(), os.pardir)) +PSUTIL_PATH = os.path.join(PARENT_DIR, 'oppia_tools', 'psutil-5.6.3') +sys.path.insert(0, PSUTIL_PATH) import psutil # isort:skip # pylint: disable=wrong-import-position @@ -183,7 +184,7 @@ def ensure_release_scripts_folder_exists_and_is_up_to_date(): subprocess.call(['git', 'pull', remote_alias]) -def is_port_open(port): +def is_port_close(port): """Checks if no process is listening to the port. Args: @@ -192,10 +193,9 @@ def is_port_open(port): Return: bool. True if port is open else False. """ - sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - result = sock.connect_ex(('localhost', port)) - sock.close() - return bool(result) + with contextlib.closing( + socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as s: + return bool(s.connect_ex(('localhost', port))) # Credits: https://stackoverflow.com/a/20691431/11755830 diff --git a/scripts/create_expression_parser.py b/scripts/create_expression_parser.py index 355cd666371c..2c74b442e8ec 100644 --- a/scripts/create_expression_parser.py +++ b/scripts/create_expression_parser.py @@ -12,12 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""INSTRUCTIONS: -Run this script from the oppia root folder: - python -m scripts.create_expression_parser -The root folder MUST be named 'oppia'. -It produces the expression parser. 
-""" +"""It produces the expression parser.""" from __future__ import absolute_import # pylint: disable=import-only-modules import fileinput @@ -42,7 +37,7 @@ def main(): 'core/templates/dev/head/expressions/ExpressionParserService.js') # Install the basic environment, e.g. nodejs. - install_third_party_libs.main([]) + install_third_party_libs.main() python_utils.PRINT( 'Checking whether pegjs is installed in %s' % common.OPPIA_TOOLS_DIR) diff --git a/scripts/install_third_party_libs.py b/scripts/install_third_party_libs.py index 62ede89ccbcc..09225b6e1c38 100644 --- a/scripts/install_third_party_libs.py +++ b/scripts/install_third_party_libs.py @@ -198,17 +198,17 @@ def maybe_install_dependencies( python_utils.PRINT('') python_utils.PRINT('Running build task with concatenation only') python_utils.PRINT('') - build.build([]) + build.main() if run_minified_tests is True: python_utils.PRINT('') python_utils.PRINT( 'Running build task with concatenation and minification') python_utils.PRINT('') - build.build(['--prod_env']) + build.main(argv=['--prod_env']) -def main(argv): +def main(argv=None): """Install third-party libraries for Oppia.""" setup.main() pip_dependencies = [ @@ -250,11 +250,11 @@ def main(argv): install_skulpt(argv) # Install pre-commit script. python_utils.PRINT('Installing pre-commit hook for git') - pre_commit_hook.main(['--install']) + pre_commit_hook.main(argv=['--install']) # Install pre-push script. python_utils.PRINT('Installing pre-push hook for git') - pre_push_hook.main(['--install']) + pre_push_hook.main(argv=['--install']) if __name__ == '__main__': diff --git a/scripts/pre_commit_hook.py b/scripts/pre_commit_hook.py index 061ad6ad1038..327f5a91e0ca 100755 --- a/scripts/pre_commit_hook.py +++ b/scripts/pre_commit_hook.py @@ -125,7 +125,7 @@ def _revert_changes_in_package_lock_file(): raise ValueError(err_unstage_cmd) -def main(argv): +def main(argv=None): """Main method for pre-commit hook that checks files added/modified in a commit. 
""" diff --git a/scripts/pre_push_hook.py b/scripts/pre_push_hook.py index 42864814aec1..3807e67c2c3f 100755 --- a/scripts/pre_push_hook.py +++ b/scripts/pre_push_hook.py @@ -355,7 +355,7 @@ def _does_diff_include_package_json(files_to_lint): return False -def main(argv): +def main(argv=None): """Main method for pre-push hook that executes the Python/JS linters on all files that deviate from develop. """ diff --git a/scripts/run_backend_tests.py b/scripts/run_backend_tests.py index 7ef94a3137e9..278d02ececb9 100644 --- a/scripts/run_backend_tests.py +++ b/scripts/run_backend_tests.py @@ -12,48 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""INSTRUCTIONS: - -Run this script from the oppia root folder: - python -m scripts.run_backend_tests - -It runs all the (Python) backend tests, in parallel. - -===================== -CUSTOMIZATION OPTIONS -===================== - -(1) Generate a coverage report by adding the argument - - --generate_coverage_report - -but note that this will slow down the tests by a factor of 1.5 or more. - -(2) Append a test target to make the script run all tests in a given module -or class, or run a particular test. For example, appending - - --test_target='foo.bar.Baz' - -runs all tests in test class Baz in the foo/bar.py module, and appending - - --test_target='foo.bar.Baz.quux' - -runs the test method quux in the test class Baz in the foo/bar.py module. - -(3) Append a test path to make the script run all tests in a given -subdirectory. For example, appending - - --test_path='core/controllers' - -runs all tests in the core/controllers/ directory. - -(4) Enable the verbose log by add the argument. It will display the outputs of - the tests being run. - - --verbose or -v - -IMPORTANT: Only one of --test_path and --test_target should be specified. 
-""" +"""It runs all the (Python) backend tests, in parallel.""" from __future__ import absolute_import # pylint: disable=import-only-modules import argparse @@ -77,13 +36,13 @@ action='store_true') -def main(argv): +def main(argv=None): """Runs the backend tests.""" setup.main() setup_gae.main() # Install third party dependencies. - install_third_party_libs.main([]) + install_third_party_libs.main() coverage_home = os.path.join(common.OPPIA_TOOLS_DIR, 'coverage-4.5.4') coverage_path = os.path.join(coverage_home, 'coverage') @@ -100,7 +59,7 @@ def main(argv): 'coverage', '4.5.4', os.path.join(common.OPPIA_TOOLS_DIR, 'coverage-4.5.4')) - build.build([]) + build.main() # Compile typescript files. python_utils.PRINT('Compiling typescript...') diff --git a/scripts/run_e2e_tests.py b/scripts/run_e2e_tests.py index d73f66607ae3..89fbab5b85b5 100644 --- a/scripts/run_e2e_tests.py +++ b/scripts/run_e2e_tests.py @@ -12,34 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""INSTRUCTIONS: - -Run this script from the oppia root folder: - bash scripts/run_e2e_tests.sh - -Optional arguments: - --browserstack Run the tests on browserstack using the - protractor-browserstack.conf.js file. - --skip-install=true/false If true, skips installing dependencies. The - default value is false. - --sharding=true/false Disables/Enables parallelization of protractor tests. - --sharding-instances=# Sets the number of parallel browsers to open while - sharding. - --prod_env Run the tests in prod mode. Static resources are served from - build directory and use cache slugs. -Sharding must be disabled (either by passing in false to --sharding or 1 to ---sharding-instances) if running any tests in isolation (fit or fdescribe). - --suite=suite_name Performs test for different suites, here suites are the - name of the test files present in core/tests/protractor_desktop/ and - core/test/protractor/ dirs. e.g. 
for the file - core/tests/protractor/accessibility.js use --suite=accessibility. - For performing a full test, no argument is required. - -The root folder MUST be named 'oppia'. - -Note: You can replace 'it' with 'fit' or 'describe' with 'fdescribe' to run a -single test or test suite. -""" +"""Runs the end to end tests.""" from __future__ import absolute_import # pylint: disable=import-only-modules import argparse @@ -101,7 +74,7 @@ def cleanup(): # Wait for the servers to go down; suppress 'connection refused' error # output from nc since that is exactly what we are expecting to happen. - while not common.is_port_open(4444) or not common.is_port_open(9001): + while not common.is_port_close(4444) or not common.is_port_close(9001): time.sleep(1) if os.path.isdir('../protractor-screenshots'): @@ -116,7 +89,7 @@ def cleanup(): python_utils.PRINT('Done!') -def main(argv): +def main(argv=None): """Runs the end to end tests.""" setup.main() setup_gae.main() @@ -127,7 +100,7 @@ def main(argv): install_third_party_libs.maybe_install_dependencies( parsed_args.skip_install, parsed_args.run_minified_tests) - if not common.is_port_open(8181): + if not common.is_port_close(8181): python_utils.PRINT('') python_utils.PRINT( 'There is already a server running on localhost:8181.') @@ -151,7 +124,7 @@ def main(argv): python_utils.PRINT( re.sub(r'\'DEV_MODE\': .*', constants_env_variable, line), end='') - build.build(['--prod_env']) + build.main(argv=['--prod_env']) app_yaml_filepath = 'app.yaml' else: dev_mode = 'true' @@ -161,7 +134,7 @@ def main(argv): python_utils.PRINT( re.sub('\'DEV_MODE\': .*', constants_env_variable, line), end='') - build.build([]) + build.main() app_yaml_filepath = 'app_dev.yaml' # Start a selenium server using chromedriver 2.41. @@ -190,7 +163,7 @@ def main(argv): % (common.GOOGLE_APP_ENGINE_HOME, app_yaml_filepath)).split())) # Wait for the servers to come up. 
- while common.is_port_open(4444) or common.is_port_open(9001): + while common.is_port_close(4444) or common.is_port_close(9001): time.sleep(1) # Delete outdated screenshots. diff --git a/scripts/run_frontend_tests.py b/scripts/run_frontend_tests.py index 94401d48e75d..f57e29622168 100644 --- a/scripts/run_frontend_tests.py +++ b/scripts/run_frontend_tests.py @@ -12,21 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""INSTRUCTIONS: -Run this script from the oppia root folder: - python -m scripts.run_frontend_tests - -Optional arguments: - --skip_install. If specified, skips installing dependencies. - --run_minified_tests. If specified, runs frontend karma tests on both - minified and non-minified code. - -The root folder MUST be named 'oppia'. -It runs unit tests for frontend JavaScript code (using Karma). - -Note: You can replace 'it' with 'fit' or 'describe' with 'fdescribe' to run a -single test or test suite. -""" +"""It runs unit tests for frontend JavaScript code (using Karma).""" from __future__ import absolute_import # pylint: disable=import-only-modules import argparse @@ -53,7 +39,7 @@ action='store_true') -def main(argv): +def main(argv=None): """Runs the frontend tests.""" setup.main() setup_gae.main() @@ -75,7 +61,7 @@ def main(argv): python_utils.PRINT('Running test in development environment') python_utils.PRINT('') - build.build([]) + build.main() start_tests_cmd = ( '%s node_modules/karma/bin/karma start core/tests/karma.conf.ts' @@ -87,7 +73,7 @@ def main(argv): python_utils.PRINT('Running test in production environment') python_utils.PRINT('') - build.build(['--prod_env', '--minify_third_party_libs_only']) + build.main(argv=['--prod_env', '--minify_third_party_libs_only']) start_tests_cmd = ( '%s node_modules/karma/bin/karma start ' diff --git a/scripts/run_performance_tests.py b/scripts/run_performance_tests.py index cc0bb2b75ecd..8c729cd2e49a 100644 --- 
a/scripts/run_performance_tests.py +++ b/scripts/run_performance_tests.py @@ -12,19 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""INSTRUCTIONS: - -The root folder MUST be named 'oppia'. - -Run all tests sequentially: -- run python -m scripts.run_performance_tests without args in order to run all - tests sequentially. - -Run test for a specific page: -- run python -m scripts.run_performance_tests --test_name=page_test - -page_test is the name of the file containing that test eg. splash_test. -""" +"""Runs the performance tests.""" from __future__ import absolute_import # pylint: disable=import-only-modules import argparse @@ -64,7 +52,7 @@ def cleanup(): # Wait for the servers to go down; suppress 'connection refused' error # output from nc since that is exactly what we are expecting to happen. - while not common.is_port_open(9501): + while not common.is_port_close(9501): time.sleep(1) python_utils.PRINT('Done!') @@ -83,7 +71,7 @@ def run_performance_test(test_name, xvfb_prefix): % (xvfb_prefix, test_name)).split()) -def main(argv): +def main(argv=None): """Main function to run the performance tests.""" setup.main() setup_gae.main() @@ -92,7 +80,7 @@ def main(argv): install_third_party_libs.maybe_install_dependencies( parsed_args.skip_install, parsed_args.run_minified_tests) - if not common.is_port_open(8181): + if not common.is_port_close(8181): python_utils.PRINT( 'There is already a server running on localhost:8181') python_utils.PRINT( @@ -119,7 +107,7 @@ def main(argv): % common.GOOGLE_APP_ENGINE_HOME).split()) # Wait for the servers to come up. 
- while common.is_port_open(9501): + while common.is_port_close(9501): time.sleep(1) # Install xvfb if not on travis, Used in frontend, e2e tests and performance diff --git a/scripts/run_presubmit_checks.py b/scripts/run_presubmit_checks.py index 5b648931bd90..7a2a35c146df 100644 --- a/scripts/run_presubmit_checks.py +++ b/scripts/run_presubmit_checks.py @@ -12,12 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""INSTRUCTIONS: - -Run this script from the oppia root folder prior to opening a PR: - python -m scripts.run_presubmit_checks - -It runs the following tests in all cases. +"""It runs the following tests in all cases. - Javascript and Python Linting - Backend Python tests @@ -27,18 +22,6 @@ Note: The test scripts are arranged in increasing order of time taken. This enables a broken build to be detected as quickly as possible. - -===================== -CUSTOMIZATION OPTIONS -===================== - -Set the origin branch to compare against by adding - - --branch=your_branch or -b=your_branch - -By default, if the current branch tip exists on remote origin, -the current branch is compared against its tip on GitHub. -Otherwise it's compared against 'develop'. """ from __future__ import absolute_import # pylint: disable=import-only-modules @@ -58,7 +41,7 @@ help='optional; if specified, the origin branch to compare against.') -def main(argv): +def main(argv=None): """Run the presubmit checks.""" # Run Javascript and Python linters. @@ -91,7 +74,7 @@ def main(argv): if common.FRONTEND_DIR in all_changed_files: # Run frontend unit tests. python_utils.PRINT('Running frontend unit tests') - run_frontend_tests.main(['--run_minified_tests']) + run_frontend_tests.main(argv=['--run_minified_tests']) python_utils.PRINT('Frontend tests passed.') python_utils.PRINT('') else: @@ -101,7 +84,7 @@ def main(argv): # Run backend tests. 
python_utils.PRINT('Running backend tests') - run_backend_tests.main([]) + run_backend_tests.main() python_utils.PRINT('Backend tests passed.') python_utils.PRINT('') diff --git a/scripts/run_tests.py b/scripts/run_tests.py index edc25a84d18a..360609139a38 100644 --- a/scripts/run_tests.py +++ b/scripts/run_tests.py @@ -12,12 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""INSTRUCTIONS: - -Run this script from the oppia root folder: - python -m scripts.run_tests - -It runs all the tests, in this order: +"""It runs all the tests, in this order: - Frontend Karma unit tests - Backend Python tests - End-to-end Protractor tests @@ -45,19 +40,19 @@ def main(): # Run frontend unit tests. python_utils.PRINT('Running frontend unit tests') - run_frontend_tests.main([]) + run_frontend_tests.main() python_utils.PRINT('Frontend tests passed.') python_utils.PRINT('') # Run backend tests. python_utils.PRINT('Running backend tests') - run_backend_tests.main([]) + run_backend_tests.main() python_utils.PRINT('Backend tests passed.') python_utils.PRINT('') # Run end-to-end tests. python_utils.PRINT('Running end-to-end tests') - run_e2e_tests.main([]) + run_e2e_tests.main() python_utils.PRINT('') python_utils.PRINT( diff --git a/scripts/start.py b/scripts/start.py index bfdf9f727d94..068fd21ace0b 100644 --- a/scripts/start.py +++ b/scripts/start.py @@ -12,17 +12,9 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""INSTRUCTIONS: - -This script starts up a development server running Oppia. It installs any +"""This script starts up a development server running Oppia. It installs any missing third-party dependencies and starts up a local GAE development server. - -Run the script from the oppia root folder: - - python -m scripts.start - -Note that the root folder MUST be named 'oppia'. 
""" from __future__ import absolute_import # pylint: disable=import-only-modules @@ -69,14 +61,14 @@ def cleanup(): python_utils.PRINT('INFORMATION') python_utils.PRINT('Cleaning up the servers.') python_utils.PRINT('') - while not common.is_port_open(8181): + while not common.is_port_close(8181): time.sleep(1) -def main(argv): +def main(argv=None): """Starts up a development server running Oppia.""" if os.path.isfile('/etc/is_vagrant_vm'): - vagrant_lock.main([]) + vagrant_lock.main() setup.main() setup_gae.main() @@ -85,12 +77,12 @@ def main(argv): atexit.register(cleanup) # Install third party dependencies. - install_third_party_libs.main([]) + install_third_party_libs.main() python_utils.PRINT('Oppia setup complete!') # Check that there isn't a server already running. - if not common.is_port_open(8181): + if not common.is_port_close(8181): python_utils.PRINT('') python_utils.PRINT('WARNING') python_utils.PRINT( @@ -111,7 +103,7 @@ def main(argv): python_utils.PRINT( re.sub( '\'DEV_MODE\': .*', constants_env_variable, line), end='') - build.build(['--prod_env', '--enable_watcher']) + build.main(argv=['--prod_env', '--enable_watcher']) app_yaml_filepath = 'app.yaml' else: constants_env_variable = '\'DEV_MODE\': true' @@ -120,7 +112,7 @@ def main(argv): python_utils.PRINT( re.sub( '\'DEV_MODE\': .*', constants_env_variable, line), end='') - build.build(['--enable_watcher']) + build.main(argv=['--enable_watcher']) app_yaml_filepath = 'app_dev.yaml' # Set up a local dev instance. @@ -150,7 +142,7 @@ def main(argv): enable_console_arg, app_yaml_filepath)).split())) # Wait for the servers to come up. - while common.is_port_open(8181): + while common.is_port_close(8181): time.sleep(1) os_info = os.uname() diff --git a/scripts/vagrant_lock.py b/scripts/vagrant_lock.py index 7a81600cbe9f..7fdb86dbfd1f 100644 --- a/scripts/vagrant_lock.py +++ b/scripts/vagrant_lock.py @@ -30,7 +30,7 @@ from . 
import clean -def main(argv): +def main(argv=None): """Creates a lockfile.""" vagrant_lock_file = './.lock' From 899228a706e5f26103b938ffddca7b6cbc45280f Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Wed, 28 Aug 2019 03:03:00 +0530 Subject: [PATCH 056/141] address comments --- scripts/common.py | 26 +++++++++ scripts/create_expression_parser.py | 17 +++--- scripts/install_chrome_on_travis.py | 2 +- scripts/install_third_party_libs.py | 71 +++++++++++----------- scripts/run_backend_tests.py | 21 ++++--- scripts/run_e2e_tests.py | 77 ++++++++++++------------ scripts/run_frontend_tests.py | 32 ++++------ scripts/run_performance_tests.py | 28 +++++---- scripts/run_presubmit_checks.py | 7 +-- scripts/run_tests.py | 3 - scripts/setup.py | 47 ++++++--------- scripts/start.py | 91 +++++++++++++---------------- scripts/vagrant_lock.py | 16 ++--- 13 files changed, 214 insertions(+), 224 deletions(-) diff --git a/scripts/common.py b/scripts/common.py index bc5e0baf2442..abe49834730d 100644 --- a/scripts/common.py +++ b/scripts/common.py @@ -40,6 +40,7 @@ OPPIA_TOOLS_DIR, 'google-cloud-sdk-251.0.0/google-cloud-sdk') NODE_PATH = os.path.join(OPPIA_TOOLS_DIR, 'node-10.15.3') FRONTEND_DIR = 'core/templates/dev/head' +NPM_PATH = os.path.join(NODE_PATH, 'bin/npm') os.environ['PATH'] = '%s/bin:' % NODE_PATH + os.environ['PATH'] @@ -256,6 +257,31 @@ def recursive_chmod(path, mode): os.chmod(os.path.join(root, filename), mode) +def print_string_after_two_new_lines(strings): + """Prints each string after two new lines. + + Args: + strings: list(str). The strings to print. + """ + for string in strings: + python_utils.PRINT('%s\n' % string) + + +def install_npm_library(library, version, path): + """Installs the npm library after ensuring its not already installed. + + Args: + library: str. The library name. + version: str. The library version. + path: str. The installation path for the library. 
+ """ + python_utils.PRINT( + 'Checking whether %s is installed in %s' % (library, path)) + if not os.path.exists('node_modules/%s' % library): + python_utils.PRINT('Installing %s' % library) + subprocess.call([NPM_PATH, 'install', '%s@%s' % (library, version)]) + + class CD(python_utils.OBJECT): """Context manager for changing the current working directory.""" diff --git a/scripts/create_expression_parser.py b/scripts/create_expression_parser.py index 2c74b442e8ec..d806d362fc7d 100644 --- a/scripts/create_expression_parser.py +++ b/scripts/create_expression_parser.py @@ -39,18 +39,15 @@ def main(): # Install the basic environment, e.g. nodejs. install_third_party_libs.main() - python_utils.PRINT( - 'Checking whether pegjs is installed in %s' % common.OPPIA_TOOLS_DIR) - if not os.path.exists('node_modules/pegjs'): - python_utils.PRINT('Installing pegjs') - subprocess.call(( - '%s/bin/npm install pegjs@0.8.0' % common.NODE_PATH).split()) + common.install_npm_library('pegjs', '0.8.0', common.OPPIA_TOOLS_DIR) - subprocess.call(( - 'node_modules/pegjs/bin/pegjs %s %s' - % (expression_parser_definition, expression_parser_js)).split()) + subprocess.call([ + 'node_modules/pegjs/bin/pegjs', + expression_parser_definition, expression_parser_js]) for line in fileinput.input(files=[expression_parser_js], inplace=True): + # Inside this loop the STDOUT will be redirected to the file. + # The end='' is needed to avoid double line breaks. python_utils.PRINT( re.sub( r'module\.exports.*$', @@ -59,6 +56,8 @@ def main(): line), end='') for line in fileinput.input(files=[expression_parser_js], inplace=True): + # Inside this loop the STDOUT will be redirected to the file. + # The end='' is needed to avoid double line breaks. 
python_utils.PRINT( re.sub(r'^})();\s*$', '}]);', line), end='') diff --git a/scripts/install_chrome_on_travis.py b/scripts/install_chrome_on_travis.py index d9d47e69acbf..dc34b0f5fc45 100644 --- a/scripts/install_chrome_on_travis.py +++ b/scripts/install_chrome_on_travis.py @@ -45,7 +45,7 @@ def main(): os.chdir(oppia_dir) python_utils.PRINT('Installing %s' % travis_chrome_path) - subprocess.call(('sudo dpkg -i %s' % travis_chrome_path).split()) + subprocess.call(['sudo', 'dpkg', '-i', travis_chrome_path]) if __name__ == '__main__': diff --git a/scripts/install_third_party_libs.py b/scripts/install_third_party_libs.py index 09225b6e1c38..e93186089fa0 100644 --- a/scripts/install_third_party_libs.py +++ b/scripts/install_third_party_libs.py @@ -54,21 +54,19 @@ def pip_install(package, version, install_path): python_utils.PRINT('Checking if pip is installed on the local machine') import pip except ImportError: - python_utils.PRINT( + common.print_string_after_two_new_lines([ 'Pip is required to install Oppia dependencies, but pip wasn\'t ' - 'found') - python_utils.PRINT('on your local machine.') - python_utils.PRINT('') - python_utils.PRINT( + 'found', + 'on your local machine.', 'Please see \'Installing Oppia\' on the Oppia developers\' wiki ' - 'page:') + 'page:']) os_info = os.uname() - if os_info[0] != 'Darwin': + if os_info[0] == 'Darwin': python_utils.PRINT( 'https://github.com/oppia/oppia/wiki/Installing-Oppia-%28Mac-' 'OS%29') - elif os_info[0] != 'Linux': + elif os_info[0] == 'Linux': python_utils.PRINT( 'https://github.com/oppia/oppia/wiki/Installing-Oppia-%28Linux' '%29') @@ -96,7 +94,7 @@ def install_skulpt(argv): """Download and install Skulpt. Skulpt is built using a Python script included within the Skulpt repository (skulpt.py). This script normally requires GitPython, however the patches to it below - (with the sed operations) lead to it no longer being required. The Python + (with the fileinput.replace) lead to it no longer being required. 
The Python script is used to avoid having to manually recreate the Skulpt dist build process in install_third_party.py. Note that skulpt.py will issue a warning saying its dist command will not work properly without GitPython, @@ -119,12 +117,12 @@ def install_skulpt(argv): os.chdir(common.OPPIA_TOOLS_DIR) os.mkdir('skulpt-0.10.0') os.chdir('skulpt-0.10.0') - subprocess.call( - 'git clone https://github.com/skulpt/skulpt'.split()) + subprocess.call([ + 'git', 'clone', 'https://github.com/skulpt/skulpt']) os.chdir('skulpt') # Use a specific Skulpt release. - subprocess.call('git checkout 0.10.0'.split()) + subprocess.call(['git', 'checkout', '0.10.0']) python_utils.PRINT('Compiling Skulpt') # The Skulpt setup function needs to be tweaked. It fails without @@ -136,8 +134,7 @@ def install_skulpt(argv): common.OPPIA_TOOLS_DIR, 'skulpt-0.10.0/skulpt/skulpt.py')], inplace=True): # Inside this loop the STDOUT will be redirected to the file. - # The comma after each python_utils.PRINT statement is needed to - # avoid double line breaks. + # The end='' is needed to avoid double line breaks. python_utils.PRINT( line.replace('ret = test()', 'ret = 0'), end='') @@ -146,6 +143,8 @@ def install_skulpt(argv): files=[os.path.join( common.OPPIA_TOOLS_DIR, 'skulpt-0.10.0/skulpt/skulpt.py')], inplace=True): + # Inside this loop the STDOUT will be redirected to the file. + # The end='' is needed to avoid double line breaks. python_utils.PRINT( line.replace(' doc()', ' pass#doc()'), end='') @@ -172,10 +171,9 @@ def install_skulpt(argv): line.replace('ret = rununits(opt=True)', 'ret = 0'), end='') - subprocess.call(( - 'python %s dist' % os.path.join( - common.OPPIA_TOOLS_DIR, 'skulpt-0.10.0/skulpt/skulpt.py')) - .split()) + subprocess.call([ + 'python', 'dist', os.path.join( + common.OPPIA_TOOLS_DIR, 'skulpt-0.10.0/skulpt/skulpt.py')]) # Return to the Oppia root folder. 
os.chdir(common.CURR_DIR) @@ -192,22 +190,35 @@ def maybe_install_dependencies( """Parse additional command line arguments.""" if skip_installing_third_party_libs is False: # Install third party dependencies. - main([]) + main() # Ensure that generated JS and CSS files are in place before running the # tests. - python_utils.PRINT('') python_utils.PRINT('Running build task with concatenation only') - python_utils.PRINT('') build.main() if run_minified_tests is True: - python_utils.PRINT('') python_utils.PRINT( 'Running build task with concatenation and minification') - python_utils.PRINT('') build.main(argv=['--prod_env']) +def ensure_pip_library_is_installed(package, version, path): + """Installs the pip library after ensuring its not already installed. + + Args: + package: str. The package name. + version: str. The package version. + path: str. The installation path for the package. + """ + python_utils.PRINT( + 'Checking if %s is installed in %s' % (package, path)) + + exact_lib_path = os.path.join(path, '%s-%s' % (package, version)) + if not os.path.exists(exact_lib_path): + python_utils.PRINT('Installing %s' % package) + pip_install(package, version, exact_lib_path) + + def main(argv=None): """Install third-party libraries for Oppia.""" setup.main() @@ -227,27 +238,21 @@ def main(argv=None): ] for package, version, path in pip_dependencies: - python_utils.PRINT( - 'Checking if %s is installed in %s' % (package, path)) - - exact_lib_path = os.path.join(path, '%s-%s' % (package, version)) - if not os.path.exists(exact_lib_path): - python_utils.PRINT('Installing %s' % package) - pip_install(package, version, exact_lib_path) + ensure_pip_library_is_installed(package, version, path) # Download and install required JS and zip files. python_utils.PRINT('Installing third-party JS libraries and zip files.') install_third_party.main() # Install third-party node modules needed for the build process. 
- subprocess.call(( - '%s/bin/npm install --only=dev' % common.NODE_PATH).split()) + subprocess.call([common.NPM_PATH, 'install', '--only=dev']) # This line removes the 'npm ERR! missing:' messages. For reference, see # this thread: https://github.com/npm/npm/issues/19393#issuecomment- # 374076889. - subprocess.call(('%s/bin/npm dedupe' % common.NODE_PATH).split()) + subprocess.call([common.NPM_PATH, 'dedupe']) install_skulpt(argv) + # Install pre-commit script. python_utils.PRINT('Installing pre-commit hook for git') pre_commit_hook.main(argv=['--install']) diff --git a/scripts/run_backend_tests.py b/scripts/run_backend_tests.py index 278d02ececb9..1a8ae946e941 100644 --- a/scripts/run_backend_tests.py +++ b/scripts/run_backend_tests.py @@ -63,26 +63,25 @@ def main(argv=None): # Compile typescript files. python_utils.PRINT('Compiling typescript...') - subprocess.call( - 'node_modules/typescript/bin/tsc --project .'.split()) + subprocess.call(['node_modules/typescript/bin/tsc', '--project', '.']) python_utils.PRINT('Compiling webpack...') - subprocess.call( - 'node_modules/webpack/bin/webpack.js --config webpack.dev.config.ts' - .split()) + subprocess.call([ + 'node_modules/webpack/bin/webpack.js', '--config', + 'webpack.dev.config.ts']) sys.path.append(os.path.join(common.OPPIA_TOOLS_DIR, 'webtest-2.0.33')) backend_tests.main(argv) if parsed_args.generate_coverage_report: - subprocess.call(('python %s combine' % coverage_path).split()) - subprocess.call( - ('python %s report --omit="%s*","third_party/*","/usr/share/*" ' - '--show-missing' - % (coverage_path, common.OPPIA_TOOLS_DIR)).split()) + subprocess.call(['python', coverage_path, 'combine']) + subprocess.call([ + 'python', coverage_path, 'report', + '--omit="%s*","third_party/*","/usr/share/*"' + % common.OPPIA_TOOLS_DIR, '--show-missing']) python_utils.PRINT('Generating xml coverage report...') - subprocess.call(('python %s xml' % coverage_path).split()) + subprocess.call(['python', coverage_path, 'xml']) 
python_utils.PRINT('') python_utils.PRINT('Done!') diff --git a/scripts/run_e2e_tests.py b/scripts/run_e2e_tests.py index 89fbab5b85b5..61a4516bd775 100644 --- a/scripts/run_e2e_tests.py +++ b/scripts/run_e2e_tests.py @@ -121,6 +121,8 @@ def main(argv=None): constants_env_variable = '\'DEV_MODE\': false' for line in fileinput.input( files=['assets/constants.ts'], inplace=True): + # Inside this loop the STDOUT will be redirected to the file. + # The end='' is needed to avoid double line breaks. python_utils.PRINT( re.sub(r'\'DEV_MODE\': .*', constants_env_variable, line), end='') @@ -131,6 +133,8 @@ def main(argv=None): constants_env_variable = '\'DEV_MODE\': true' for line in fileinput.input( files=['assets/constants.ts'], inplace=True): + # Inside this loop the STDOUT will be redirected to the file. + # The end='' is needed to avoid double line breaks. python_utils.PRINT( re.sub('\'DEV_MODE\': .*', constants_env_variable, line), end='') @@ -141,26 +145,26 @@ def main(argv=None): # The 'detach' option continues the flow once the server is up and runnning. # The 'quiet' option prints only the necessary information about the server # start-up process. - subprocess.call( - 'node_modules/.bin/webdriver-manager update --versions.chrome 2.41' - .split()) - subprocess.call( - 'node_modules/.bin/webdriver-manager start --versions.chrome 2.41 ' - '--detach --quiet'.split()) + subprocess.call([ + 'node_modules/.bin/webdriver-manager', 'update', + '--versions.chrome', '2.41']) + subprocess.call([ + 'node_modules/.bin/webdriver-manager', 'start', + '--versions.chrome 2.41', '--detach --quiet']) # Start a selenium process. The program sends thousands of lines of useless # info logs to stderr so we discard them. # TODO(jacob): Find a webdriver or selenium argument that controls log # level. 
background_processes = [] - background_processes.append(subprocess.Popen( - 'node_modules/.bin/webdriver-manager start 2>/dev/null'.split())) + background_processes.append(subprocess.Popen([ + 'node_modules/.bin/webdriver-manager', 'start', '2>/dev/null'])) # Start a demo server. - background_processes.append(subprocess.Popen( - ('python %s/dev_appserver.py --host=0.0.0.0 --port=9001 ' - '--clear_datastore=yes --dev_appserver_log_level=critical ' - '--log_level=critical --skip_sdk_update_check=true $%s' - % (common.GOOGLE_APP_ENGINE_HOME, app_yaml_filepath)).split())) + background_processes.append(subprocess.Popen([ + 'python', '%s/dev_appserver.py' % common.GOOGLE_APP_ENGINE_HOME, + '--host=0.0.0.0', '--port=9001', '--clear_datastore=yes', + '--dev_appserver_log_level=critical', '--log_level=critical', + '--skip_sdk_update_check=true', app_yaml_filepath])) # Wait for the servers to come up. while common.is_port_close(4444) or common.is_port_close(9001): @@ -177,36 +181,33 @@ def main(argv=None): # TODO(bhenning): Figure out if this is a bug with protractor. 
if not parsed_args.browserstack: if not parsed_args.sharding or parsed_args.sharding_instances == '1': - subprocess.call(( - 'node_modules/protractor/bin/protractor ' - 'core/tests/protractor.conf.js --suite %s --params.devMode="%s"' - % (parsed_args.suite, dev_mode)).split()) + subprocess.call([ + 'node_modules/protractor/bin/protractor', + 'core/tests/protractor.conf.js', '--suite', parsed_args.suite, + '--params.devMode="%s"' % dev_mode]) else: - subprocess.call(( - 'node_modules/protractor/bin/protractor ' - 'core/tests/protractor.conf.js --capabilities.shardTestFiles=%s' - ' --capabilities.maxInstances=%s --suite %s ' - '--params.devMode="%s"' - % ( - parsed_args.sharding, parsed_args.sharding_instances, - parsed_args.suite, dev_mode)).split()) + subprocess.call([ + 'node_modules/protractor/bin/protractor', + 'core/tests/protractor.conf.js', + '--capabilities.shardTestFiles=%s' % parsed_args.sharding, + '--capabilities.maxInstances=%s' + % parsed_args.sharding_instances, '--suite', parsed_args.suite, + '--params.devMode="%s"' % devMode]) else: python_utils.PRINT('Running the tests on browserstack...') if not parsed_args.sharding or parsed_args.sharding_instances == '1': - subprocess.call( - ('node_modules/protractor/bin/protractor ' - 'core/tests/protractor-browserstack.conf.js --suite %s ' - '--params.devMode="%s"' - % (parsed_args.suite, dev_mode)).split()) + subprocess.call([ + 'node_modules/protractor/bin/protractor', + 'core/tests/protractor-browserstack.conf.js', '--suite', + parsed_args.suite, '--params.devMode="%s"' % dev_mode]) else: - subprocess.call(( - 'node_modules/protractor/bin/protractor ' - 'core/tests/protractor-browserstack.conf.js ' - '--capabilities.shardTestFiles=%s --capabilities.maxInstances=' - '%s --suite %s --params.devMode="%s"' - % ( - parsed_args.sharding, parsed_args.sharding_instances, - parsed_args.suite, dev_mode)).split()) + subprocess.call([ + 'node_modules/protractor/bin/protractor', + 
'core/tests/protractor-browserstack.conf.js', + '--capabilities.shardTestFiles=%s' % parsed_args.sharding, + '--capabilities.maxInstances=%s' + % parsed_args.sharding_instances, '--suite', parsed_args.suite, + '--params.devMode="%s"' % dev_mode]) for process in background_processes: process.wait() diff --git a/scripts/run_frontend_tests.py b/scripts/run_frontend_tests.py index f57e29622168..d111e6a022b0 100644 --- a/scripts/run_frontend_tests.py +++ b/scripts/run_frontend_tests.py @@ -49,36 +49,26 @@ def main(argv=None): parsed_args, _ = _PARSER.parse_known_args(args=argv) install_third_party_libs.maybe_install_dependencies( parsed_args.skip_install, parsed_args.run_minified_tests) - python_utils.PRINT('') - python_utils.PRINT( - 'View interactive frontend test coverage reports by navigating to') - python_utils.PRINT('') - python_utils.PRINT(' ../karma_coverage_reports') - python_utils.PRINT('') - python_utils.PRINT(' on your filesystem.') - python_utils.PRINT('') - python_utils.PRINT('') - python_utils.PRINT('Running test in development environment') - python_utils.PRINT('') + common.print_string_after_two_new_lines([ + 'View interactive frontend test coverage reports by navigating to', + '../karma_coverage_reports', + 'on your filesystem.', + 'Running test in development environment']) build.main() - start_tests_cmd = ( - '%s node_modules/karma/bin/karma start core/tests/karma.conf.ts' - % xvfb_prefix) - subprocess.call(start_tests_cmd.split()) + subprocess.call([ + xvfb_prefix, 'node_modules/karma/bin/karma', 'start', + 'core/tests/karma.conf.ts']) if parsed_args.run_minified_tests is True: - python_utils.PRINT('') python_utils.PRINT('Running test in production environment') - python_utils.PRINT('') build.main(argv=['--prod_env', '--minify_third_party_libs_only']) - start_tests_cmd = ( - '%s node_modules/karma/bin/karma start ' - 'core/tests/karma.conf.ts --prodEnv' % xvfb_prefix) - subprocess.call(start_tests_cmd.split()) + subprocess.call([ + xvfb_prefix, 
'node_modules/karma/bin/karma', 'start', + 'core/tests/karma.conf.ts', '--prodEnv']) python_utils.PRINT('Done!') diff --git a/scripts/run_performance_tests.py b/scripts/run_performance_tests.py index 8c729cd2e49a..d249a17e8f09 100644 --- a/scripts/run_performance_tests.py +++ b/scripts/run_performance_tests.py @@ -65,10 +65,9 @@ def run_performance_test(test_name, xvfb_prefix): test_name: str. The test name to be run. xvfb_prefix: str. The XVFB prefix. """ - subprocess.call(( - '%s python -m scripts.backend_tests ' - '--test_target=core.tests.performance_tests.%s' - % (xvfb_prefix, test_name)).split()) + subprocess.call([ + xvfb_prefix, 'python', '-m', 'scripts.backend_tests', + '--test_target=core.tests.performance_tests.%s' % test_name]) def main(argv=None): @@ -81,11 +80,10 @@ def main(argv=None): parsed_args.skip_install, parsed_args.run_minified_tests) if not common.is_port_close(8181): - python_utils.PRINT( - 'There is already a server running on localhost:8181') - python_utils.PRINT( - 'Please terminate it before running the performance tests.') - python_utils.PRINT('Exiting.') + common.print_string_after_two_new_lines([ + 'There is already a server running on localhost:8181', + 'Please terminate it before running the performance tests.', + 'Exiting.']) sys.exit(1) # Forces the cleanup function to run on exit. @@ -100,11 +98,11 @@ def main(argv=None): common.recursive_chmod(browsermob_proxy_path, 744) # Start a demo server. 
- background_process = subprocess.Popen(( - 'python %s/dev_appserver.py --host=0.0.0.0 --port=9501 ' - '--clear_datastore=yes --dev_appserver_log_level=critical ' - '--log_level=critical --skip_sdk_update_check=true app_dev.yaml' - % common.GOOGLE_APP_ENGINE_HOME).split()) + background_process = subprocess.Popen([ + 'python', '%s/dev_appserver.py' % common.GOOGLE_APP_ENGINE_HOME, + '--host=0.0.0.0', '--port=9501', '--clear_datastore=yes', + '--dev_appserver_log_level=critical', '--log_level=critical', + '--skip_sdk_update_check=true', app_dev.yaml]) # Wait for the servers to come up. while common.is_port_close(9501): @@ -118,7 +116,7 @@ def main(argv=None): # This installs xvfb for systems with apt-get installer like Ubuntu, and # will fail for other systems. # TODO(gvishal): Install/provide xvfb for other systems. - subprocess.call('sudo apt-get install xvfb'.split()) + subprocess.call(['sudo', 'apt-get', 'install', 'xvfb']) xvfb_prefix = '/usr/bin/xvfb-run' # If an argument is present then run test for that specific page. Otherwise diff --git a/scripts/run_presubmit_checks.py b/scripts/run_presubmit_checks.py index 7a2a35c146df..3c72d256dead 100644 --- a/scripts/run_presubmit_checks.py +++ b/scripts/run_presubmit_checks.py @@ -76,17 +76,16 @@ def main(argv=None): python_utils.PRINT('Running frontend unit tests') run_frontend_tests.main(argv=['--run_minified_tests']) python_utils.PRINT('Frontend tests passed.') - python_utils.PRINT('') else: # If files in common.FRONTEND_DIR were not changed, skip the tests. - python_utils.PRINT('No frontend files were changed.') - python_utils.PRINT('Skipped frontend tests') + common.print_string_after_two_new_lines([ + 'No frontend files were changed.', + 'Skipped frontend tests']) # Run backend tests. 
python_utils.PRINT('Running backend tests') run_backend_tests.main() python_utils.PRINT('Backend tests passed.') - python_utils.PRINT('') if __name__ == '__main__': diff --git a/scripts/run_tests.py b/scripts/run_tests.py index 360609139a38..ccbee4537b4f 100644 --- a/scripts/run_tests.py +++ b/scripts/run_tests.py @@ -42,19 +42,16 @@ def main(): python_utils.PRINT('Running frontend unit tests') run_frontend_tests.main() python_utils.PRINT('Frontend tests passed.') - python_utils.PRINT('') # Run backend tests. python_utils.PRINT('Running backend tests') run_backend_tests.main() python_utils.PRINT('Backend tests passed.') - python_utils.PRINT('') # Run end-to-end tests. python_utils.PRINT('Running end-to-end tests') run_e2e_tests.main() - python_utils.PRINT('') python_utils.PRINT( 'SUCCESS All frontend, backend and end-to-end tests passed!') diff --git a/scripts/setup.py b/scripts/setup.py index c37dea320418..d05f7784a2ff 100644 --- a/scripts/setup.py +++ b/scripts/setup.py @@ -63,21 +63,17 @@ def test_python_version(): # path. os_info = os.uname() if os_info[0] != 'Darwin' and os_info[0] != 'Linux': - python_utils.PRINT( + common.print_string_after_two_new_lines([ 'It looks like you are using Windows. 
If you have Python ' - 'installed,') - python_utils.PRINT( - 'make sure it is in your PATH and that PYTHONPATH is set.') - python_utils.PRINT( + 'installed,', + 'make sure it is in your PATH and that PYTHONPATH is set.', 'If you have two versions of Python (ie, Python 2.7 and 3), ' 'specify 2.7 before other versions of Python when setting the ' - 'PATH.') - python_utils.PRINT('Here are some helpful articles:') - python_utils.PRINT( - 'http://docs.python-guide.org/en/latest/starting/install/win/') - python_utils.PRINT( + 'PATH.', + 'Here are some helpful articles:', + 'http://docs.python-guide.org/en/latest/starting/install/win/', 'https://stackoverflow.com/questions/3701646/how-to-add-to-the-' - 'pythonpath-in-windows-7') + 'pythonpath-in-windows-7']) # Exit when no suitable Python environment can be found. sys.exit(1) @@ -108,21 +104,13 @@ def main(): if os_info[0] != 'Darwin' and os_info[0] != 'Linux': # Node is a requirement for all installation scripts. Here, we check if # the OS supports node.js installation; if not, we exit with an error. - python_utils.PRINT('') - python_utils.PRINT( - 'WARNING: Unsupported OS for installation of node.js.') - python_utils.PRINT( - 'If you are running this script on Windows, see the instructions') - python_utils.PRINT( - 'here regarding installation of node.js:') - python_utils.PRINT('') - python_utils.PRINT( + common.print_string_after_two_new_lines([ + 'WARNING: Unsupported OS for installation of node.js.', + 'If you are running this script on Windows, see the instructions', + 'here regarding installation of node.js:', 'https://github.com/oppia/oppia/wiki/Installing-Oppia-%28Windows' - '%29') - python_utils.PRINT('') - python_utils.PRINT( - 'STATUS: Installation completed except for node.js. Exiting.') - python_utils.PRINT('') + '%29', + 'STATUS: Installation completed except for node.js. Exiting.']) sys.exit(1) # Download and install node.js. 
@@ -163,15 +151,16 @@ def main(): chrome_bin = '/usr/bin/chromium-browser' elif os.environ.get('VAGRANT') or os.path.isfile('/etc/is_vagrant_vm'): # XVFB is required for headless testing in Vagrant. - subprocess.call('sudo apt-get install xvfb chromium-browser'.split()) + subprocess.call([ + 'sudo', 'apt-get', 'install', 'xvfb', 'chromium-browser']) chrome_bin = '/usr/bin/chromium-browser' # Used in frontend and e2e tests. Only gets set if using Vagrant VM. os.environ['XVFB_PREFIX'] = '/usr/bin/xvfb-run' # Enforce proper ownership on oppia, oppia_tools, and node_modules or # else NPM installs will fail. - subprocess.call( - 'sudo chown -R vagrant.vagrant /home/vagrant/oppia ' - '/home/vagrant/oppia_tools /home/vagrant/node_modules'.split()) + common.recursive_chown('/home/vagrant/oppia', os.getuid(), -1) + common.recursive_chown('/home/vagrant/oppia_tools', os.getuid(), -1) + common.recursive_chown('/home/vagrant/node_modules', os.getuid(), -1) elif os.path.isfile('/usr/bin/google-chrome'): # Unix. chrome_bin = '/usr/bin/google-chrome' diff --git a/scripts/start.py b/scripts/start.py index 068fd21ace0b..7f7548d3cdda 100644 --- a/scripts/start.py +++ b/scripts/start.py @@ -57,10 +57,9 @@ def cleanup(): """Function for waiting for the servers to go down.""" - python_utils.PRINT('') - python_utils.PRINT('INFORMATION') - python_utils.PRINT('Cleaning up the servers.') - python_utils.PRINT('') + common.print_string_after_two_new_lines([ + 'INFORMATION', + 'Cleaning up the servers.']) while not common.is_port_close(8181): time.sleep(1) @@ -83,12 +82,10 @@ def main(argv=None): # Check that there isn't a server already running. if not common.is_port_close(8181): - python_utils.PRINT('') - python_utils.PRINT('WARNING') - python_utils.PRINT( - 'Could not start new server. There is already an existing server') - python_utils.PRINT('running at port 8181.') - python_utils.PRINT('') + common.print_string_after_two_new_lines([ + 'WARNING', + 'Could not start new server. 
There is already an existing server', + 'running at port 8181.']) parsed_args, _ = _PARSER.parse_known_args(args=argv) clear_datastore_arg = ( @@ -100,6 +97,8 @@ def main(argv=None): constants_env_variable = '\'DEV_MODE\': false' for line in fileinput.input( files=['assets/constants.ts'], inplace=True): + # Inside this loop the STDOUT will be redirected to the file. + # The end='' is needed to avoid double line breaks. python_utils.PRINT( re.sub( '\'DEV_MODE\': .*', constants_env_variable, line), end='') @@ -109,6 +108,8 @@ def main(argv=None): constants_env_variable = '\'DEV_MODE\': true' for line in fileinput.input( files=['assets/constants.ts'], inplace=True): + # Inside this loop the STDOUT will be redirected to the file. + # The end='' is needed to avoid double line breaks. python_utils.PRINT( re.sub( '\'DEV_MODE\': .*', constants_env_variable, line), end='') @@ -122,24 +123,24 @@ def main(argv=None): # spam people accidentally. background_processes = [] if not parsed_args.prod_env: - background_processes.append(subprocess.Popen(( - '%s/bin/node node_modules/gulp/bin/gulp.js watch' - % common.NODE_PATH).split())) + background_processes.append(subprocess.Popen([ + '%s/bin/node' % common.NODE_PATH, 'node_modules/gulp/bin/gulp.js', + 'watch'])) + # In prod mode webpack is launched through scripts/build.py python_utils.PRINT('Compiling webpack...') - background_processes.append(subprocess.Popen( - 'node_modules/webpack/bin/webpack.js --config webpack.dev.config.ts' - ' --watch'.split())) + background_processes.append(subprocess.Popen([ + 'node_modules/webpack/bin/webpack.js', + '--config', 'webpack.dev.config.ts', '--watch'])) # Give webpack few seconds to do the initial compilation. 
time.sleep(10) python_utils.PRINT('Starting GAE development server') - background_processes.append(subprocess.Popen(( - 'python %s/dev_appserver.py %s %s --admin_host 0.0.0.0 --admin_port ' - '8000 --host 0.0.0.0 --port 8181 --skip_sdk_update_check true %s' - % ( - common.GOOGLE_APP_ENGINE_HOME, clear_datastore_arg, - enable_console_arg, app_yaml_filepath)).split())) + background_processes.append(subprocess.Popen([ + 'python', '%s/dev_appserver.py' % common.GOOGLE_APP_ENGINE_HOME, + clear_datastore_arg, enable_console_arg, '--admin_host', '0.0.0.0', + '--admin_port', '8000', '--host', '0.0.0.0', '--port', '8181', + '--skip_sdk_update_check', 'true', app_yaml_filepath])) # Wait for the servers to come up. while common.is_port_close(8181): @@ -152,44 +153,34 @@ def main(argv=None): if list(filter( detect_virtualbox_pattern.match, os.listdir('/dev/disk/by-id/'))): - python_utils.PRINT('') - python_utils.PRINT('INFORMATION') - python_utils.PRINT( + common.print_string_after_two_new_lines([ + 'INFORMATION', 'Setting up a local development server. You can access this ' - 'server') - python_utils.PRINT( - 'by navigating to localhost:8181 in a browser window.') - python_utils.PRINT('') + 'server', + 'by navigating to localhost:8181 in a browser window.']) else: - python_utils.PRINT('') - python_utils.PRINT('INFORMATION') - python_utils.PRINT( + common.print_string_after_two_new_lines([ + 'INFORMATION', 'Setting up a local development server at localhost:8181. 
' - 'Opening a') - python_utils.PRINT('default browser window pointing to this server') - python_utils.PRINT('') + 'Opening a', + 'default browser window pointing to this server']) time.sleep(5) background_processes.append( - subprocess.Popen('xdg-open http://localhost:8181/'.split())) + subprocess.Popen(['xdg-open', 'http://localhost:8181/'])) elif os_info[0] == 'Darwin' and not parsed_args.no_browser: - python_utils.PRINT('') - python_utils.PRINT('INFORMATION') - python_utils.PRINT( + common.print_string_after_two_new_lines([ + 'INFORMATION', 'Setting up a local development server at localhost:8181. ' - 'Opening a') - python_utils.PRINT('default browser window pointing to this server.') - python_utils.PRINT('') + 'Opening a', + 'default browser window pointing to this server.']) time.sleep(5) background_processes.append( - subprocess.Popen('open http://localhost:8181/'.split())) + subprocess.Popen(['open', 'http://localhost:8181/'])) else: - python_utils.PRINT('') - python_utils.PRINT('INFORMATION') - python_utils.PRINT( - 'Setting up a local development server. You can access this server') - python_utils.PRINT( - 'by navigating to localhost:8181 in a browser window.') - python_utils.PRINT('') + common.print_string_after_two_new_lines([ + 'INFORMATION', + 'Setting up a local development server. You can access this server', + 'by navigating to localhost:8181 in a browser window.']) python_utils.PRINT('Done!') diff --git a/scripts/vagrant_lock.py b/scripts/vagrant_lock.py index 7fdb86dbfd1f..f14fe8698166 100644 --- a/scripts/vagrant_lock.py +++ b/scripts/vagrant_lock.py @@ -27,6 +27,7 @@ import python_utils +from . import common from . 
import clean @@ -45,16 +46,11 @@ def main(argv=None): sys.exit(0) if os.path.isfile(vagrant_lock_file): - python_utils.PRINT('') - python_utils.PRINT('Another setup instance is already running') - python_utils.PRINT('') - python_utils.PRINT( - 'Please wait for that instance to complete or terminate it') - python_utils.PRINT('') - python_utils.PRINT( - 'If you ran $0 twice on purpose, you can override this with ' - '--nolock') - python_utils.PRINT('') + common.print_string_after_two_new_lines([ + 'Another setup instance is already running', + 'Please wait for that instance to complete or terminate it', + 'If you ran $0 twice on purpose, you can override this with ', + '--nolock']) sys.exit(1) else: os.utime(vagrant_lock_file, None) From 6044c233211a9a95757afb17934ebf8477804389 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Wed, 28 Aug 2019 03:40:00 +0530 Subject: [PATCH 057/141] fix --- scripts/backend_tests.py | 2 +- scripts/build.py | 2 +- scripts/create_expression_parser.py | 1 - scripts/install_third_party_libs.py | 7 ++++--- scripts/pre_commit_hook.py | 2 +- scripts/pre_push_hook.py | 2 +- scripts/run_backend_tests.py | 4 ++-- scripts/run_e2e_tests.py | 4 ++-- scripts/run_frontend_tests.py | 3 ++- scripts/run_performance_tests.py | 4 ++-- scripts/run_presubmit_checks.py | 2 +- scripts/start.py | 2 +- scripts/vagrant_lock.py | 6 ++---- 13 files changed, 20 insertions(+), 21 deletions(-) diff --git a/scripts/backend_tests.py b/scripts/backend_tests.py index a7f346879302..91a18f26a9ae 100644 --- a/scripts/backend_tests.py +++ b/scripts/backend_tests.py @@ -448,4 +448,4 @@ def main(argv=None): if __name__ == '__main__': - main(sys.argv) + main(argv=sys.argv) diff --git a/scripts/build.py b/scripts/build.py index e2ada3d9187b..4d0aee76541a 100644 --- a/scripts/build.py +++ b/scripts/build.py @@ -1393,4 +1393,4 @@ def main(argv=None): # The 'no coverage' pragma is used as this line is un-testable. 
This is because # it will only be called when build.py is used as a script. if __name__ == '__main__': # pragma: no cover - main(sys.argv) + main(argv=sys.argv) diff --git a/scripts/create_expression_parser.py b/scripts/create_expression_parser.py index d806d362fc7d..3e1ba6f36acf 100644 --- a/scripts/create_expression_parser.py +++ b/scripts/create_expression_parser.py @@ -16,7 +16,6 @@ from __future__ import absolute_import # pylint: disable=import-only-modules import fileinput -import os import re import subprocess diff --git a/scripts/install_third_party_libs.py b/scripts/install_third_party_libs.py index e93186089fa0..05cbb6691834 100644 --- a/scripts/install_third_party_libs.py +++ b/scripts/install_third_party_libs.py @@ -90,6 +90,7 @@ def pip_install(package, version, install_path): # pylint: enable=no-name-in-module # pylint: enable=import-error + def install_skulpt(argv): """Download and install Skulpt. Skulpt is built using a Python script included within the Skulpt repository (skulpt.py). This script normally @@ -245,11 +246,11 @@ def main(argv=None): install_third_party.main() # Install third-party node modules needed for the build process. - subprocess.call([common.NPM_PATH, 'install', '--only=dev']) + subprocess.call(['sudo', common.NPM_PATH, 'install', '--only=dev']) # This line removes the 'npm ERR! missing:' messages. For reference, see # this thread: https://github.com/npm/npm/issues/19393#issuecomment- # 374076889. 
- subprocess.call([common.NPM_PATH, 'dedupe']) + # subprocess.call([common.NPM_PATH, 'dedupe']) install_skulpt(argv) @@ -263,4 +264,4 @@ def main(argv=None): if __name__ == '__main__': - main(sys.argv) + main(argv=sys.argv) diff --git a/scripts/pre_commit_hook.py b/scripts/pre_commit_hook.py index 327f5a91e0ca..e46cc4aecf21 100755 --- a/scripts/pre_commit_hook.py +++ b/scripts/pre_commit_hook.py @@ -149,4 +149,4 @@ def main(argv=None): if __name__ == '__main__': - main(sys.argv) + main(argv=sys.argv) diff --git a/scripts/pre_push_hook.py b/scripts/pre_push_hook.py index 3807e67c2c3f..f181a1b99885 100755 --- a/scripts/pre_push_hook.py +++ b/scripts/pre_push_hook.py @@ -407,4 +407,4 @@ def main(argv=None): if __name__ == '__main__': - main(sys.argv) + main(argv=sys.argv) diff --git a/scripts/run_backend_tests.py b/scripts/run_backend_tests.py index 1a8ae946e941..bb00a4f63b77 100644 --- a/scripts/run_backend_tests.py +++ b/scripts/run_backend_tests.py @@ -71,7 +71,7 @@ def main(argv=None): 'webpack.dev.config.ts']) sys.path.append(os.path.join(common.OPPIA_TOOLS_DIR, 'webtest-2.0.33')) - backend_tests.main(argv) + backend_tests.main(argv=argv) if parsed_args.generate_coverage_report: subprocess.call(['python', coverage_path, 'combine']) @@ -88,4 +88,4 @@ def main(argv=None): if __name__ == '__main__': - main(sys.argv) + main(argv=sys.argv) diff --git a/scripts/run_e2e_tests.py b/scripts/run_e2e_tests.py index 61a4516bd775..60452b2aa9af 100644 --- a/scripts/run_e2e_tests.py +++ b/scripts/run_e2e_tests.py @@ -192,7 +192,7 @@ def main(argv=None): '--capabilities.shardTestFiles=%s' % parsed_args.sharding, '--capabilities.maxInstances=%s' % parsed_args.sharding_instances, '--suite', parsed_args.suite, - '--params.devMode="%s"' % devMode]) + '--params.devMode="%s"' % dev_mode]) else: python_utils.PRINT('Running the tests on browserstack...') if not parsed_args.sharding or parsed_args.sharding_instances == '1': @@ -214,4 +214,4 @@ def main(argv=None): if __name__ == 
'__main__': - main(sys.argv) + main(argv=sys.argv) diff --git a/scripts/run_frontend_tests.py b/scripts/run_frontend_tests.py index d111e6a022b0..0a7cc1185abd 100644 --- a/scripts/run_frontend_tests.py +++ b/scripts/run_frontend_tests.py @@ -23,6 +23,7 @@ import python_utils from . import build +from . import common from . import install_third_party_libs from . import setup from . import setup_gae @@ -74,4 +75,4 @@ def main(argv=None): if __name__ == '__main__': - main(sys.argv) + main(argv=sys.argv) diff --git a/scripts/run_performance_tests.py b/scripts/run_performance_tests.py index d249a17e8f09..9d70dbbda045 100644 --- a/scripts/run_performance_tests.py +++ b/scripts/run_performance_tests.py @@ -102,7 +102,7 @@ def main(argv=None): 'python', '%s/dev_appserver.py' % common.GOOGLE_APP_ENGINE_HOME, '--host=0.0.0.0', '--port=9501', '--clear_datastore=yes', '--dev_appserver_log_level=critical', '--log_level=critical', - '--skip_sdk_update_check=true', app_dev.yaml]) + '--skip_sdk_update_check=true', 'app_dev.yaml']) # Wait for the servers to come up. while common.is_port_close(9501): @@ -142,4 +142,4 @@ def main(argv=None): if __name__ == '__main__': - main(sys.argv) + main(argv=sys.argv) diff --git a/scripts/run_presubmit_checks.py b/scripts/run_presubmit_checks.py index 3c72d256dead..b683acfef5b9 100644 --- a/scripts/run_presubmit_checks.py +++ b/scripts/run_presubmit_checks.py @@ -89,4 +89,4 @@ def main(argv=None): if __name__ == '__main__': - main(sys.argv) + main(argv=sys.argv) diff --git a/scripts/start.py b/scripts/start.py index 7f7548d3cdda..46a4efb16edd 100644 --- a/scripts/start.py +++ b/scripts/start.py @@ -189,4 +189,4 @@ def main(argv=None): if __name__ == '__main__': - main(sys.argv) + main(argv=sys.argv) diff --git a/scripts/vagrant_lock.py b/scripts/vagrant_lock.py index f14fe8698166..64c28f8ba9be 100644 --- a/scripts/vagrant_lock.py +++ b/scripts/vagrant_lock.py @@ -25,10 +25,8 @@ import os import sys -import python_utils - -from . 
import common from . import clean +from . import common def main(argv=None): @@ -58,4 +56,4 @@ def main(argv=None): if __name__ == '__main__': - main(sys.argv) + main(argv=sys.argv) From c28656b84b733a7d07667f780bdbf72e0ac86ce0 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Wed, 28 Aug 2019 03:47:47 +0530 Subject: [PATCH 058/141] fix --- scripts/install_third_party_libs.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/scripts/install_third_party_libs.py b/scripts/install_third_party_libs.py index 05cbb6691834..90cc691bedd3 100644 --- a/scripts/install_third_party_libs.py +++ b/scripts/install_third_party_libs.py @@ -173,8 +173,9 @@ def install_skulpt(argv): end='') subprocess.call([ - 'python', 'dist', os.path.join( - common.OPPIA_TOOLS_DIR, 'skulpt-0.10.0/skulpt/skulpt.py')]) + 'python', os.path.join( + common.OPPIA_TOOLS_DIR, 'skulpt-0.10.0/skulpt/skulpt.py'), + 'dist']) # Return to the Oppia root folder. os.chdir(common.CURR_DIR) From 2d62316b8fe54df85c516da4718a31b9a2902833 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Wed, 28 Aug 2019 04:00:24 +0530 Subject: [PATCH 059/141] fix --- scripts/install_third_party_libs.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/install_third_party_libs.py b/scripts/install_third_party_libs.py index 90cc691bedd3..b00e53b41e4f 100644 --- a/scripts/install_third_party_libs.py +++ b/scripts/install_third_party_libs.py @@ -247,7 +247,7 @@ def main(argv=None): install_third_party.main() # Install third-party node modules needed for the build process. - subprocess.call(['sudo', common.NPM_PATH, 'install', '--only=dev']) + subprocess.call([common.NPM_PATH, 'install', '--only=dev']) # This line removes the 'npm ERR! missing:' messages. For reference, see # this thread: https://github.com/npm/npm/issues/19393#issuecomment- # 374076889. 
From 1e7bcdb0c1b913bea5b488e1b734f2748c2a9a3d Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Wed, 28 Aug 2019 04:01:31 +0530 Subject: [PATCH 060/141] fix --- .circleci/config.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.circleci/config.yml b/.circleci/config.yml index 67bd074327f7..70de23ab5f74 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -112,6 +112,7 @@ jobs: - restore_cache: <<: *restore_cache - run: sudo pip install pyyaml + - run: sudo pip install webtest - run: name: Run backend tests command: | From 3a973318aa9a9d3f7bb79e0eebd6e754d21909aa Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Wed, 28 Aug 2019 05:50:50 +0530 Subject: [PATCH 061/141] fix frontend --- scripts/run_frontend_tests.py | 22 ++++++++++++++++------ scripts/setup.py | 2 +- 2 files changed, 17 insertions(+), 7 deletions(-) diff --git a/scripts/run_frontend_tests.py b/scripts/run_frontend_tests.py index 0a7cc1185abd..a3a4f211385c 100644 --- a/scripts/run_frontend_tests.py +++ b/scripts/run_frontend_tests.py @@ -58,18 +58,28 @@ def main(argv=None): build.main() - subprocess.call([ - xvfb_prefix, 'node_modules/karma/bin/karma', 'start', - 'core/tests/karma.conf.ts']) + if xvfb_prefix: + subprocess.call([ + xvfb_prefix, 'node_modules/karma/bin/karma', 'start', + 'core/tests/karma.conf.ts']) + else: + subprocess.call([ + 'node_modules/karma/bin/karma', 'start', + 'core/tests/karma.conf.ts']) if parsed_args.run_minified_tests is True: python_utils.PRINT('Running test in production environment') build.main(argv=['--prod_env', '--minify_third_party_libs_only']) - subprocess.call([ - xvfb_prefix, 'node_modules/karma/bin/karma', 'start', - 'core/tests/karma.conf.ts', '--prodEnv']) + if xvfb_prefix: + subprocess.call([ + xvfb_prefix, 'node_modules/karma/bin/karma', 'start', + 'core/tests/karma.conf.ts', '--prodEnv']) + else: + subprocess.call([ + 'node_modules/karma/bin/karma', 'start', + 'core/tests/karma.conf.ts', '--prodEnv']) python_utils.PRINT('Done!') 
diff --git a/scripts/setup.py b/scripts/setup.py index d05f7784a2ff..1d8becfa7af2 100644 --- a/scripts/setup.py +++ b/scripts/setup.py @@ -140,7 +140,7 @@ def main(): os.path.join(common.OPPIA_TOOLS_DIR, node_file_name), common.NODE_PATH) - # Change ownership of $NODE_MODULE_DIR. + # Change ownership of node_modules. # Note: on some machines, these commands seem to take quite a long time. common.recursive_chown('node_modules/', os.getuid(), -1) common.recursive_chmod('node_modules/', 744) From cfad6b3ae9f56f72ece41b655f55418ce12ecf71 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Wed, 28 Aug 2019 05:57:28 +0530 Subject: [PATCH 062/141] fix --- scripts/install_third_party_libs.py | 2 +- scripts/run_performance_tests.py | 11 ++++++++--- 2 files changed, 9 insertions(+), 4 deletions(-) diff --git a/scripts/install_third_party_libs.py b/scripts/install_third_party_libs.py index b00e53b41e4f..a81854f1a8a8 100644 --- a/scripts/install_third_party_libs.py +++ b/scripts/install_third_party_libs.py @@ -251,7 +251,7 @@ def main(argv=None): # This line removes the 'npm ERR! missing:' messages. For reference, see # this thread: https://github.com/npm/npm/issues/19393#issuecomment- # 374076889. - # subprocess.call([common.NPM_PATH, 'dedupe']) + subprocess.call([common.NPM_PATH, 'dedupe']) install_skulpt(argv) diff --git a/scripts/run_performance_tests.py b/scripts/run_performance_tests.py index 9d70dbbda045..d466ba8a0372 100644 --- a/scripts/run_performance_tests.py +++ b/scripts/run_performance_tests.py @@ -65,9 +65,14 @@ def run_performance_test(test_name, xvfb_prefix): test_name: str. The test name to be run. xvfb_prefix: str. The XVFB prefix. 
""" - subprocess.call([ - xvfb_prefix, 'python', '-m', 'scripts.backend_tests', - '--test_target=core.tests.performance_tests.%s' % test_name]) + if xvfb_prefix: + subprocess.call([ + xvfb_prefix, 'python', '-m', 'scripts.backend_tests', + '--test_target=core.tests.performance_tests.%s' % test_name]) + else: + subprocess.call([ + 'python', '-m', 'scripts.backend_tests', + '--test_target=core.tests.performance_tests.%s' % test_name]) def main(argv=None): From 11ae549e86be658014bf0d3f38996778285ed54d Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Wed, 28 Aug 2019 16:58:38 +0530 Subject: [PATCH 063/141] fix start --- scripts/start.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/scripts/start.py b/scripts/start.py index 46a4efb16edd..159dee0cd57b 100644 --- a/scripts/start.py +++ b/scripts/start.py @@ -136,11 +136,11 @@ def main(argv=None): time.sleep(10) python_utils.PRINT('Starting GAE development server') - background_processes.append(subprocess.Popen([ - 'python', '%s/dev_appserver.py' % common.GOOGLE_APP_ENGINE_HOME, - clear_datastore_arg, enable_console_arg, '--admin_host', '0.0.0.0', - '--admin_port', '8000', '--host', '0.0.0.0', '--port', '8181', - '--skip_sdk_update_check', 'true', app_yaml_filepath])) + background_processes.append(subprocess.Popen( + 'python %s/dev_appserver.py %s %s --admin_host 0.0.0.0 --admin_port ' + '8000 --host 0.0.0.0 --port 8181 --skip_sdk_update_check true %s' % ( + common.GOOGLE_APP_ENGINE_HOME, clear_datastore_arg, + enable_console_arg, app_yaml_filepath), shell=True)) # Wait for the servers to come up. 
while common.is_port_close(8181): From 46eb948d07ed50a0a33924217609c01b3d38736b Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Wed, 28 Aug 2019 17:02:27 +0530 Subject: [PATCH 064/141] fix lint --- scripts/install_third_party_libs.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/scripts/install_third_party_libs.py b/scripts/install_third_party_libs.py index a81854f1a8a8..b2920bd43195 100644 --- a/scripts/install_third_party_libs.py +++ b/scripts/install_third_party_libs.py @@ -82,13 +82,11 @@ def pip_install(package, version, install_path): 'install', '%s==%s' % (package, version), '--target', install_path]) # For pip version > 10. else: - # pylint: disable=no-name-in-module - # pylint: disable=import-error + # pylint: disable=no-name-in-module, import-error import pip._internal pip._internal.main(args=[ # pylint: disable=protected-access 'install', '%s==%s' % (package, version), '--target', install_path]) - # pylint: enable=no-name-in-module - # pylint: enable=import-error + # pylint: enable=no-name-in-module, import-error def install_skulpt(argv): From 95b6ca9ae231c14f229db4d8bc8c1a45c7e5b9ea Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Wed, 28 Aug 2019 17:20:21 +0530 Subject: [PATCH 065/141] clear cache --- .circleci/config.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 70de23ab5f74..1f9fbc93cb44 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -29,8 +29,8 @@ anchor_for_installing_cc_test_reporter: &install_cc anchor_for_restoring_cache: &restore_cache keys: - - setup-files-cache-{{ checksum "date" }} - - third-party-cache-{{ checksum "date" }} + - v1-setup-files-cache-{{ checksum "date" }} + - v1-third-party-cache-{{ checksum "date" }} version: 2 jobs: @@ -44,7 +44,7 @@ jobs: - run: <<: *install_dependencies - save_cache: - key: setup-files-cache-{{ checksum "date" }} + key: v1-setup-files-cache-{{ checksum "date" }} paths: 
- node_modules/ - ../oppia_tools/ From 7130eb7319f288bd2dfa4ac6d2f4218148419528 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Wed, 28 Aug 2019 17:36:32 +0530 Subject: [PATCH 066/141] revert --- .circleci/config.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 1f9fbc93cb44..70de23ab5f74 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -29,8 +29,8 @@ anchor_for_installing_cc_test_reporter: &install_cc anchor_for_restoring_cache: &restore_cache keys: - - v1-setup-files-cache-{{ checksum "date" }} - - v1-third-party-cache-{{ checksum "date" }} + - setup-files-cache-{{ checksum "date" }} + - third-party-cache-{{ checksum "date" }} version: 2 jobs: @@ -44,7 +44,7 @@ jobs: - run: <<: *install_dependencies - save_cache: - key: v1-setup-files-cache-{{ checksum "date" }} + key: setup-files-cache-{{ checksum "date" }} paths: - node_modules/ - ../oppia_tools/ From 13155a5b8648d1719642c50a5664f8fe562947b2 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Wed, 28 Aug 2019 17:38:32 +0530 Subject: [PATCH 067/141] remove pylint --- scripts/docstrings_checker.py | 4 ---- scripts/pylint_extensions_test.py | 4 ---- 2 files changed, 8 deletions(-) diff --git a/scripts/docstrings_checker.py b/scripts/docstrings_checker.py index fea279db5bb4..e90809c4b210 100644 --- a/scripts/docstrings_checker.py +++ b/scripts/docstrings_checker.py @@ -24,10 +24,6 @@ import python_utils -_PARENT_DIR = os.path.abspath(os.path.join(os.getcwd(), os.pardir)) -_PYLINT_PATH = os.path.join(_PARENT_DIR, 'oppia_tools', 'pylint-1.9.4') -sys.path.insert(0, _PYLINT_PATH) - # pylint: disable=wrong-import-order # pylint: disable=wrong-import-position import astroid # isort:skip diff --git a/scripts/pylint_extensions_test.py b/scripts/pylint_extensions_test.py index c6780c7954f8..936c2e68dc67 100644 --- a/scripts/pylint_extensions_test.py +++ b/scripts/pylint_extensions_test.py @@ -29,10 +29,6 @@ from . 
import pylint_extensions -_PARENT_DIR = os.path.abspath(os.path.join(os.getcwd(), os.pardir)) -_PYLINT_PATH = os.path.join(_PARENT_DIR, 'oppia_tools', 'pylint-1.9.4') -sys.path.insert(0, _PYLINT_PATH) - # Since these module needs to be imported after adding Pylint path, # we need to disable isort for the below lines to prevent import # order errors. From 86647a8f5e37a558742143c4a621f7755c47da3f Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Wed, 28 Aug 2019 17:46:34 +0530 Subject: [PATCH 068/141] remove pylint path --- scripts/backend_tests.py | 1 - 1 file changed, 1 deletion(-) diff --git a/scripts/backend_tests.py b/scripts/backend_tests.py index 91a18f26a9ae..92297274c993 100644 --- a/scripts/backend_tests.py +++ b/scripts/backend_tests.py @@ -42,7 +42,6 @@ THIRD_PARTY_DIR = os.path.join(CURR_DIR, 'third_party') DIRS_TO_ADD_TO_SYS_PATH = [ - os.path.join(OPPIA_TOOLS_DIR, 'pylint-1.9.4'), os.path.join( OPPIA_TOOLS_DIR, 'google_appengine_1.9.67', 'google_appengine'), os.path.join(OPPIA_TOOLS_DIR, 'webtest-2.0.33'), From 7c47253584fcb2d72ee58aedc2f25fff6ebf5ae8 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Wed, 28 Aug 2019 18:21:49 +0530 Subject: [PATCH 069/141] add path --- scripts/docstrings_checker.py | 4 ++++ scripts/pylint_extensions_test.py | 4 ++++ 2 files changed, 8 insertions(+) diff --git a/scripts/docstrings_checker.py b/scripts/docstrings_checker.py index e90809c4b210..fea279db5bb4 100644 --- a/scripts/docstrings_checker.py +++ b/scripts/docstrings_checker.py @@ -24,6 +24,10 @@ import python_utils +_PARENT_DIR = os.path.abspath(os.path.join(os.getcwd(), os.pardir)) +_PYLINT_PATH = os.path.join(_PARENT_DIR, 'oppia_tools', 'pylint-1.9.4') +sys.path.insert(0, _PYLINT_PATH) + # pylint: disable=wrong-import-order # pylint: disable=wrong-import-position import astroid # isort:skip diff --git a/scripts/pylint_extensions_test.py b/scripts/pylint_extensions_test.py index 936c2e68dc67..c6780c7954f8 100644 --- a/scripts/pylint_extensions_test.py 
+++ b/scripts/pylint_extensions_test.py @@ -29,6 +29,10 @@ from . import pylint_extensions +_PARENT_DIR = os.path.abspath(os.path.join(os.getcwd(), os.pardir)) +_PYLINT_PATH = os.path.join(_PARENT_DIR, 'oppia_tools', 'pylint-1.9.4') +sys.path.insert(0, _PYLINT_PATH) + # Since these module needs to be imported after adding Pylint path, # we need to disable isort for the below lines to prevent import # order errors. From efc69b051801ffd32aae1b87cfcad3617cf8aed1 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Wed, 28 Aug 2019 19:50:02 +0530 Subject: [PATCH 070/141] add pylint path --- scripts/docstrings_checker_test.py | 16 ++++++++++++++-- scripts/pylint_extensions.py | 24 +++++++++++++++++++----- 2 files changed, 33 insertions(+), 7 deletions(-) diff --git a/scripts/docstrings_checker_test.py b/scripts/docstrings_checker_test.py index 29356d8e4d4e..28636dcc6ad9 100644 --- a/scripts/docstrings_checker_test.py +++ b/scripts/docstrings_checker_test.py @@ -19,15 +19,27 @@ import ast import contextlib +import os +import sys import unittest +_PARENT_DIR = os.path.abspath(os.path.join(os.getcwd(), os.pardir)) +_PYLINT_PATH = os.path.join(_PARENT_DIR, 'oppia_tools', 'pylint-1.9.4') +sys.path.insert(0, _PYLINT_PATH) + +# Since these module needs to be imported after adding Pylint path, +# we need to disable isort for the below lines to prevent import +# order errors. + +# pylint: disable=wrong-import-order # pylint: disable=wrong-import-position -import astroid +import astroid # isort:skip -from . import docstrings_checker +from . 
import docstrings_checker # isort:skip from pylint.checkers import utils # isort:skip # pylint: enable=wrong-import-position +# pylint: enable=wrong-import-order class ASTDocstringsCheckerTest(unittest.TestCase): diff --git a/scripts/pylint_extensions.py b/scripts/pylint_extensions.py index 91d1edf53bad..d38ae630dd3f 100644 --- a/scripts/pylint_extensions.py +++ b/scripts/pylint_extensions.py @@ -19,16 +19,30 @@ """ from __future__ import absolute_import # pylint: disable=import-only-modules +import os import re +import sys -import astroid -from pylint import checkers -from pylint import interfaces -from pylint.checkers import typecheck -from pylint.checkers import utils as checker_utils +_PARENT_DIR = os.path.abspath(os.path.join(os.getcwd(), os.pardir)) +_PYLINT_PATH = os.path.join(_PARENT_DIR, 'oppia_tools', 'pylint-1.9.4') +sys.path.insert(0, _PYLINT_PATH) + +# Since these module needs to be imported after adding Pylint path, +# we need to disable isort for the below lines to prevent import +# order errors. + +# pylint: disable=wrong-import-order +# pylint: disable=wrong-import-position +import astroid # isort:skip +from pylint import checkers # isort:skip +from pylint import interfaces # isort:skip +from pylint.checkers import typecheck # isort:skip +from pylint.checkers import utils as checker_utils # isort:skip import python_utils # isort:skip from . 
import docstrings_checker # isort:skip +# pylint: enable=wrong-import-position +# pylint: enable=wrong-import-order def read_from_node(node): From 834bffab2c246a54e1696835c0aa931f72c39e4a Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Wed, 28 Aug 2019 20:33:59 +0530 Subject: [PATCH 071/141] fix --- scripts/docstrings_checker_test.py | 17 +---------------- scripts/pylint_extensions.py | 11 +---------- scripts/pylint_extensions_test.py | 13 ------------- 3 files changed, 2 insertions(+), 39 deletions(-) diff --git a/scripts/docstrings_checker_test.py b/scripts/docstrings_checker_test.py index 28636dcc6ad9..36b2feda03cc 100644 --- a/scripts/docstrings_checker_test.py +++ b/scripts/docstrings_checker_test.py @@ -19,27 +19,12 @@ import ast import contextlib -import os -import sys import unittest -_PARENT_DIR = os.path.abspath(os.path.join(os.getcwd(), os.pardir)) -_PYLINT_PATH = os.path.join(_PARENT_DIR, 'oppia_tools', 'pylint-1.9.4') -sys.path.insert(0, _PYLINT_PATH) - -# Since these module needs to be imported after adding Pylint path, -# we need to disable isort for the below lines to prevent import -# order errors. - -# pylint: disable=wrong-import-order -# pylint: disable=wrong-import-position -import astroid # isort:skip - from . 
import docstrings_checker # isort:skip +import astroid # isort:skip from pylint.checkers import utils # isort:skip -# pylint: enable=wrong-import-position -# pylint: enable=wrong-import-order class ASTDocstringsCheckerTest(unittest.TestCase): diff --git a/scripts/pylint_extensions.py b/scripts/pylint_extensions.py index d38ae630dd3f..3e3c09ae2c80 100644 --- a/scripts/pylint_extensions.py +++ b/scripts/pylint_extensions.py @@ -19,17 +19,9 @@ """ from __future__ import absolute_import # pylint: disable=import-only-modules -import os import re -import sys -_PARENT_DIR = os.path.abspath(os.path.join(os.getcwd(), os.pardir)) -_PYLINT_PATH = os.path.join(_PARENT_DIR, 'oppia_tools', 'pylint-1.9.4') -sys.path.insert(0, _PYLINT_PATH) - -# Since these module needs to be imported after adding Pylint path, -# we need to disable isort for the below lines to prevent import -# order errors. +from . import docstrings_checker # isort:skip # pylint: disable=wrong-import-order # pylint: disable=wrong-import-position @@ -40,7 +32,6 @@ from pylint.checkers import utils as checker_utils # isort:skip import python_utils # isort:skip -from . import docstrings_checker # isort:skip # pylint: enable=wrong-import-position # pylint: enable=wrong-import-order diff --git a/scripts/pylint_extensions_test.py b/scripts/pylint_extensions_test.py index c6780c7954f8..2a74020e98df 100644 --- a/scripts/pylint_extensions_test.py +++ b/scripts/pylint_extensions_test.py @@ -20,8 +20,6 @@ """Unit tests for scripts/pylint_extensions.""" from __future__ import absolute_import # pylint: disable=import-only-modules -import os -import sys import tempfile import unittest @@ -29,20 +27,9 @@ from . 
import pylint_extensions -_PARENT_DIR = os.path.abspath(os.path.join(os.getcwd(), os.pardir)) -_PYLINT_PATH = os.path.join(_PARENT_DIR, 'oppia_tools', 'pylint-1.9.4') -sys.path.insert(0, _PYLINT_PATH) - -# Since these module needs to be imported after adding Pylint path, -# we need to disable isort for the below lines to prevent import -# order errors. -# pylint: disable=wrong-import-position -# pylint: disable=wrong-import-order import astroid # isort:skip from pylint import testutils # isort:skip from pylint import lint # isort:skip -# pylint: enable=wrong-import-position -# pylint: enable=wrong-import-order class ExplicitKeywordArgsCheckerTests(unittest.TestCase): From 0186a8a0caf0db3e887b613eb2c8c60b3d344eb6 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Wed, 28 Aug 2019 21:28:45 +0530 Subject: [PATCH 072/141] fix --- scripts/backend_tests.py | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/backend_tests.py b/scripts/backend_tests.py index 92297274c993..91a18f26a9ae 100644 --- a/scripts/backend_tests.py +++ b/scripts/backend_tests.py @@ -42,6 +42,7 @@ THIRD_PARTY_DIR = os.path.join(CURR_DIR, 'third_party') DIRS_TO_ADD_TO_SYS_PATH = [ + os.path.join(OPPIA_TOOLS_DIR, 'pylint-1.9.4'), os.path.join( OPPIA_TOOLS_DIR, 'google_appengine_1.9.67', 'google_appengine'), os.path.join(OPPIA_TOOLS_DIR, 'webtest-2.0.33'), From 326eda833cecfa2be7d11714e5cb54c8e14e513c Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Wed, 28 Aug 2019 22:34:21 +0530 Subject: [PATCH 073/141] use vurtual env --- .circleci/config.yml | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 70de23ab5f74..f24351b23d5c 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -113,10 +113,15 @@ jobs: <<: *restore_cache - run: sudo pip install pyyaml - run: sudo pip install webtest + - run: + name: Install virtual environment + command: | + sudo pip install pipenv + pipenv install - run: name: Run backend 
tests command: | - python -m scripts.run_backend_tests --generate_coverage_report --exclude_load_tests + pipenv python -m scripts.run_backend_tests --generate_coverage_report --exclude_load_tests - run: <<: *install_cc - run: From 4f4b3c25545ba4bd089bbdbc5e72965c549cdac4 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Wed, 28 Aug 2019 22:50:53 +0530 Subject: [PATCH 074/141] fix --- .circleci/config.yml | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index f24351b23d5c..76a1e373c1f2 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -116,12 +116,13 @@ jobs: - run: name: Install virtual environment command: | - sudo pip install pipenv - pipenv install + sudo pip install virtualenv + virtualenv venv + source venv/bin/activate - run: name: Run backend tests command: | - pipenv python -m scripts.run_backend_tests --generate_coverage_report --exclude_load_tests + python -m scripts.run_backend_tests --generate_coverage_report --exclude_load_tests - run: <<: *install_cc - run: From ad9fbaec08913fd08264f257af387d6a0cd2f5ba Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Thu, 29 Aug 2019 00:00:37 +0530 Subject: [PATCH 075/141] fix --- scripts/run_e2e_tests.py | 36 +++++++++++++++--------------------- scripts/start.py | 4 ++-- 2 files changed, 17 insertions(+), 23 deletions(-) diff --git a/scripts/run_e2e_tests.py b/scripts/run_e2e_tests.py index 60452b2aa9af..a54e95cb4c26 100644 --- a/scripts/run_e2e_tests.py +++ b/scripts/run_e2e_tests.py @@ -78,13 +78,10 @@ def cleanup(): time.sleep(1) if os.path.isdir('../protractor-screenshots'): - python_utils.PRINT('') - python_utils.PRINT( - 'Note: If ADD_SCREENSHOT_REPORTER is set to true in') - python_utils.PRINT( - 'core/tests/protractor.conf.js, you can view screenshots') - python_utils.PRINT('of the failed tests in ../protractor-screenshots/') - python_utils.PRINT('') + common.print_string_after_two_new_lines([ + 'Note: If 
ADD_SCREENSHOT_REPORTER is set to true in', + 'core/tests/protractor.conf.js, you can view screenshots', + 'of the failed tests in ../protractor-screenshots/']) python_utils.PRINT('Done!') @@ -101,13 +98,10 @@ def main(argv=None): parsed_args.skip_install, parsed_args.run_minified_tests) if not common.is_port_close(8181): - python_utils.PRINT('') - python_utils.PRINT( - 'There is already a server running on localhost:8181.') - python_utils.PRINT( - 'Please terminate it before running the end-to-end tests.') - python_utils.PRINT('Exiting.') - python_utils.PRINT('') + common.print_string_after_two_new_lines([ + 'There is already a server running on localhost:8181.', + 'Please terminate it before running the end-to-end tests.', + 'Exiting.']) sys.exit(1) # Forces the cleanup function to run on exit. @@ -136,7 +130,7 @@ def main(argv=None): # Inside this loop the STDOUT will be redirected to the file. # The end='' is needed to avoid double line breaks. python_utils.PRINT( - re.sub('\'DEV_MODE\': .*', constants_env_variable, line), + re.sub(r'\'DEV_MODE\': .*', constants_env_variable, line), end='') build.main() app_yaml_filepath = 'app_dev.yaml' @@ -150,7 +144,7 @@ def main(argv=None): '--versions.chrome', '2.41']) subprocess.call([ 'node_modules/.bin/webdriver-manager', 'start', - '--versions.chrome 2.41', '--detach --quiet']) + '--versions.chrome', '2.41', '--detach', '--quiet']) # Start a selenium process. The program sends thousands of lines of useless # info logs to stderr so we discard them. @@ -160,11 +154,11 @@ def main(argv=None): background_processes.append(subprocess.Popen([ 'node_modules/.bin/webdriver-manager', 'start', '2>/dev/null'])) # Start a demo server. 
- background_processes.append(subprocess.Popen([ - 'python', '%s/dev_appserver.py' % common.GOOGLE_APP_ENGINE_HOME, - '--host=0.0.0.0', '--port=9001', '--clear_datastore=yes', - '--dev_appserver_log_level=critical', '--log_level=critical', - '--skip_sdk_update_check=true', app_yaml_filepath])) + background_processes.append(subprocess.Popen( + 'python %s/dev_appserver.py --host=0.0.0.0 --port=9001 ' + '--clear_datastore=yes --dev_appserver_log_level=critical ' + '--log_level=critical --skip_sdk_update_check=true %s' % ( + common.GOOGLE_APP_ENGINE_HOME, app_yaml_filepath), shell=True)) # Wait for the servers to come up. while common.is_port_close(4444) or common.is_port_close(9001): diff --git a/scripts/start.py b/scripts/start.py index 159dee0cd57b..fe8110a84d6c 100644 --- a/scripts/start.py +++ b/scripts/start.py @@ -101,7 +101,7 @@ def main(argv=None): # The end='' is needed to avoid double line breaks. python_utils.PRINT( re.sub( - '\'DEV_MODE\': .*', constants_env_variable, line), end='') + r'\'DEV_MODE\': .*', constants_env_variable, line), end='') build.main(argv=['--prod_env', '--enable_watcher']) app_yaml_filepath = 'app.yaml' else: @@ -112,7 +112,7 @@ def main(argv=None): # The end='' is needed to avoid double line breaks. 
python_utils.PRINT( re.sub( - '\'DEV_MODE\': .*', constants_env_variable, line), end='') + r'\'DEV_MODE\': .*', constants_env_variable, line), end='') build.main(argv=['--enable_watcher']) app_yaml_filepath = 'app_dev.yaml' From 34efc74c77ff90f4c0ad2bd8ab5ea22b71c12dd6 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Thu, 29 Aug 2019 00:14:45 +0530 Subject: [PATCH 076/141] add url in travis yaml --- .travis.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.travis.yml b/.travis.yml index c926cd2f7d0f..f6a73ea88042 100644 --- a/.travis.yml +++ b/.travis.yml @@ -81,6 +81,7 @@ before_install: - pip install codecov - pip install pyyaml - export CHROME_BIN=/usr/bin/google-chrome-stable +- export CHROME_SOURCE_URL=https://github.com/webnicer/chrome-downloads/raw/master/x64.deb/google-chrome-stable_67.0.3396.99-1_amd64.deb - export DISPLAY=:99.0 - bash -e /etc/init.d/xvfb start - pip install future From 2d3d64218df94c376e154c83f7f2654d2dd2a660 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Thu, 29 Aug 2019 02:02:45 +0530 Subject: [PATCH 077/141] test --- scripts/docstrings_checker.py | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/docstrings_checker.py b/scripts/docstrings_checker.py index fea279db5bb4..7a1e65516764 100644 --- a/scripts/docstrings_checker.py +++ b/scripts/docstrings_checker.py @@ -28,6 +28,7 @@ _PYLINT_PATH = os.path.join(_PARENT_DIR, 'oppia_tools', 'pylint-1.9.4') sys.path.insert(0, _PYLINT_PATH) +raise Exception(sys.path) # pylint: disable=wrong-import-order # pylint: disable=wrong-import-position import astroid # isort:skip From dcf6ec36938bd3c6f791f8253a9253d1a0216c87 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Thu, 29 Aug 2019 02:38:27 +0530 Subject: [PATCH 078/141] test --- .circleci/config.yml | 1 + scripts/docstrings_checker.py | 1 - 2 files changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 76a1e373c1f2..4a0341f2a9b2 100644 --- a/.circleci/config.yml +++ 
b/.circleci/config.yml @@ -17,6 +17,7 @@ anchor_for_installing_dependencies: &install_dependencies command: | sudo pip install future sudo pip install psutil + sudo pip uninstall pylint python -m scripts.setup python -m scripts.setup_gae diff --git a/scripts/docstrings_checker.py b/scripts/docstrings_checker.py index 7a1e65516764..fea279db5bb4 100644 --- a/scripts/docstrings_checker.py +++ b/scripts/docstrings_checker.py @@ -28,7 +28,6 @@ _PYLINT_PATH = os.path.join(_PARENT_DIR, 'oppia_tools', 'pylint-1.9.4') sys.path.insert(0, _PYLINT_PATH) -raise Exception(sys.path) # pylint: disable=wrong-import-order # pylint: disable=wrong-import-position import astroid # isort:skip From b532ba0e448e9f5a92e808b07d28e542a53ebd60 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Thu, 29 Aug 2019 02:53:54 +0530 Subject: [PATCH 079/141] fix --- scripts/create_expression_parser.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/create_expression_parser.py b/scripts/create_expression_parser.py index 3e1ba6f36acf..a2d9646ffbb7 100644 --- a/scripts/create_expression_parser.py +++ b/scripts/create_expression_parser.py @@ -58,7 +58,7 @@ def main(): # Inside this loop the STDOUT will be redirected to the file. # The end='' is needed to avoid double line breaks. 
python_utils.PRINT( - re.sub(r'^})();\s*$', '}]);', line), end='') + re.sub(r'^\}\)\(\);\s*$', '}]);', line), end='') python_utils.PRINT('Done!') From f58f27a47c58ee05e1fe9a6807d198f3d8946430 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Thu, 29 Aug 2019 02:55:00 +0530 Subject: [PATCH 080/141] test --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 4a0341f2a9b2..9bc2d828ebcd 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -15,9 +15,9 @@ anchor_for_job_defaults: &job_defaults anchor_for_installing_dependencies: &install_dependencies name: Install dependencies command: | + sudo pip install --upgrade pip sudo pip install future sudo pip install psutil - sudo pip uninstall pylint python -m scripts.setup python -m scripts.setup_gae From 93b7cf4ba16ec05ead88126dcf3da4dffd9df598 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Thu, 29 Aug 2019 03:06:20 +0530 Subject: [PATCH 081/141] test --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 9bc2d828ebcd..78ebeb8aa0b6 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -15,7 +15,7 @@ anchor_for_job_defaults: &job_defaults anchor_for_installing_dependencies: &install_dependencies name: Install dependencies command: | - sudo pip install --upgrade pip + sudo pip install --upgrade pip --user sudo pip install future sudo pip install psutil python -m scripts.setup From 01970a5cb5c3bc6d885b3dedbc3db460a3a9600f Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Thu, 29 Aug 2019 03:12:07 +0530 Subject: [PATCH 082/141] test --- .circleci/config.yml | 24 +++++++++--------------- 1 file changed, 9 insertions(+), 15 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 78ebeb8aa0b6..2e1ba6896c82 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -15,9 +15,9 @@ 
anchor_for_job_defaults: &job_defaults anchor_for_installing_dependencies: &install_dependencies name: Install dependencies command: | - sudo pip install --upgrade pip --user - sudo pip install future - sudo pip install psutil + pip install --upgrade pip --user + pip install future --user + pip install psutil --user python -m scripts.setup python -m scripts.setup_gae @@ -56,7 +56,7 @@ jobs: - checkout - run: <<: *install_dependencies - - run: sudo pip install pyyaml + - run: pip install pyyaml --user - run: name: Run lint tests # All the python scripts should behave as modules. Files like the @@ -91,7 +91,7 @@ jobs: - run: date +%F > date - restore_cache: <<: *restore_cache - - run: sudo pip install pyyaml + - run: pip install pyyaml --user - run: name: Run frontend tests command: | @@ -99,7 +99,7 @@ jobs: - run: name: Generate frontend coverage report command: | - sudo pip install codecov + pip install codecov --user codecov --file ../karma_coverage_reports/lcov.info -F frontend when: on_success @@ -112,14 +112,8 @@ jobs: - run: date +%F > date - restore_cache: <<: *restore_cache - - run: sudo pip install pyyaml - - run: sudo pip install webtest - - run: - name: Install virtual environment - command: | - sudo pip install virtualenv - virtualenv venv - source venv/bin/activate + - run: pip install pyyaml --user + - run: pip install webtest --user - run: name: Run backend tests command: | @@ -129,7 +123,7 @@ jobs: - run: name: Generate backend coverage report command: | - sudo pip install codecov + pip install codecov --user codecov -F backend ./cc-test-reporter format-coverage -t coverage.py coverage.xml ./cc-test-reporter sum-coverage coverage/codeclimate.json From eda28561ff8d0b84213f0b0a0b9638c18e151c7b Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Thu, 29 Aug 2019 03:15:36 +0530 Subject: [PATCH 083/141] test --- .circleci/config.yml | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/.circleci/config.yml 
b/.circleci/config.yml index 2e1ba6896c82..ec944fdcb6e1 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -15,9 +15,9 @@ anchor_for_job_defaults: &job_defaults anchor_for_installing_dependencies: &install_dependencies name: Install dependencies command: | - pip install --upgrade pip --user - pip install future --user - pip install psutil --user + pip2 install --upgrade pip --user + pip2 install future --user + pip2 install psutil --user python -m scripts.setup python -m scripts.setup_gae @@ -56,7 +56,7 @@ jobs: - checkout - run: <<: *install_dependencies - - run: pip install pyyaml --user + - run: pip2 install pyyaml --user - run: name: Run lint tests # All the python scripts should behave as modules. Files like the @@ -91,7 +91,7 @@ jobs: - run: date +%F > date - restore_cache: <<: *restore_cache - - run: pip install pyyaml --user + - run: pip2 install pyyaml --user - run: name: Run frontend tests command: | @@ -99,7 +99,7 @@ jobs: - run: name: Generate frontend coverage report command: | - pip install codecov --user + pip2 install codecov --user codecov --file ../karma_coverage_reports/lcov.info -F frontend when: on_success @@ -112,8 +112,8 @@ jobs: - run: date +%F > date - restore_cache: <<: *restore_cache - - run: pip install pyyaml --user - - run: pip install webtest --user + - run: pip2 install pyyaml --user + - run: pip2 install webtest --user - run: name: Run backend tests command: | @@ -123,7 +123,7 @@ jobs: - run: name: Generate backend coverage report command: | - pip install codecov --user + pip2 install codecov --user codecov -F backend ./cc-test-reporter format-coverage -t coverage.py coverage.xml ./cc-test-reporter sum-coverage coverage/codeclimate.json From 92d39d59a17c1bf9991ac36400c0ed23bffe8713 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Thu, 29 Aug 2019 03:16:57 +0530 Subject: [PATCH 084/141] test --- .circleci/config.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 
ec944fdcb6e1..26ac9c2c85e7 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -15,7 +15,6 @@ anchor_for_job_defaults: &job_defaults anchor_for_installing_dependencies: &install_dependencies name: Install dependencies command: | - pip2 install --upgrade pip --user pip2 install future --user pip2 install psutil --user python -m scripts.setup From 98b324b5dd5e169747c084337f928aec221edde6 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Thu, 29 Aug 2019 03:28:23 +0530 Subject: [PATCH 085/141] test --- .circleci/config.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 26ac9c2c85e7..0a42d6a07742 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -15,6 +15,7 @@ anchor_for_job_defaults: &job_defaults anchor_for_installing_dependencies: &install_dependencies name: Install dependencies command: | + pip2 install --upgrade virtualenv pip2 install future --user pip2 install psutil --user python -m scripts.setup @@ -116,7 +117,7 @@ jobs: - run: name: Run backend tests command: | - python -m scripts.run_backend_tests --generate_coverage_report --exclude_load_tests + python2 -m scripts.run_backend_tests --generate_coverage_report --exclude_load_tests - run: <<: *install_cc - run: From 4cc089c9942ed7549a2c45470eb2e3c90ec7ce93 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Thu, 29 Aug 2019 03:31:04 +0530 Subject: [PATCH 086/141] test --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 0a42d6a07742..880e037da97a 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -15,7 +15,7 @@ anchor_for_job_defaults: &job_defaults anchor_for_installing_dependencies: &install_dependencies name: Install dependencies command: | - pip2 install --upgrade virtualenv + pip2 install --upgrade virtualenv --user pip2 install future --user pip2 install psutil --user python -m scripts.setup From 
2f2871c3d245b209876cd2174e135a841b6e90a6 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Thu, 29 Aug 2019 03:39:35 +0530 Subject: [PATCH 087/141] tes --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 880e037da97a..a175aa9bc6fe 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -15,7 +15,6 @@ anchor_for_job_defaults: &job_defaults anchor_for_installing_dependencies: &install_dependencies name: Install dependencies command: | - pip2 install --upgrade virtualenv --user pip2 install future --user pip2 install psutil --user python -m scripts.setup @@ -114,6 +113,7 @@ jobs: <<: *restore_cache - run: pip2 install pyyaml --user - run: pip2 install webtest --user + - run: pip2 install configparser --user - run: name: Run backend tests command: | From 14d2659eb8c89c2ac83de4706e2f52c1b6bc9ab4 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Thu, 29 Aug 2019 03:50:00 +0530 Subject: [PATCH 088/141] fi --- .circleci/config.yml | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index a175aa9bc6fe..15abdfe2772e 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -15,8 +15,8 @@ anchor_for_job_defaults: &job_defaults anchor_for_installing_dependencies: &install_dependencies name: Install dependencies command: | - pip2 install future --user - pip2 install psutil --user + sudo pip install future + sudo pip install psutil python -m scripts.setup python -m scripts.setup_gae @@ -55,7 +55,7 @@ jobs: - checkout - run: <<: *install_dependencies - - run: pip2 install pyyaml --user + - run: sudo pip install pyyaml - run: name: Run lint tests # All the python scripts should behave as modules. 
Files like the @@ -90,7 +90,7 @@ jobs: - run: date +%F > date - restore_cache: <<: *restore_cache - - run: pip2 install pyyaml --user + - run: sudo pip install pyyaml - run: name: Run frontend tests command: | @@ -98,7 +98,7 @@ jobs: - run: name: Generate frontend coverage report command: | - pip2 install codecov --user + sudo pip install codecov codecov --file ../karma_coverage_reports/lcov.info -F frontend when: on_success @@ -111,9 +111,9 @@ jobs: - run: date +%F > date - restore_cache: <<: *restore_cache - - run: pip2 install pyyaml --user - - run: pip2 install webtest --user - - run: pip2 install configparser --user + - run: sudo pip install pyyaml + - run: sudo pip install webtest + - run: sudo pip install configparser - run: name: Run backend tests command: | @@ -123,7 +123,7 @@ jobs: - run: name: Generate backend coverage report command: | - pip2 install codecov --user + sudo pip install codecov codecov -F backend ./cc-test-reporter format-coverage -t coverage.py coverage.xml ./cc-test-reporter sum-coverage coverage/codeclimate.json From 862490c75024bdb7743e56577d942583f46e8b36 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Thu, 29 Aug 2019 03:57:35 +0530 Subject: [PATCH 089/141] fix --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 15abdfe2772e..03835ea78e3c 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -117,7 +117,7 @@ jobs: - run: name: Run backend tests command: | - python2 -m scripts.run_backend_tests --generate_coverage_report --exclude_load_tests + python -m scripts.run_backend_tests --generate_coverage_report --exclude_load_tests - run: <<: *install_cc - run: From 1ec120f206eb526c2ae77ef123c295b3940d2c81 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Thu, 29 Aug 2019 04:05:41 +0530 Subject: [PATCH 090/141] test --- .circleci/config.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.circleci/config.yml 
b/.circleci/config.yml index 03835ea78e3c..41bb2288300d 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -15,6 +15,7 @@ anchor_for_job_defaults: &job_defaults anchor_for_installing_dependencies: &install_dependencies name: Install dependencies command: | + sudo python -m pip install --upgrade virtualenv sudo pip install future sudo pip install psutil python -m scripts.setup From 11d891daebd67a693da0af361463f8b7ad0172ed Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Thu, 29 Aug 2019 15:18:09 +0530 Subject: [PATCH 091/141] fix --- .circleci/config.yml | 21 +++++++++++---------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 41bb2288300d..6729576e07a9 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -15,9 +15,10 @@ anchor_for_job_defaults: &job_defaults anchor_for_installing_dependencies: &install_dependencies name: Install dependencies command: | - sudo python -m pip install --upgrade virtualenv - sudo pip install future - sudo pip install psutil + sudo pip install pipenv + pipenv shell + pip install future + pip install psutil python -m scripts.setup python -m scripts.setup_gae @@ -56,7 +57,7 @@ jobs: - checkout - run: <<: *install_dependencies - - run: sudo pip install pyyaml + - run: pip install pyyaml - run: name: Run lint tests # All the python scripts should behave as modules. 
Files like the @@ -91,7 +92,7 @@ jobs: - run: date +%F > date - restore_cache: <<: *restore_cache - - run: sudo pip install pyyaml + - run: pip install pyyaml - run: name: Run frontend tests command: | @@ -99,7 +100,7 @@ jobs: - run: name: Generate frontend coverage report command: | - sudo pip install codecov + pip install codecov codecov --file ../karma_coverage_reports/lcov.info -F frontend when: on_success @@ -112,9 +113,9 @@ jobs: - run: date +%F > date - restore_cache: <<: *restore_cache - - run: sudo pip install pyyaml - - run: sudo pip install webtest - - run: sudo pip install configparser + - run: pip install pyyaml + - run: pip install webtest + - run: pip install configparser - run: name: Run backend tests command: | @@ -124,7 +125,7 @@ jobs: - run: name: Generate backend coverage report command: | - sudo pip install codecov + pip install codecov codecov -F backend ./cc-test-reporter format-coverage -t coverage.py coverage.xml ./cc-test-reporter sum-coverage coverage/codeclimate.json From ce7d11402b2034761ba1607a5d51107e708b6a6a Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Sat, 31 Aug 2019 00:26:47 +0530 Subject: [PATCH 092/141] fix --- .circleci/config.yml | 21 ++++++++++----------- python_utils.py | 2 +- scripts/run_backend_tests.py | 1 - 3 files changed, 11 insertions(+), 13 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 6729576e07a9..7ea9c56f32b3 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -15,10 +15,8 @@ anchor_for_job_defaults: &job_defaults anchor_for_installing_dependencies: &install_dependencies name: Install dependencies command: | - sudo pip install pipenv - pipenv shell - pip install future - pip install psutil + sudo pip install future + sudo pip install psutil python -m scripts.setup python -m scripts.setup_gae @@ -57,7 +55,7 @@ jobs: - checkout - run: <<: *install_dependencies - - run: pip install pyyaml + - run: sudo pip install pyyaml - run: name: Run lint tests # All the python 
scripts should behave as modules. Files like the @@ -92,7 +90,7 @@ jobs: - run: date +%F > date - restore_cache: <<: *restore_cache - - run: pip install pyyaml + - run: sudo pip install pyyaml - run: name: Run frontend tests command: | @@ -100,7 +98,7 @@ jobs: - run: name: Generate frontend coverage report command: | - pip install codecov + sudo pip install codecov codecov --file ../karma_coverage_reports/lcov.info -F frontend when: on_success @@ -113,9 +111,10 @@ jobs: - run: date +%F > date - restore_cache: <<: *restore_cache - - run: pip install pyyaml - - run: pip install webtest - - run: pip install configparser + - run: sudo pip install pyyaml + - run: sudo pip install webtest + - run: sudo pip install configparser + - run: sudo pip install coverage - run: name: Run backend tests command: | @@ -125,7 +124,7 @@ jobs: - run: name: Generate backend coverage report command: | - pip install codecov + sudo pip install codecov codecov -F backend ./cc-test-reporter format-coverage -t coverage.py coverage.xml ./cc-test-reporter sum-coverage coverage/codeclimate.json diff --git a/python_utils.py b/python_utils.py index 09d0696ad917..8393426d2d4c 100644 --- a/python_utils.py +++ b/python_utils.py @@ -22,7 +22,7 @@ import os import sys -_FUTURE_PATH = os.path.join('third_party', 'future-0.17.1') +_FUTURE_PATH = os.path.join(os.getcwd(), 'third_party', 'future-0.17.1') sys.path.insert(0, _FUTURE_PATH) # pylint: disable=wrong-import-position diff --git a/scripts/run_backend_tests.py b/scripts/run_backend_tests.py index bb00a4f63b77..fba2e0b64bd4 100644 --- a/scripts/run_backend_tests.py +++ b/scripts/run_backend_tests.py @@ -70,7 +70,6 @@ def main(argv=None): 'node_modules/webpack/bin/webpack.js', '--config', 'webpack.dev.config.ts']) - sys.path.append(os.path.join(common.OPPIA_TOOLS_DIR, 'webtest-2.0.33')) backend_tests.main(argv=argv) if parsed_args.generate_coverage_report: From 6f04c1b73aed35edc2386b23bc6aff6e4b765db1 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty 
Date: Sat, 31 Aug 2019 03:39:23 +0530 Subject: [PATCH 093/141] fix test --- scripts/build_test.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/build_test.py b/scripts/build_test.py index ed7fb741e4cc..4cfc0bcdc85b 100644 --- a/scripts/build_test.py +++ b/scripts/build_test.py @@ -993,6 +993,6 @@ def mock_check_call(cmd, **unused_kwargs): % (build.WEBPACK_FILE, build.WEBPACK_PROD_CONFIG)) with self.swap(subprocess, 'check_call', mock_check_call): - build.main_using_webpack() + build.build_using_webpack() # pylint: enable=protected-access From 767d97e1ef2d97c1f4be9c553fdd499c5cabb799 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Sat, 31 Aug 2019 04:12:09 +0530 Subject: [PATCH 094/141] fix --- scripts/common.py | 3 ++- scripts/create_expression_parser.py | 2 +- scripts/run_backend_tests.py | 4 +++- 3 files changed, 6 insertions(+), 3 deletions(-) diff --git a/scripts/common.py b/scripts/common.py index abe49834730d..80bcb3855b2c 100644 --- a/scripts/common.py +++ b/scripts/common.py @@ -39,6 +39,7 @@ GOOGLE_CLOUD_SDK_HOME = os.path.join( OPPIA_TOOLS_DIR, 'google-cloud-sdk-251.0.0/google-cloud-sdk') NODE_PATH = os.path.join(OPPIA_TOOLS_DIR, 'node-10.15.3') +NODE_MODULES_PATH = os.path.join(CURR_DIR, 'node_modules') FRONTEND_DIR = 'core/templates/dev/head' NPM_PATH = os.path.join(NODE_PATH, 'bin/npm') os.environ['PATH'] = '%s/bin:' % NODE_PATH + os.environ['PATH'] @@ -277,7 +278,7 @@ def install_npm_library(library, version, path): """ python_utils.PRINT( 'Checking whether %s is installed in %s' % (library, path)) - if not os.path.exists('node_modules/%s' % library): + if not os.path.exists(os.path.join(NODE_MODULES_PATH, library)): python_utils.PRINT('Installing %s' % library) subprocess.call([NPM_PATH, 'install', '%s@%s' % (library, version)]) diff --git a/scripts/create_expression_parser.py b/scripts/create_expression_parser.py index a2d9646ffbb7..ea0387494e91 100644 --- a/scripts/create_expression_parser.py +++ 
b/scripts/create_expression_parser.py @@ -41,7 +41,7 @@ def main(): common.install_npm_library('pegjs', '0.8.0', common.OPPIA_TOOLS_DIR) subprocess.call([ - 'node_modules/pegjs/bin/pegjs', + os.path.join(common.NODE_MODULES_PATH, 'pegjs/bin/pegjs'), expression_parser_definition, expression_parser_js]) for line in fileinput.input(files=[expression_parser_js], inplace=True): diff --git a/scripts/run_backend_tests.py b/scripts/run_backend_tests.py index fba2e0b64bd4..0fbc9830abcf 100644 --- a/scripts/run_backend_tests.py +++ b/scripts/run_backend_tests.py @@ -63,7 +63,9 @@ def main(argv=None): # Compile typescript files. python_utils.PRINT('Compiling typescript...') - subprocess.call(['node_modules/typescript/bin/tsc', '--project', '.']) + subprocess.call([ + os.path.join(common.NODE_MODULES_PATH, 'typescript/bin/tsc'), + '--project', '.']) python_utils.PRINT('Compiling webpack...') subprocess.call([ From 8e432436fa13e7e1a3fd50c530eb2a39145e2f02 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Sat, 31 Aug 2019 21:45:34 +0530 Subject: [PATCH 095/141] fix --- scripts/common.py | 6 +++--- scripts/run_e2e_tests.py | 8 ++++---- scripts/run_performance_tests.py | 6 +++--- scripts/start.py | 6 +++--- 4 files changed, 13 insertions(+), 13 deletions(-) diff --git a/scripts/common.py b/scripts/common.py index 80bcb3855b2c..b5e5dc52da6a 100644 --- a/scripts/common.py +++ b/scripts/common.py @@ -186,8 +186,8 @@ def ensure_release_scripts_folder_exists_and_is_up_to_date(): subprocess.call(['git', 'pull', remote_alias]) -def is_port_close(port): - """Checks if no process is listening to the port. +def is_port_open(port): + """Checks if a process is listening to the port. Args: port: int. The port number. 
@@ -197,7 +197,7 @@ def is_port_close(port): """ with contextlib.closing( socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as s: - return bool(s.connect_ex(('localhost', port))) + return bool(not s.connect_ex(('localhost', port))) # Credits: https://stackoverflow.com/a/20691431/11755830 diff --git a/scripts/run_e2e_tests.py b/scripts/run_e2e_tests.py index bafadb41d3f5..ec4fd5d7bafb 100644 --- a/scripts/run_e2e_tests.py +++ b/scripts/run_e2e_tests.py @@ -74,7 +74,7 @@ def cleanup(): # Wait for the servers to go down; suppress 'connection refused' error # output from nc since that is exactly what we are expecting to happen. - while not common.is_port_close(4444) or not common.is_port_close(9001): + while common.is_port_open(4444) or common.is_port_open(9001): time.sleep(1) if os.path.isdir('../protractor-screenshots'): @@ -97,14 +97,14 @@ def main(argv=None): install_third_party_libs.maybe_install_dependencies( parsed_args.skip_install, parsed_args.run_minified_tests) - if not common.is_port_close(8181): + if common.is_port_open(8181): common.print_string_after_two_new_lines([ 'There is already a server running on localhost:8181.', 'Please terminate it before running the end-to-end tests.', 'Exiting.']) sys.exit(1) - if not common.is_port_close(9001): + if common.is_port_open(9001): common.print_string_after_two_new_lines([ 'There is already a server running on localhost:9001.', 'Please terminate it before running the end-to-end tests.', @@ -168,7 +168,7 @@ def main(argv=None): common.GOOGLE_APP_ENGINE_HOME, app_yaml_filepath), shell=True)) # Wait for the servers to come up. - while common.is_port_close(4444) or common.is_port_close(9001): + while not common.is_port_open(4444) or not common.is_port_open(9001): time.sleep(1) # Delete outdated screenshots. 
diff --git a/scripts/run_performance_tests.py b/scripts/run_performance_tests.py index d466ba8a0372..457a615d39a4 100644 --- a/scripts/run_performance_tests.py +++ b/scripts/run_performance_tests.py @@ -52,7 +52,7 @@ def cleanup(): # Wait for the servers to go down; suppress 'connection refused' error # output from nc since that is exactly what we are expecting to happen. - while not common.is_port_close(9501): + while common.is_port_open(9501): time.sleep(1) python_utils.PRINT('Done!') @@ -84,7 +84,7 @@ def main(argv=None): install_third_party_libs.maybe_install_dependencies( parsed_args.skip_install, parsed_args.run_minified_tests) - if not common.is_port_close(8181): + if common.is_port_open(8181): common.print_string_after_two_new_lines([ 'There is already a server running on localhost:8181', 'Please terminate it before running the performance tests.', @@ -110,7 +110,7 @@ def main(argv=None): '--skip_sdk_update_check=true', 'app_dev.yaml']) # Wait for the servers to come up. - while common.is_port_close(9501): + while not common.is_port_open(9501): time.sleep(1) # Install xvfb if not on travis, Used in frontend, e2e tests and performance diff --git a/scripts/start.py b/scripts/start.py index fe8110a84d6c..c5cd75b9327e 100644 --- a/scripts/start.py +++ b/scripts/start.py @@ -60,7 +60,7 @@ def cleanup(): common.print_string_after_two_new_lines([ 'INFORMATION', 'Cleaning up the servers.']) - while not common.is_port_close(8181): + while common.is_port_open(8181): time.sleep(1) @@ -81,7 +81,7 @@ def main(argv=None): python_utils.PRINT('Oppia setup complete!') # Check that there isn't a server already running. - if not common.is_port_close(8181): + if common.is_port_open(8181): common.print_string_after_two_new_lines([ 'WARNING', 'Could not start new server. There is already an existing server', @@ -143,7 +143,7 @@ def main(argv=None): enable_console_arg, app_yaml_filepath), shell=True)) # Wait for the servers to come up. 
- while common.is_port_close(8181): + while not common.is_port_open(8181): time.sleep(1) os_info = os.uname() From fabf5170137f60efde974a11a85914173d0bcc96 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Sat, 31 Aug 2019 21:57:18 +0530 Subject: [PATCH 096/141] address comments --- scripts/common.py | 14 +++++++------- scripts/install_third_party_libs.py | 6 +++--- scripts/pylint_extensions.py | 17 ++++++----------- utils.py | 4 ++-- 4 files changed, 18 insertions(+), 23 deletions(-) diff --git a/scripts/common.py b/scripts/common.py index b5e5dc52da6a..bcb454412976 100644 --- a/scripts/common.py +++ b/scripts/common.py @@ -200,9 +200,9 @@ def is_port_open(port): return bool(not s.connect_ex(('localhost', port))) -# Credits: https://stackoverflow.com/a/20691431/11755830 def kill_process(port): """Kills a process that is listening to a specific port. + Credits: https://stackoverflow.com/a/20691431/11755830 Args: port: int. The port number. @@ -237,10 +237,10 @@ def recursive_chown(path, uid, gid): uid: int. Owner ID to be set. gid: int. Group ID to be set. """ - for root, dirs, files in os.walk(path): - for directory in dirs: + for root, directories, filenames in os.walk(path): + for directory in directories: os.chown(os.path.join(root, directory), uid, gid) - for filename in files: + for filename in filenames: os.chown(os.path.join(root, filename), uid, gid) @@ -251,10 +251,10 @@ def recursive_chmod(path, mode): path: str. The path for which mode would be set. mode: int. The mode to be set. 
""" - for root, dirs, files in os.walk(path): - for directory in dirs: + for root, directories, filenames in os.walk(path): + for directory in directories: os.chmod(os.path.join(root, directory), mode) - for filename in files: + for filename in filenames: os.chmod(os.path.join(root, filename), mode) diff --git a/scripts/install_third_party_libs.py b/scripts/install_third_party_libs.py index b2920bd43195..f1e1aaf01624 100644 --- a/scripts/install_third_party_libs.py +++ b/scripts/install_third_party_libs.py @@ -80,13 +80,13 @@ def pip_install(package, version, install_path): if hasattr(pip, 'main'): pip.main(args=[ 'install', '%s==%s' % (package, version), '--target', install_path]) - # For pip version > 10. + # For pip version >= 10. else: - # pylint: disable=no-name-in-module, import-error + # pylint: disable=no-name-in-module, import-error import pip._internal pip._internal.main(args=[ # pylint: disable=protected-access 'install', '%s==%s' % (package, version), '--target', install_path]) - # pylint: enable=no-name-in-module, import-error + # pylint: enable=no-name-in-module, import-error def install_skulpt(argv): diff --git a/scripts/pylint_extensions.py b/scripts/pylint_extensions.py index 3e3c09ae2c80..91d1edf53bad 100644 --- a/scripts/pylint_extensions.py +++ b/scripts/pylint_extensions.py @@ -21,19 +21,14 @@ import re -from . import docstrings_checker # isort:skip - -# pylint: disable=wrong-import-order -# pylint: disable=wrong-import-position -import astroid # isort:skip -from pylint import checkers # isort:skip -from pylint import interfaces # isort:skip -from pylint.checkers import typecheck # isort:skip -from pylint.checkers import utils as checker_utils # isort:skip +import astroid +from pylint import checkers +from pylint import interfaces +from pylint.checkers import typecheck +from pylint.checkers import utils as checker_utils import python_utils # isort:skip -# pylint: enable=wrong-import-position -# pylint: enable=wrong-import-order +from . 
import docstrings_checker # isort:skip def read_from_node(node): diff --git a/utils.py b/utils.py index 73fb3d2313e5..5fc1d184462c 100644 --- a/utils.py +++ b/utils.py @@ -105,8 +105,8 @@ def get_exploration_components_from_dir(dir_path): dir_path_array = dir_path_array[:-1] dir_path_length = len(dir_path_array) - for root, dirs, files in os.walk(dir_path): - for directory in dirs: + for root, directories, files in os.walk(dir_path): + for directory in directories: if root == dir_path and directory != 'assets': raise Exception( 'The only directory in %s should be assets/' % dir_path) From 8b7bb5a3acb478eac2db36d5ed84bebbcd3f8a6f Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Sat, 31 Aug 2019 22:11:16 +0530 Subject: [PATCH 097/141] fix --- scripts/common.py | 17 +++++++++-------- scripts/create_expression_parser.py | 2 +- scripts/install_third_party_libs.py | 2 +- scripts/run_backend_tests.py | 2 +- scripts/run_e2e_tests.py | 6 +++--- scripts/run_frontend_tests.py | 4 ++-- scripts/run_performance_tests.py | 2 +- scripts/run_presubmit_checks.py | 4 ++-- scripts/run_tests.py | 2 +- scripts/setup.py | 4 ++-- scripts/start.py | 12 ++++++------ scripts/vagrant_lock.py | 2 +- 12 files changed, 30 insertions(+), 29 deletions(-) diff --git a/scripts/common.py b/scripts/common.py index bcb454412976..a58dcb0689ed 100644 --- a/scripts/common.py +++ b/scripts/common.py @@ -258,8 +258,8 @@ def recursive_chmod(path, mode): os.chmod(os.path.join(root, filename), mode) -def print_string_after_two_new_lines(strings): - """Prints each string after two new lines. +def print_each_string_after_two_new_lines(strings): + """Prints the given strings, separating adjacent strings with two newlines. Args: strings: list(str). The strings to print. 
@@ -268,19 +268,20 @@ def print_string_after_two_new_lines(strings): python_utils.PRINT('%s\n' % string) -def install_npm_library(library, version, path): +def install_npm_library(library_name, version, path): """Installs the npm library after ensuring its not already installed. Args: - library: str. The library name. + library_name: str. The library name. version: str. The library version. path: str. The installation path for the library. """ python_utils.PRINT( - 'Checking whether %s is installed in %s' % (library, path)) - if not os.path.exists(os.path.join(NODE_MODULES_PATH, library)): - python_utils.PRINT('Installing %s' % library) - subprocess.call([NPM_PATH, 'install', '%s@%s' % (library, version)]) + 'Checking whether %s is installed in %s' % (library_name, path)) + if not os.path.exists(os.path.join(NODE_MODULES_PATH, library_name)): + python_utils.PRINT('Installing %s' % library_name) + subprocess.call([ + NPM_PATH, 'install', '%s@%s' % (library_name, version)]) class CD(python_utils.OBJECT): diff --git a/scripts/create_expression_parser.py b/scripts/create_expression_parser.py index ea0387494e91..115bc1d31d85 100644 --- a/scripts/create_expression_parser.py +++ b/scripts/create_expression_parser.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-"""It produces the expression parser.""" +"""This script produces the expression parser.""" from __future__ import absolute_import # pylint: disable=import-only-modules import fileinput diff --git a/scripts/install_third_party_libs.py b/scripts/install_third_party_libs.py index f1e1aaf01624..a34ed3c62ce5 100644 --- a/scripts/install_third_party_libs.py +++ b/scripts/install_third_party_libs.py @@ -54,7 +54,7 @@ def pip_install(package, version, install_path): python_utils.PRINT('Checking if pip is installed on the local machine') import pip except ImportError: - common.print_string_after_two_new_lines([ + common.print_each_string_after_two_new_lines([ 'Pip is required to install Oppia dependencies, but pip wasn\'t ' 'found', 'on your local machine.', diff --git a/scripts/run_backend_tests.py b/scripts/run_backend_tests.py index 0fbc9830abcf..d6643abbc562 100644 --- a/scripts/run_backend_tests.py +++ b/scripts/run_backend_tests.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-"""It runs all the (Python) backend tests, in parallel.""" +"""This script runs all the (Python) backend tests, in parallel.""" from __future__ import absolute_import # pylint: disable=import-only-modules import argparse diff --git a/scripts/run_e2e_tests.py b/scripts/run_e2e_tests.py index ec4fd5d7bafb..659fc2617e94 100644 --- a/scripts/run_e2e_tests.py +++ b/scripts/run_e2e_tests.py @@ -78,7 +78,7 @@ def cleanup(): time.sleep(1) if os.path.isdir('../protractor-screenshots'): - common.print_string_after_two_new_lines([ + common.print_each_string_after_two_new_lines([ 'Note: If ADD_SCREENSHOT_REPORTER is set to true in', 'core/tests/protractor.conf.js, you can view screenshots', 'of the failed tests in ../protractor-screenshots/']) @@ -98,14 +98,14 @@ def main(argv=None): parsed_args.skip_install, parsed_args.run_minified_tests) if common.is_port_open(8181): - common.print_string_after_two_new_lines([ + common.print_each_string_after_two_new_lines([ 'There is already a server running on localhost:8181.', 'Please terminate it before running the end-to-end tests.', 'Exiting.']) sys.exit(1) if common.is_port_open(9001): - common.print_string_after_two_new_lines([ + common.print_each_string_after_two_new_lines([ 'There is already a server running on localhost:9001.', 'Please terminate it before running the end-to-end tests.', 'Exiting.']) diff --git a/scripts/run_frontend_tests.py b/scripts/run_frontend_tests.py index a3a4f211385c..007f73441554 100644 --- a/scripts/run_frontend_tests.py +++ b/scripts/run_frontend_tests.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-"""It runs unit tests for frontend JavaScript code (using Karma).""" +"""This script runs unit tests for frontend JavaScript code (using Karma).""" from __future__ import absolute_import # pylint: disable=import-only-modules import argparse @@ -50,7 +50,7 @@ def main(argv=None): parsed_args, _ = _PARSER.parse_known_args(args=argv) install_third_party_libs.maybe_install_dependencies( parsed_args.skip_install, parsed_args.run_minified_tests) - common.print_string_after_two_new_lines([ + common.print_each_string_after_two_new_lines([ 'View interactive frontend test coverage reports by navigating to', '../karma_coverage_reports', 'on your filesystem.', diff --git a/scripts/run_performance_tests.py b/scripts/run_performance_tests.py index 457a615d39a4..a65567c76d9a 100644 --- a/scripts/run_performance_tests.py +++ b/scripts/run_performance_tests.py @@ -85,7 +85,7 @@ def main(argv=None): parsed_args.skip_install, parsed_args.run_minified_tests) if common.is_port_open(8181): - common.print_string_after_two_new_lines([ + common.print_each_string_after_two_new_lines([ 'There is already a server running on localhost:8181', 'Please terminate it before running the performance tests.', 'Exiting.']) diff --git a/scripts/run_presubmit_checks.py b/scripts/run_presubmit_checks.py index b683acfef5b9..ae651e168317 100644 --- a/scripts/run_presubmit_checks.py +++ b/scripts/run_presubmit_checks.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""It runs the following tests in all cases. +"""This script runs the following tests in all cases. - Javascript and Python Linting - Backend Python tests @@ -78,7 +78,7 @@ def main(argv=None): python_utils.PRINT('Frontend tests passed.') else: # If files in common.FRONTEND_DIR were not changed, skip the tests. 
- common.print_string_after_two_new_lines([ + common.print_each_string_after_two_new_lines([ 'No frontend files were changed.', 'Skipped frontend tests']) diff --git a/scripts/run_tests.py b/scripts/run_tests.py index ccbee4537b4f..129545fed501 100644 --- a/scripts/run_tests.py +++ b/scripts/run_tests.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""It runs all the tests, in this order: +"""This script runs all the tests, in this order: - Frontend Karma unit tests - Backend Python tests - End-to-end Protractor tests diff --git a/scripts/setup.py b/scripts/setup.py index 1d8becfa7af2..16aa3dd161fe 100644 --- a/scripts/setup.py +++ b/scripts/setup.py @@ -63,7 +63,7 @@ def test_python_version(): # path. os_info = os.uname() if os_info[0] != 'Darwin' and os_info[0] != 'Linux': - common.print_string_after_two_new_lines([ + common.print_each_string_after_two_new_lines([ 'It looks like you are using Windows. If you have Python ' 'installed,', 'make sure it is in your PATH and that PYTHONPATH is set.', @@ -104,7 +104,7 @@ def main(): if os_info[0] != 'Darwin' and os_info[0] != 'Linux': # Node is a requirement for all installation scripts. Here, we check if # the OS supports node.js installation; if not, we exit with an error. 
- common.print_string_after_two_new_lines([ + common.print_each_string_after_two_new_lines([ 'WARNING: Unsupported OS for installation of node.js.', 'If you are running this script on Windows, see the instructions', 'here regarding installation of node.js:', diff --git a/scripts/start.py b/scripts/start.py index c5cd75b9327e..3c074a68267a 100644 --- a/scripts/start.py +++ b/scripts/start.py @@ -57,7 +57,7 @@ def cleanup(): """Function for waiting for the servers to go down.""" - common.print_string_after_two_new_lines([ + common.print_each_string_after_two_new_lines([ 'INFORMATION', 'Cleaning up the servers.']) while common.is_port_open(8181): @@ -82,7 +82,7 @@ def main(argv=None): # Check that there isn't a server already running. if common.is_port_open(8181): - common.print_string_after_two_new_lines([ + common.print_each_string_after_two_new_lines([ 'WARNING', 'Could not start new server. There is already an existing server', 'running at port 8181.']) @@ -153,13 +153,13 @@ def main(argv=None): if list(filter( detect_virtualbox_pattern.match, os.listdir('/dev/disk/by-id/'))): - common.print_string_after_two_new_lines([ + common.print_each_string_after_two_new_lines([ 'INFORMATION', 'Setting up a local development server. You can access this ' 'server', 'by navigating to localhost:8181 in a browser window.']) else: - common.print_string_after_two_new_lines([ + common.print_each_string_after_two_new_lines([ 'INFORMATION', 'Setting up a local development server at localhost:8181. ' 'Opening a', @@ -168,7 +168,7 @@ def main(argv=None): background_processes.append( subprocess.Popen(['xdg-open', 'http://localhost:8181/'])) elif os_info[0] == 'Darwin' and not parsed_args.no_browser: - common.print_string_after_two_new_lines([ + common.print_each_string_after_two_new_lines([ 'INFORMATION', 'Setting up a local development server at localhost:8181. 
' 'Opening a', @@ -177,7 +177,7 @@ def main(argv=None): background_processes.append( subprocess.Popen(['open', 'http://localhost:8181/'])) else: - common.print_string_after_two_new_lines([ + common.print_each_string_after_two_new_lines([ 'INFORMATION', 'Setting up a local development server. You can access this server', 'by navigating to localhost:8181 in a browser window.']) diff --git a/scripts/vagrant_lock.py b/scripts/vagrant_lock.py index 64c28f8ba9be..38b787a6b453 100644 --- a/scripts/vagrant_lock.py +++ b/scripts/vagrant_lock.py @@ -44,7 +44,7 @@ def main(argv=None): sys.exit(0) if os.path.isfile(vagrant_lock_file): - common.print_string_after_two_new_lines([ + common.print_each_string_after_two_new_lines([ 'Another setup instance is already running', 'Please wait for that instance to complete or terminate it', 'If you ran $0 twice on purpose, you can override this with ', From 057f2e6d405e6b922b7f012249138cfe9c624d31 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Sat, 31 Aug 2019 22:30:04 +0530 Subject: [PATCH 098/141] fix node --- scripts/run_backend_tests.py | 4 ++-- scripts/run_e2e_tests.py | 27 ++++++++++++++++++--------- scripts/run_frontend_tests.py | 16 +++++++++------- scripts/setup.py | 6 +++--- scripts/start.py | 6 ++++-- 5 files changed, 36 insertions(+), 23 deletions(-) diff --git a/scripts/run_backend_tests.py b/scripts/run_backend_tests.py index d6643abbc562..bd166a68f3fd 100644 --- a/scripts/run_backend_tests.py +++ b/scripts/run_backend_tests.py @@ -69,8 +69,8 @@ def main(argv=None): python_utils.PRINT('Compiling webpack...') subprocess.call([ - 'node_modules/webpack/bin/webpack.js', '--config', - 'webpack.dev.config.ts']) + os.path.join(common.NODE_MODULES_PATH, 'webpack', 'bin', 'webpack.js'), + '--config', 'webpack.dev.config.ts']) backend_tests.main(argv=argv) diff --git a/scripts/run_e2e_tests.py b/scripts/run_e2e_tests.py index 659fc2617e94..169bb8daf033 100644 --- a/scripts/run_e2e_tests.py +++ b/scripts/run_e2e_tests.py @@ 
-147,11 +147,11 @@ def main(argv=None): # The 'quiet' option prints only the necessary information about the server # start-up process. subprocess.call([ - 'node_modules/.bin/webdriver-manager', 'update', - '--versions.chrome', '2.41']) + os.path.join(common.NODE_MODULES_PATH, '.bin', 'webdriver-manager'), + 'update', '--versions.chrome', '2.41']) subprocess.call([ - 'node_modules/.bin/webdriver-manager', 'start', - '--versions.chrome', '2.41', '--detach', '--quiet']) + os.path.join(common.NODE_MODULES_PATH, '.bin', 'webdriver-manager'), + 'start', '--versions.chrome', '2.41', '--detach', '--quiet']) # Start a selenium process. The program sends thousands of lines of useless # info logs to stderr so we discard them. @@ -159,7 +159,8 @@ def main(argv=None): # level. background_processes = [] background_processes.append(subprocess.Popen([ - 'node_modules/.bin/webdriver-manager', 'start', '2>/dev/null'])) + os.path.join(common.NODE_MODULES_PATH, '.bin', 'webdriver-manager'), + 'start', '2>/dev/null'])) # Start a demo server. 
background_processes.append(subprocess.Popen( 'python %s/dev_appserver.py --host=0.0.0.0 --port=9001 ' @@ -183,12 +184,16 @@ def main(argv=None): if not parsed_args.browserstack: if not parsed_args.sharding or parsed_args.sharding_instances == '1': subprocess.call([ - 'node_modules/protractor/bin/protractor', + os.path.join( + common.NODE_MODULES_PATH, 'protractor', 'bin', + 'protractor'), 'core/tests/protractor.conf.js', '--suite', parsed_args.suite, '--params.devMode="%s"' % dev_mode]) else: subprocess.call([ - 'node_modules/protractor/bin/protractor', + os.path.join( + common.NODE_MODULES_PATH, 'protractor', 'bin', + 'protractor'), 'core/tests/protractor.conf.js', '--capabilities.shardTestFiles=%s' % parsed_args.sharding, '--capabilities.maxInstances=%s' @@ -198,12 +203,16 @@ def main(argv=None): python_utils.PRINT('Running the tests on browserstack...') if not parsed_args.sharding or parsed_args.sharding_instances == '1': subprocess.call([ - 'node_modules/protractor/bin/protractor', + os.path.join( + common.NODE_MODULES_PATH, 'protractor', 'bin', + 'protractor'), 'core/tests/protractor-browserstack.conf.js', '--suite', parsed_args.suite, '--params.devMode="%s"' % dev_mode]) else: subprocess.call([ - 'node_modules/protractor/bin/protractor', + os.path.join( + common.NODE_MODULES_PATH, 'protractor', 'bin', + 'protractor'), 'core/tests/protractor-browserstack.conf.js', '--capabilities.shardTestFiles=%s' % parsed_args.sharding, '--capabilities.maxInstances=%s' diff --git a/scripts/run_frontend_tests.py b/scripts/run_frontend_tests.py index 007f73441554..779a32d2a692 100644 --- a/scripts/run_frontend_tests.py +++ b/scripts/run_frontend_tests.py @@ -60,12 +60,13 @@ def main(argv=None): if xvfb_prefix: subprocess.call([ - xvfb_prefix, 'node_modules/karma/bin/karma', 'start', + xvfb_prefix, os.path.join( + common.NODE_MODULES_PATH, 'karma', 'bin', 'karma'), 'start', 'core/tests/karma.conf.ts']) else: subprocess.call([ - 'node_modules/karma/bin/karma', 'start', - 
'core/tests/karma.conf.ts']) + os.path.join(common.NODE_MODULES_PATH, 'karma', 'bin', 'karma'), + 'start', 'core/tests/karma.conf.ts']) if parsed_args.run_minified_tests is True: python_utils.PRINT('Running test in production environment') @@ -74,12 +75,13 @@ def main(argv=None): if xvfb_prefix: subprocess.call([ - xvfb_prefix, 'node_modules/karma/bin/karma', 'start', - 'core/tests/karma.conf.ts', '--prodEnv']) + xvfb_prefix, + os.path.join(common.NODE_MODULES_PATH, 'karma', 'bin', 'karma'), + 'start', 'core/tests/karma.conf.ts', '--prodEnv']) else: subprocess.call([ - 'node_modules/karma/bin/karma', 'start', - 'core/tests/karma.conf.ts', '--prodEnv']) + os.path.join(common.NODE_MODULES_PATH, 'karma', 'bin', 'karma'), + 'start', 'core/tests/karma.conf.ts', '--prodEnv']) python_utils.PRINT('Done!') diff --git a/scripts/setup.py b/scripts/setup.py index 16aa3dd161fe..56e2933ff34d 100644 --- a/scripts/setup.py +++ b/scripts/setup.py @@ -98,7 +98,7 @@ def main(): # as $PYTHONPATH). create_directory(common.OPPIA_TOOLS_DIR) create_directory('third_party/') - create_directory('node_modules/') + create_directory(common.NODE_MODULES_PATH) os_info = os.uname() if os_info[0] != 'Darwin' and os_info[0] != 'Linux': @@ -142,8 +142,8 @@ def main(): # Change ownership of node_modules. # Note: on some machines, these commands seem to take quite a long time. - common.recursive_chown('node_modules/', os.getuid(), -1) - common.recursive_chmod('node_modules/', 744) + common.recursive_chown(common.NODE_MODULES_PATH, os.getuid(), -1) + common.recursive_chmod(common.NODE_MODULES_PATH, 744) # Adjust path to support the default Chrome locations for Unix, Windows and # Mac OS. 
diff --git a/scripts/start.py b/scripts/start.py index 3c074a68267a..2add9fce5a95 100644 --- a/scripts/start.py +++ b/scripts/start.py @@ -124,13 +124,15 @@ def main(argv=None): background_processes = [] if not parsed_args.prod_env: background_processes.append(subprocess.Popen([ - '%s/bin/node' % common.NODE_PATH, 'node_modules/gulp/bin/gulp.js', + os.path.join(common.NODE_PATH, 'bin', 'node'), + os.path.join(common.NODE_MODULES_PATH, 'gulp', 'bin', 'gulp.js'), 'watch'])) # In prod mode webpack is launched through scripts/build.py python_utils.PRINT('Compiling webpack...') background_processes.append(subprocess.Popen([ - 'node_modules/webpack/bin/webpack.js', + os.path.join( + common.NODE_MODULES_PATH, 'webpack', 'bin', 'webpack.js'), '--config', 'webpack.dev.config.ts', '--watch'])) # Give webpack few seconds to do the initial compilation. time.sleep(10) From 8003bfe03813217ff3e746aedd07ae9f5adb4e14 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Sat, 31 Aug 2019 23:06:00 +0530 Subject: [PATCH 099/141] fix --- scripts/create_expression_parser.py | 10 ++++---- scripts/install_third_party_libs.py | 36 +++++++++++------------------ scripts/run_e2e_tests.py | 16 ++++++------- scripts/run_performance_tests.py | 2 +- scripts/setup.py | 8 +++---- scripts/setup_gae.py | 4 ++-- scripts/start.py | 8 +++---- 7 files changed, 38 insertions(+), 46 deletions(-) diff --git a/scripts/create_expression_parser.py b/scripts/create_expression_parser.py index 115bc1d31d85..88e1dacab0c5 100644 --- a/scripts/create_expression_parser.py +++ b/scripts/create_expression_parser.py @@ -45,8 +45,9 @@ def main(): expression_parser_definition, expression_parser_js]) for line in fileinput.input(files=[expression_parser_js], inplace=True): - # Inside this loop the STDOUT will be redirected to the file. - # The end='' is needed to avoid double line breaks. + # Inside this loop the STDOUT will be redirected to the file, + # expression_parser_js. 
The end='' is needed to avoid double line + # breaks. python_utils.PRINT( re.sub( r'module\.exports.*$', @@ -55,8 +56,9 @@ def main(): line), end='') for line in fileinput.input(files=[expression_parser_js], inplace=True): - # Inside this loop the STDOUT will be redirected to the file. - # The end='' is needed to avoid double line breaks. + # Inside this loop the STDOUT will be redirected to the file, + # expression_parser_js. The end='' is needed to avoid double line + # breaks. python_utils.PRINT( re.sub(r'^\}\)\(\);\s*$', '}]);', line), end='') diff --git a/scripts/install_third_party_libs.py b/scripts/install_third_party_libs.py index a34ed3c62ce5..ba8104658e7b 100644 --- a/scripts/install_third_party_libs.py +++ b/scripts/install_third_party_libs.py @@ -56,8 +56,7 @@ def pip_install(package, version, install_path): except ImportError: common.print_each_string_after_two_new_lines([ 'Pip is required to install Oppia dependencies, but pip wasn\'t ' - 'found', - 'on your local machine.', + 'found on your local machine.', 'Please see \'Installing Oppia\' on the Oppia developers\' wiki ' 'page:']) @@ -74,7 +73,7 @@ def pip_install(package, version, install_path): python_utils.PRINT( 'https://github.com/oppia/oppia/wiki/Installing-Oppia-%28' 'Windows%29') - sys.exit(1) + raise Exception # For pip version < 10. if hasattr(pip, 'main'): @@ -113,6 +112,8 @@ def install_skulpt(argv): if not os.path.exists( os.path.join(common.OPPIA_TOOLS_DIR, 'skulpt-0.10.0')): python_utils.PRINT('Downloading Skulpt') + skulpt_filepath = os.path.join( + common.OPPIA_TOOLS_DIR, 'skulpt-0.10.0', 'skulpt', 'skulpt.py') os.chdir(common.OPPIA_TOOLS_DIR) os.mkdir('skulpt-0.10.0') os.chdir('skulpt-0.10.0') @@ -129,29 +130,23 @@ def install_skulpt(argv): # and generating documentation and are not necessary when building # Skulpt. 
for line in fileinput.input( - files=[os.path.join( - common.OPPIA_TOOLS_DIR, - 'skulpt-0.10.0/skulpt/skulpt.py')], inplace=True): - # Inside this loop the STDOUT will be redirected to the file. - # The end='' is needed to avoid double line breaks. + files=[skulpt_filepath], inplace=True): + # Inside this loop the STDOUT will be redirected to the file, + # skulpt.py. The end='' is needed to avoid double line breaks. python_utils.PRINT( line.replace('ret = test()', 'ret = 0'), end='') for line in fileinput.input( - files=[os.path.join( - common.OPPIA_TOOLS_DIR, - 'skulpt-0.10.0/skulpt/skulpt.py')], inplace=True): - # Inside this loop the STDOUT will be redirected to the file. - # The end='' is needed to avoid double line breaks. + files=[skulpt_filepath], inplace=True): + # Inside this loop the STDOUT will be redirected to the file, + # skulpt.py. The end='' is needed to avoid double line breaks. python_utils.PRINT( line.replace(' doc()', ' pass#doc()'), end='') for line in fileinput.input( - files=[os.path.join( - common.OPPIA_TOOLS_DIR, - 'skulpt-0.10.0/skulpt/skulpt.py')], inplace=True): + files=[skulpt_filepath], inplace=True): # This and the next command disable unit and compressed unit # tests for the compressed distribution of Skulpt. These # tests don't work on some Ubuntu environments and cause a @@ -163,17 +158,12 @@ def install_skulpt(argv): end='') for line in fileinput.input( - files=[os.path.join( - common.OPPIA_TOOLS_DIR, - 'skulpt-0.10.0/skulpt/skulpt.py')], inplace=True): + files=[skulpt_filepath], inplace=True): python_utils.PRINT( line.replace('ret = rununits(opt=True)', 'ret = 0'), end='') - subprocess.call([ - 'python', os.path.join( - common.OPPIA_TOOLS_DIR, 'skulpt-0.10.0/skulpt/skulpt.py'), - 'dist']) + subprocess.call(['python', skulpt_filepath, 'dist']) # Return to the Oppia root folder. 
os.chdir(common.CURR_DIR) diff --git a/scripts/run_e2e_tests.py b/scripts/run_e2e_tests.py index 169bb8daf033..f83e53e0788f 100644 --- a/scripts/run_e2e_tests.py +++ b/scripts/run_e2e_tests.py @@ -77,7 +77,7 @@ def cleanup(): while common.is_port_open(4444) or common.is_port_open(9001): time.sleep(1) - if os.path.isdir('../protractor-screenshots'): + if os.path.isdir(os.path.join('..', 'protractor-screenshots')): common.print_each_string_after_two_new_lines([ 'Note: If ADD_SCREENSHOT_REPORTER is set to true in', 'core/tests/protractor.conf.js, you can view screenshots', @@ -102,14 +102,14 @@ def main(argv=None): 'There is already a server running on localhost:8181.', 'Please terminate it before running the end-to-end tests.', 'Exiting.']) - sys.exit(1) + raise Exception if common.is_port_open(9001): common.print_each_string_after_two_new_lines([ 'There is already a server running on localhost:9001.', 'Please terminate it before running the end-to-end tests.', 'Exiting.']) - sys.exit(1) + raise Exception # Forces the cleanup function to run on exit. # Developers: note that at the end of this script, the cleanup() function at @@ -122,8 +122,8 @@ def main(argv=None): constants_env_variable = '\'DEV_MODE\': false' for line in fileinput.input( files=['assets/constants.ts'], inplace=True): - # Inside this loop the STDOUT will be redirected to the file. - # The end='' is needed to avoid double line breaks. + # Inside this loop the STDOUT will be redirected to the file, + # constants.ts. The end='' is needed to avoid double line breaks. python_utils.PRINT( re.sub(r'\'DEV_MODE\': .*', constants_env_variable, line), end='') @@ -134,8 +134,8 @@ def main(argv=None): constants_env_variable = '\'DEV_MODE\': true' for line in fileinput.input( files=['assets/constants.ts'], inplace=True): - # Inside this loop the STDOUT will be redirected to the file. - # The end='' is needed to avoid double line breaks. 
+ # Inside this loop the STDOUT will be redirected to the file, + # constants.ts. The end='' is needed to avoid double line breaks. python_utils.PRINT( re.sub(r'\'DEV_MODE\': .*', constants_env_variable, line), end='') @@ -173,7 +173,7 @@ def main(argv=None): time.sleep(1) # Delete outdated screenshots. - if os.path.isdir('../protractor-screenshots'): + if os.path.isdir(os.path.join('..', 'protractor-screenshots')): shutil.rmtree('../protractor-screenshots') # Run the end-to-end tests. The conditional is used to run protractor diff --git a/scripts/run_performance_tests.py b/scripts/run_performance_tests.py index a65567c76d9a..3cc2cb3ff40b 100644 --- a/scripts/run_performance_tests.py +++ b/scripts/run_performance_tests.py @@ -89,7 +89,7 @@ def main(argv=None): 'There is already a server running on localhost:8181', 'Please terminate it before running the performance tests.', 'Exiting.']) - sys.exit(1) + raise Exception # Forces the cleanup function to run on exit. # Developers: note that at the end of this script, the cleanup() function at diff --git a/scripts/setup.py b/scripts/setup.py index 56e2933ff34d..b16e5c42045d 100644 --- a/scripts/setup.py +++ b/scripts/setup.py @@ -75,7 +75,7 @@ def test_python_version(): 'https://stackoverflow.com/questions/3701646/how-to-add-to-the-' 'pythonpath-in-windows-7']) # Exit when no suitable Python environment can be found. - sys.exit(1) + raise Exception def main(): @@ -90,7 +90,7 @@ def main(): python_utils.PRINT( 'WARNING This script should be run from the oppia/ root folder.') python_utils.PRINT('') - sys.exit(1) + raise Exception # Set COMMON_DIR to the absolute path of the directory above OPPIA_DIR. This # is necessary becaue COMMON_DIR (or subsequent variables which refer to it) @@ -111,7 +111,7 @@ def main(): 'https://github.com/oppia/oppia/wiki/Installing-Oppia-%28Windows' '%29', 'STATUS: Installation completed except for node.js. Exiting.']) - sys.exit(1) + raise Exception # Download and install node.js. 
python_utils.PRINT( @@ -184,7 +184,7 @@ def main(): '/Applications/Google Chrome.app/Contents/MacOS/Google Chrome') else: python_utils.PRINT('Chrome is not found, stopping ...') - sys.exit(1) + raise Exception os.environ['CHROME_BIN'] = chrome_bin python_utils.PRINT('Environment setup completed.') diff --git a/scripts/setup_gae.py b/scripts/setup_gae.py index 662a6c9c1185..5e759163472c 100644 --- a/scripts/setup_gae.py +++ b/scripts/setup_gae.py @@ -60,7 +60,7 @@ def main(): 'google_appengine_1.9.67.zip', filename='gae-download.zip') except Exception: python_utils.PRINT('Error downloading Google App Engine. Exiting.') - sys.exit(1) + raise Exception python_utils.PRINT('Download complete. Installing Google App Engine...') with zipfile.ZipFile('gae-download.zip', 'r') as zip_ref: zip_ref.extractall( @@ -83,7 +83,7 @@ def main(): filename='gcloud-sdk.tar.gz') except Exception: python_utils.PRINT('Error downloading Google Cloud SDK. Exiting.') - sys.exit(1) + raise Exception python_utils.PRINT('Download complete. Installing Google Cloud SDK...') tar = tarfile.open(name='gcloud-sdk.tar.gz') tar.extractall( diff --git a/scripts/start.py b/scripts/start.py index 2add9fce5a95..c5c1ebffc243 100644 --- a/scripts/start.py +++ b/scripts/start.py @@ -97,8 +97,8 @@ def main(argv=None): constants_env_variable = '\'DEV_MODE\': false' for line in fileinput.input( files=['assets/constants.ts'], inplace=True): - # Inside this loop the STDOUT will be redirected to the file. - # The end='' is needed to avoid double line breaks. + # Inside this loop the STDOUT will be redirected to the file, + # constants.ts. The end='' is needed to avoid double line breaks. python_utils.PRINT( re.sub( r'\'DEV_MODE\': .*', constants_env_variable, line), end='') @@ -108,8 +108,8 @@ def main(argv=None): constants_env_variable = '\'DEV_MODE\': true' for line in fileinput.input( files=['assets/constants.ts'], inplace=True): - # Inside this loop the STDOUT will be redirected to the file. 
- # The end='' is needed to avoid double line breaks. + # Inside this loop the STDOUT will be redirected to the file, + # constants.ts. The end='' is needed to avoid double line breaks. python_utils.PRINT( re.sub( r'\'DEV_MODE\': .*', constants_env_variable, line), end='') From 25b64ade938ed399d1116aac4d0336416fcba468 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Sat, 31 Aug 2019 23:09:01 +0530 Subject: [PATCH 100/141] remove vagrant --- scripts/run_frontend_tests.py | 28 +++++------------ scripts/setup.py | 12 ------- scripts/start.py | 4 --- scripts/vagrant_lock.py | 59 ----------------------------------- 4 files changed, 7 insertions(+), 96 deletions(-) delete mode 100644 scripts/vagrant_lock.py diff --git a/scripts/run_frontend_tests.py b/scripts/run_frontend_tests.py index 779a32d2a692..abfc37381df3 100644 --- a/scripts/run_frontend_tests.py +++ b/scripts/run_frontend_tests.py @@ -44,9 +44,7 @@ def main(argv=None): """Runs the frontend tests.""" setup.main() setup_gae.main() - xvfb_prefix = '' - if os.environ.get('VAGRANT') or os.path.isfile('/etc/is_vagrant_vm'): - xvfb_prefix = '/usr/bin/xvfb-run' + parsed_args, _ = _PARSER.parse_known_args(args=argv) install_third_party_libs.maybe_install_dependencies( parsed_args.skip_install, parsed_args.run_minified_tests) @@ -58,30 +56,18 @@ def main(argv=None): build.main() - if xvfb_prefix: - subprocess.call([ - xvfb_prefix, os.path.join( - common.NODE_MODULES_PATH, 'karma', 'bin', 'karma'), 'start', - 'core/tests/karma.conf.ts']) - else: - subprocess.call([ - os.path.join(common.NODE_MODULES_PATH, 'karma', 'bin', 'karma'), - 'start', 'core/tests/karma.conf.ts']) + subprocess.call([ + os.path.join(common.NODE_MODULES_PATH, 'karma', 'bin', 'karma'), + 'start', 'core/tests/karma.conf.ts']) if parsed_args.run_minified_tests is True: python_utils.PRINT('Running test in production environment') build.main(argv=['--prod_env', '--minify_third_party_libs_only']) - if xvfb_prefix: - subprocess.call([ - xvfb_prefix, - 
os.path.join(common.NODE_MODULES_PATH, 'karma', 'bin', 'karma'), - 'start', 'core/tests/karma.conf.ts', '--prodEnv']) - else: - subprocess.call([ - os.path.join(common.NODE_MODULES_PATH, 'karma', 'bin', 'karma'), - 'start', 'core/tests/karma.conf.ts', '--prodEnv']) + subprocess.call([ + os.path.join(common.NODE_MODULES_PATH, 'karma', 'bin', 'karma'), + 'start', 'core/tests/karma.conf.ts', '--prodEnv']) python_utils.PRINT('Done!') diff --git a/scripts/setup.py b/scripts/setup.py index b16e5c42045d..080c04d363a4 100644 --- a/scripts/setup.py +++ b/scripts/setup.py @@ -149,18 +149,6 @@ def main(): # Mac OS. if os.environ.get('TRAVIS'): chrome_bin = '/usr/bin/chromium-browser' - elif os.environ.get('VAGRANT') or os.path.isfile('/etc/is_vagrant_vm'): - # XVFB is required for headless testing in Vagrant. - subprocess.call([ - 'sudo', 'apt-get', 'install', 'xvfb', 'chromium-browser']) - chrome_bin = '/usr/bin/chromium-browser' - # Used in frontend and e2e tests. Only gets set if using Vagrant VM. - os.environ['XVFB_PREFIX'] = '/usr/bin/xvfb-run' - # Enforce proper ownership on oppia, oppia_tools, and node_modules or - # else NPM installs will fail. - common.recursive_chown('/home/vagrant/oppia', os.getuid(), -1) - common.recursive_chown('/home/vagrant/oppia_tools', os.getuid(), -1) - common.recursive_chown('/home/vagrant/node_modules', os.getuid(), -1) elif os.path.isfile('/usr/bin/google-chrome'): # Unix. chrome_bin = '/usr/bin/google-chrome' diff --git a/scripts/start.py b/scripts/start.py index c5c1ebffc243..73f29979db9e 100644 --- a/scripts/start.py +++ b/scripts/start.py @@ -34,7 +34,6 @@ from . import install_third_party_libs from . import setup from . import setup_gae -from . 
import vagrant_lock _PARSER = argparse.ArgumentParser() _PARSER.add_argument( @@ -66,9 +65,6 @@ def cleanup(): def main(argv=None): """Starts up a development server running Oppia.""" - if os.path.isfile('/etc/is_vagrant_vm'): - vagrant_lock.main() - setup.main() setup_gae.main() diff --git a/scripts/vagrant_lock.py b/scripts/vagrant_lock.py deleted file mode 100644 index 38b787a6b453..000000000000 --- a/scripts/vagrant_lock.py +++ /dev/null @@ -1,59 +0,0 @@ -# Copyright 2019 The Oppia Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the 'License'); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an 'AS-IS' BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""This file should not be invoked directly, but sourced from other sh scripts. - -Creates a lockfile to help with new user confusion when launching a vagrant -vm. See https://github.com/oppia/oppia/pull/2749 for details. - -It can be overridden by passing --nolock to start.sh. -""" -from __future__ import absolute_import # pylint: disable=import-only-modules - -import argparse -import os -import sys - -from . import clean -from . 
import common - - -def main(argv=None): - """Creates a lockfile.""" - vagrant_lock_file = './.lock' - - _parser = argparse.ArgumentParser() - _parser.add_argument( - '--nolock', - help='optional; if specified, skips creation of lockfile', - action='store_true') - parsed_args, _ = _parser.parse_known_args(args=argv) - if parsed_args.nolock: - clean.delete_file(vagrant_lock_file) - sys.exit(0) - - if os.path.isfile(vagrant_lock_file): - common.print_each_string_after_two_new_lines([ - 'Another setup instance is already running', - 'Please wait for that instance to complete or terminate it', - 'If you ran $0 twice on purpose, you can override this with ', - '--nolock']) - sys.exit(1) - else: - os.utime(vagrant_lock_file, None) - clean.delete_file(vagrant_lock_file) - - -if __name__ == '__main__': - main(argv=sys.argv) From 0492abefb2589e2f4ec81951b51b145898603ef1 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Sun, 1 Sep 2019 01:11:55 +0530 Subject: [PATCH 101/141] fix --- scripts/common.py | 1 + scripts/install_third_party_libs.py | 14 +++-------- scripts/run_e2e_tests.py | 32 +++++++++++++++++-------- scripts/run_performance_tests.py | 21 +++++++++------- scripts/start.py | 37 +++++++++++++++++++---------- 5 files changed, 64 insertions(+), 41 deletions(-) diff --git a/scripts/common.py b/scripts/common.py index a58dcb0689ed..e9c66ce42072 100644 --- a/scripts/common.py +++ b/scripts/common.py @@ -42,6 +42,7 @@ NODE_MODULES_PATH = os.path.join(CURR_DIR, 'node_modules') FRONTEND_DIR = 'core/templates/dev/head' NPM_PATH = os.path.join(NODE_PATH, 'bin/npm') +# Add path for node which is required by the node_modules. 
os.environ['PATH'] = '%s/bin:' % NODE_PATH + os.environ['PATH'] diff --git a/scripts/install_third_party_libs.py b/scripts/install_third_party_libs.py index ba8104658e7b..b4af7d752294 100644 --- a/scripts/install_third_party_libs.py +++ b/scripts/install_third_party_libs.py @@ -75,17 +75,9 @@ def pip_install(package, version, install_path): 'Windows%29') raise Exception - # For pip version < 10. - if hasattr(pip, 'main'): - pip.main(args=[ - 'install', '%s==%s' % (package, version), '--target', install_path]) - # For pip version >= 10. - else: - # pylint: disable=no-name-in-module, import-error - import pip._internal - pip._internal.main(args=[ # pylint: disable=protected-access - 'install', '%s==%s' % (package, version), '--target', install_path]) - # pylint: enable=no-name-in-module, import-error + subprocess.call([ + 'pip', 'install', '%s==%s' % (package, version), '--target', + install_path]) def install_skulpt(argv): diff --git a/scripts/run_e2e_tests.py b/scripts/run_e2e_tests.py index f83e53e0788f..de5602ac234c 100644 --- a/scripts/run_e2e_tests.py +++ b/scripts/run_e2e_tests.py @@ -66,15 +66,21 @@ help='Sets the number of parallel browsers to open while sharding', default='3') +PORT_NUMBER_FOR_SELENIUM_SERVER = 4444 +PORT_NUMBER_FOR_GAE_SERVER = 9001 +USUAL_PORT_NUMBER_FOR_GAE_SERVER_IN_START = 8181 + def cleanup(): """Send a kill signal to the dev server and Selenium server.""" - common.kill_process(4444) - common.kill_process(9001) + common.kill_process(PORT_NUMBER_FOR_SELENIUM_SERVER) + common.kill_process(PORT_NUMBER_FOR_GAE_SERVER) # Wait for the servers to go down; suppress 'connection refused' error # output from nc since that is exactly what we are expecting to happen. 
- while common.is_port_open(4444) or common.is_port_open(9001): + while common.is_port_open( + PORT_NUMBER_FOR_SELENIUM_SERVER) or common.is_port_open( + PORT_NUMBER_FOR_GAE_SERVER): time.sleep(1) if os.path.isdir(os.path.join('..', 'protractor-screenshots')): @@ -97,16 +103,18 @@ def main(argv=None): install_third_party_libs.maybe_install_dependencies( parsed_args.skip_install, parsed_args.run_minified_tests) - if common.is_port_open(8181): + if common.is_port_open(USUAL_PORT_NUMBER_FOR_GAE_SERVER_IN_START): common.print_each_string_after_two_new_lines([ - 'There is already a server running on localhost:8181.', + 'There is already a server running on localhost:%s.' + % python_utils.UNICODE(USUAL_PORT_NUMBER_FOR_GAE_SERVER_IN_START), 'Please terminate it before running the end-to-end tests.', 'Exiting.']) raise Exception - if common.is_port_open(9001): + if common.is_port_open(PORT_NUMBER_FOR_GAE_SERVER): common.print_each_string_after_two_new_lines([ - 'There is already a server running on localhost:9001.', + 'There is already a server running on localhost:%s.' + % python_utils.UNICODE(PORT_NUMBER_FOR_GAE_SERVER), 'Please terminate it before running the end-to-end tests.', 'Exiting.']) raise Exception @@ -163,13 +171,17 @@ def main(argv=None): 'start', '2>/dev/null'])) # Start a demo server. background_processes.append(subprocess.Popen( - 'python %s/dev_appserver.py --host=0.0.0.0 --port=9001 ' + 'python %s/dev_appserver.py --host=0.0.0.0 --port=%s ' '--clear_datastore=yes --dev_appserver_log_level=critical ' '--log_level=critical --skip_sdk_update_check=true %s' % ( - common.GOOGLE_APP_ENGINE_HOME, app_yaml_filepath), shell=True)) + common.GOOGLE_APP_ENGINE_HOME, + python_utils.UNICODE(PORT_NUMBER_FOR_GAE_SERVER), + app_yaml_filepath), shell=True)) # Wait for the servers to come up. 
- while not common.is_port_open(4444) or not common.is_port_open(9001): + while not common.is_port_open( + PORT_NUMBER_FOR_SELENIUM_SERVER) or not common.is_port_open( + PORT_NUMBER_FOR_GAE_SERVER): time.sleep(1) # Delete outdated screenshots. diff --git a/scripts/run_performance_tests.py b/scripts/run_performance_tests.py index 3cc2cb3ff40b..385a14a92d68 100644 --- a/scripts/run_performance_tests.py +++ b/scripts/run_performance_tests.py @@ -45,14 +45,17 @@ help='If an argument is present then run test for that specific page. ' 'Otherwise run tests for all the pages sequentially.') +PORT_NUMBER_FOR_GAE_SERVER = 9501 +USUAL_PORT_NUMBER_FOR_GAE_SERVER_IN_START = 8181 + def cleanup(): """Send a kill signal to the dev server.""" - common.kill_process(9501) + common.kill_process(PORT_NUMBER_FOR_GAE_SERVER) # Wait for the servers to go down; suppress 'connection refused' error # output from nc since that is exactly what we are expecting to happen. - while common.is_port_open(9501): + while common.is_port_open(PORT_NUMBER_FOR_GAE_SERVER): time.sleep(1) python_utils.PRINT('Done!') @@ -84,9 +87,10 @@ def main(argv=None): install_third_party_libs.maybe_install_dependencies( parsed_args.skip_install, parsed_args.run_minified_tests) - if common.is_port_open(8181): + if common.is_port_open(USUAL_PORT_NUMBER_FOR_GAE_SERVER_IN_START): common.print_each_string_after_two_new_lines([ - 'There is already a server running on localhost:8181', + 'There is already a server running on localhost:%s' + % python_utils.UNICODE(USUAL_PORT_NUMBER_FOR_GAE_SERVER_IN_START), 'Please terminate it before running the performance tests.', 'Exiting.']) raise Exception @@ -105,12 +109,13 @@ def main(argv=None): # Start a demo server. 
background_process = subprocess.Popen([ 'python', '%s/dev_appserver.py' % common.GOOGLE_APP_ENGINE_HOME, - '--host=0.0.0.0', '--port=9501', '--clear_datastore=yes', - '--dev_appserver_log_level=critical', '--log_level=critical', - '--skip_sdk_update_check=true', 'app_dev.yaml']) + '--host=0.0.0.0', + '--port=%s' % python_utils.UNICODE(PORT_NUMBER_FOR_GAE_SERVER), + '--clear_datastore=yes', '--dev_appserver_log_level=critical', + '--log_level=critical', '--skip_sdk_update_check=true', 'app_dev.yaml']) # Wait for the servers to come up. - while not common.is_port_open(9501): + while not common.is_port_open(PORT_NUMBER_FOR_GAE_SERVER): time.sleep(1) # Install xvfb if not on travis, Used in frontend, e2e tests and performance diff --git a/scripts/start.py b/scripts/start.py index 73f29979db9e..19e9951f90d4 100644 --- a/scripts/start.py +++ b/scripts/start.py @@ -53,13 +53,15 @@ help='optional; if specified, does not open a browser.', action='store_true') +PORT_NUMBER_FOR_GAE_SERVER = 8181 + def cleanup(): """Function for waiting for the servers to go down.""" common.print_each_string_after_two_new_lines([ 'INFORMATION', 'Cleaning up the servers.']) - while common.is_port_open(8181): + while common.is_port_open(PORT_NUMBER_FOR_GAE_SERVER): time.sleep(1) @@ -77,11 +79,12 @@ def main(argv=None): python_utils.PRINT('Oppia setup complete!') # Check that there isn't a server already running. - if common.is_port_open(8181): + if common.is_port_open(PORT_NUMBER_FOR_GAE_SERVER): common.print_each_string_after_two_new_lines([ 'WARNING', 'Could not start new server. There is already an existing server', - 'running at port 8181.']) + 'running at port %s.' 
+ % python_utils.UNICODE(PORT_NUMBER_FOR_GAE_SERVER)]) parsed_args, _ = _PARSER.parse_known_args(args=argv) clear_datastore_arg = ( @@ -136,12 +139,14 @@ def main(argv=None): python_utils.PRINT('Starting GAE development server') background_processes.append(subprocess.Popen( 'python %s/dev_appserver.py %s %s --admin_host 0.0.0.0 --admin_port ' - '8000 --host 0.0.0.0 --port 8181 --skip_sdk_update_check true %s' % ( + '8000 --host 0.0.0.0 --port %s --skip_sdk_update_check true %s' % ( common.GOOGLE_APP_ENGINE_HOME, clear_datastore_arg, - enable_console_arg, app_yaml_filepath), shell=True)) + enable_console_arg, + python_utils.UNICODE(PORT_NUMBER_FOR_GAE_SERVER), + app_yaml_filepath), shell=True)) # Wait for the servers to come up. - while not common.is_port_open(8181): + while not common.is_port_open(PORT_NUMBER_FOR_GAE_SERVER): time.sleep(1) os_info = os.uname() @@ -155,30 +160,38 @@ def main(argv=None): 'INFORMATION', 'Setting up a local development server. You can access this ' 'server', - 'by navigating to localhost:8181 in a browser window.']) + 'by navigating to localhost:%s in a browser window.' + % python_utils.UNICODE(PORT_NUMBER_FOR_GAE_SERVER)]) else: common.print_each_string_after_two_new_lines([ 'INFORMATION', - 'Setting up a local development server at localhost:8181. ' + 'Setting up a local development server at localhost:%s. ' + % python_utils.UNICODE(PORT_NUMBER_FOR_GAE_SERVER), 'Opening a', 'default browser window pointing to this server']) time.sleep(5) background_processes.append( - subprocess.Popen(['xdg-open', 'http://localhost:8181/'])) + subprocess.Popen([ + 'xdg-open', 'http://localhost:%s/' + % python_utils.UNICODE(PORT_NUMBER_FOR_GAE_SERVER)])) elif os_info[0] == 'Darwin' and not parsed_args.no_browser: common.print_each_string_after_two_new_lines([ 'INFORMATION', - 'Setting up a local development server at localhost:8181. ' + 'Setting up a local development server at localhost:%s. 
' + % python_utils.UNICODE(PORT_NUMBER_FOR_GAE_SERVER), 'Opening a', 'default browser window pointing to this server.']) time.sleep(5) background_processes.append( - subprocess.Popen(['open', 'http://localhost:8181/'])) + subprocess.Popen([ + 'open', 'http://localhost:%s/' + % python_utils.UNICODE(PORT_NUMBER_FOR_GAE_SERVER)])) else: common.print_each_string_after_two_new_lines([ 'INFORMATION', 'Setting up a local development server. You can access this server', - 'by navigating to localhost:8181 in a browser window.']) + 'by navigating to localhost:%s in a browser window.' + % python_utils.UNICODE(PORT_NUMBER_FOR_GAE_SERVER)]) python_utils.PRINT('Done!') From fd168ab8eb9752cf7b17cb0fc47c650941cbfb6d Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Sun, 1 Sep 2019 02:58:36 +0530 Subject: [PATCH 102/141] address comments --- scripts/common.py | 10 +++++----- scripts/create_expression_parser.py | 12 +++++++----- scripts/install_third_party_libs.py | 3 ++- scripts/run_backend_tests.py | 2 +- scripts/run_e2e_tests.py | 21 ++++++++++++--------- scripts/run_frontend_tests.py | 5 +++-- scripts/run_performance_tests.py | 15 ++++++++------- scripts/setup.py | 3 +-- scripts/setup_gae.py | 2 +- scripts/start.py | 4 ++-- 10 files changed, 42 insertions(+), 35 deletions(-) diff --git a/scripts/common.py b/scripts/common.py index e9c66ce42072..2d51dc163145 100644 --- a/scripts/common.py +++ b/scripts/common.py @@ -33,15 +33,15 @@ RELEASE_BRANCH_NAME_PREFIX = 'release-' CURR_DIR = os.path.abspath(os.getcwd()) OPPIA_TOOLS_DIR = os.path.join(CURR_DIR, '..', 'oppia_tools') -THIRD_PARTY_DIR = os.path.join('.', 'third_party') +THIRD_PARTY_DIR = os.path.join(CURR_DIR, 'third_party') GOOGLE_APP_ENGINE_HOME = os.path.join( - OPPIA_TOOLS_DIR, 'google_appengine_1.9.67/google_appengine') + OPPIA_TOOLS_DIR, 'google_appengine_1.9.67', 'google_appengine') GOOGLE_CLOUD_SDK_HOME = os.path.join( - OPPIA_TOOLS_DIR, 'google-cloud-sdk-251.0.0/google-cloud-sdk') + OPPIA_TOOLS_DIR, 
'google-cloud-sdk-251.0.0', 'google-cloud-sdk') NODE_PATH = os.path.join(OPPIA_TOOLS_DIR, 'node-10.15.3') NODE_MODULES_PATH = os.path.join(CURR_DIR, 'node_modules') -FRONTEND_DIR = 'core/templates/dev/head' -NPM_PATH = os.path.join(NODE_PATH, 'bin/npm') +FRONTEND_DIR = os.path.join(CURR_DIR, 'core', 'templates', 'dev', 'head') +NPM_PATH = os.path.join(NODE_PATH, 'bin', 'npm') # Add path for node which is required by the node_modules. os.environ['PATH'] = '%s/bin:' % NODE_PATH + os.environ['PATH'] diff --git a/scripts/create_expression_parser.py b/scripts/create_expression_parser.py index 88e1dacab0c5..e9b5a984f5ae 100644 --- a/scripts/create_expression_parser.py +++ b/scripts/create_expression_parser.py @@ -16,6 +16,7 @@ from __future__ import absolute_import # pylint: disable=import-only-modules import fileinput +import os import re import subprocess @@ -30,10 +31,11 @@ def main(): """Produces the expression parser.""" setup.main() - expression_parser_definition = ( - 'core/templates/dev/head/expressions/parser.pegjs') - expression_parser_js = ( - 'core/templates/dev/head/expressions/ExpressionParserService.js') + expression_parser_definition = os.path.join( + 'core', 'templates', 'dev', 'head', 'expressions', 'parser.pegjs') + expression_parser_js = os.path.join( + 'core', 'templates', 'dev', 'head', 'expressions', + 'ExpressionParserService.js') # Install the basic environment, e.g. nodejs. 
install_third_party_libs.main() @@ -41,7 +43,7 @@ def main(): common.install_npm_library('pegjs', '0.8.0', common.OPPIA_TOOLS_DIR) subprocess.call([ - os.path.join(common.NODE_MODULES_PATH, 'pegjs/bin/pegjs'), + os.path.join(common.NODE_MODULES_PATH, 'pegjs', 'bin', 'pegjs'), expression_parser_definition, expression_parser_js]) for line in fileinput.input(files=[expression_parser_js], inplace=True): diff --git a/scripts/install_third_party_libs.py b/scripts/install_third_party_libs.py index b4af7d752294..0f26a8345f02 100644 --- a/scripts/install_third_party_libs.py +++ b/scripts/install_third_party_libs.py @@ -52,7 +52,8 @@ def pip_install(package, version, install_path): """ try: python_utils.PRINT('Checking if pip is installed on the local machine') - import pip + # Importing pip just to check if its installed. + import pip #pylint: disable=unused-variable except ImportError: common.print_each_string_after_two_new_lines([ 'Pip is required to install Oppia dependencies, but pip wasn\'t ' diff --git a/scripts/run_backend_tests.py b/scripts/run_backend_tests.py index bd166a68f3fd..20ccacc29b98 100644 --- a/scripts/run_backend_tests.py +++ b/scripts/run_backend_tests.py @@ -64,7 +64,7 @@ def main(argv=None): # Compile typescript files. 
python_utils.PRINT('Compiling typescript...') subprocess.call([ - os.path.join(common.NODE_MODULES_PATH, 'typescript/bin/tsc'), + os.path.join(common.NODE_MODULES_PATH, 'typescript', 'bin', 'tsc'), '--project', '.']) python_utils.PRINT('Compiling webpack...') diff --git a/scripts/run_e2e_tests.py b/scripts/run_e2e_tests.py index de5602ac234c..1eb57fb49d31 100644 --- a/scripts/run_e2e_tests.py +++ b/scripts/run_e2e_tests.py @@ -129,7 +129,7 @@ def main(argv=None): python_utils.PRINT('Generating files for production mode...') constants_env_variable = '\'DEV_MODE\': false' for line in fileinput.input( - files=['assets/constants.ts'], inplace=True): + files=[os.path.join('assets', 'constants.ts')], inplace=True): # Inside this loop the STDOUT will be redirected to the file, # constants.ts. The end='' is needed to avoid double line breaks. python_utils.PRINT( @@ -141,7 +141,7 @@ def main(argv=None): dev_mode = 'true' constants_env_variable = '\'DEV_MODE\': true' for line in fileinput.input( - files=['assets/constants.ts'], inplace=True): + files=[os.path.join('assets', 'constants.ts')], inplace=True): # Inside this loop the STDOUT will be redirected to the file, # constants.ts. The end='' is needed to avoid double line breaks. python_utils.PRINT( @@ -186,7 +186,7 @@ def main(argv=None): # Delete outdated screenshots. if os.path.isdir(os.path.join('..', 'protractor-screenshots')): - shutil.rmtree('../protractor-screenshots') + shutil.rmtree(os.path.join('..', 'protractor-screenshots')) # Run the end-to-end tests. The conditional is used to run protractor # without any sharding parameters if it is disabled. 
This helps with @@ -199,14 +199,14 @@ def main(argv=None): os.path.join( common.NODE_MODULES_PATH, 'protractor', 'bin', 'protractor'), - 'core/tests/protractor.conf.js', '--suite', parsed_args.suite, - '--params.devMode="%s"' % dev_mode]) + os.path.join('core', 'tests', 'protractor.conf.js'), '--suite', + parsed_args.suite, '--params.devMode="%s"' % dev_mode]) else: subprocess.call([ os.path.join( common.NODE_MODULES_PATH, 'protractor', 'bin', 'protractor'), - 'core/tests/protractor.conf.js', + os.path.join('core', 'tests', 'protractor.conf.js'), '--capabilities.shardTestFiles=%s' % parsed_args.sharding, '--capabilities.maxInstances=%s' % parsed_args.sharding_instances, '--suite', parsed_args.suite, @@ -218,14 +218,17 @@ def main(argv=None): os.path.join( common.NODE_MODULES_PATH, 'protractor', 'bin', 'protractor'), - 'core/tests/protractor-browserstack.conf.js', '--suite', - parsed_args.suite, '--params.devMode="%s"' % dev_mode]) + os.path.join( + 'core', 'tests', 'protractor-browserstack.conf.js'), + '--suite', parsed_args.suite, + '--params.devMode="%s"' % dev_mode]) else: subprocess.call([ os.path.join( common.NODE_MODULES_PATH, 'protractor', 'bin', 'protractor'), - 'core/tests/protractor-browserstack.conf.js', + os.path.join( + 'core', 'tests', 'protractor-browserstack.conf.js'), '--capabilities.shardTestFiles=%s' % parsed_args.sharding, '--capabilities.maxInstances=%s' % parsed_args.sharding_instances, '--suite', parsed_args.suite, diff --git a/scripts/run_frontend_tests.py b/scripts/run_frontend_tests.py index abfc37381df3..af17ab92f696 100644 --- a/scripts/run_frontend_tests.py +++ b/scripts/run_frontend_tests.py @@ -58,7 +58,7 @@ def main(argv=None): subprocess.call([ os.path.join(common.NODE_MODULES_PATH, 'karma', 'bin', 'karma'), - 'start', 'core/tests/karma.conf.ts']) + 'start', os.path.join('core', 'tests', 'karma.conf.ts')]) if parsed_args.run_minified_tests is True: python_utils.PRINT('Running test in production environment') @@ -67,7 +67,8 @@ def 
main(argv=None): subprocess.call([ os.path.join(common.NODE_MODULES_PATH, 'karma', 'bin', 'karma'), - 'start', 'core/tests/karma.conf.ts', '--prodEnv']) + 'start', os.path.join('core', 'tests', 'karma.conf.ts'), + '--prodEnv']) python_utils.PRINT('Done!') diff --git a/scripts/run_performance_tests.py b/scripts/run_performance_tests.py index 385a14a92d68..98c043006dcc 100644 --- a/scripts/run_performance_tests.py +++ b/scripts/run_performance_tests.py @@ -101,18 +101,19 @@ def main(argv=None): atexit.register(cleanup) browsermob_proxy_path = os.path.join( - common.OPPIA_TOOLS_DIR, 'browsermob-proxy-2.1.1/bin/browsermob-proxy') + common.OPPIA_TOOLS_DIR, 'browsermob-proxy-2.1.1', 'bin', + 'browsermob-proxy') # Change execute status of browsermob-proxy. common.recursive_chmod(browsermob_proxy_path, 744) # Start a demo server. - background_process = subprocess.Popen([ - 'python', '%s/dev_appserver.py' % common.GOOGLE_APP_ENGINE_HOME, - '--host=0.0.0.0', - '--port=%s' % python_utils.UNICODE(PORT_NUMBER_FOR_GAE_SERVER), - '--clear_datastore=yes', '--dev_appserver_log_level=critical', - '--log_level=critical', '--skip_sdk_update_check=true', 'app_dev.yaml']) + background_process = subprocess.Popen( + 'python %s/dev_appserver.py --host=0.0.0.0 --port=%s ' + '--clear_datastore=yes --dev_appserver_log_level=critical ' + '--log_level=critical --skip_sdk_update_check=true app_dev.yaml' % ( + common.GOOGLE_APP_ENGINE_HOME, + python_utils.UNICODE(PORT_NUMBER_FOR_GAE_SERVER)), shell=True) # Wait for the servers to come up. while not common.is_port_open(PORT_NUMBER_FOR_GAE_SERVER): diff --git a/scripts/setup.py b/scripts/setup.py index 080c04d363a4..e0064afc8e98 100644 --- a/scripts/setup.py +++ b/scripts/setup.py @@ -19,7 +19,6 @@ import os import shutil -import subprocess import sys import tarfile @@ -97,7 +96,7 @@ def main(): # may use it in a situation where relative paths won't work as expected(such # as $PYTHONPATH). 
create_directory(common.OPPIA_TOOLS_DIR) - create_directory('third_party/') + create_directory(common.THIRD_PARTY_DIR) create_directory(common.NODE_MODULES_PATH) os_info = os.uname() diff --git a/scripts/setup_gae.py b/scripts/setup_gae.py index 5e759163472c..8c3c3ce46208 100644 --- a/scripts/setup_gae.py +++ b/scripts/setup_gae.py @@ -37,7 +37,7 @@ def main(): sys.path.append(coverage_home) sys.path.append(common.GOOGLE_APP_ENGINE_HOME) sys.path.append( - os.path.join(common.GOOGLE_APP_ENGINE_HOME, 'lib/webob_0_9')) + os.path.join(common.GOOGLE_APP_ENGINE_HOME, 'lib', 'webob_0_9')) sys.path.append(os.path.join(common.OPPIA_TOOLS_DIR, 'webtest-2.0.33')) # Delete old *.pyc files. diff --git a/scripts/start.py b/scripts/start.py index 19e9951f90d4..558e0caf3141 100644 --- a/scripts/start.py +++ b/scripts/start.py @@ -95,7 +95,7 @@ def main(argv=None): if parsed_args.prod_env: constants_env_variable = '\'DEV_MODE\': false' for line in fileinput.input( - files=['assets/constants.ts'], inplace=True): + files=[os.path.join('assets', 'constants.ts')], inplace=True): # Inside this loop the STDOUT will be redirected to the file, # constants.ts. The end='' is needed to avoid double line breaks. python_utils.PRINT( @@ -106,7 +106,7 @@ def main(argv=None): else: constants_env_variable = '\'DEV_MODE\': true' for line in fileinput.input( - files=['assets/constants.ts'], inplace=True): + files=[os.path.join('assets', 'constants.ts')], inplace=True): # Inside this loop the STDOUT will be redirected to the file, # constants.ts. The end='' is needed to avoid double line breaks. 
python_utils.PRINT( From d89c3b052471c61ffd87c5438e0436918d9f361b Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Sun, 1 Sep 2019 03:18:24 +0530 Subject: [PATCH 103/141] fix lint --- scripts/create_expression_parser.py | 1 + scripts/install_chrome_on_travis.py | 1 + scripts/install_third_party_libs.py | 1 + scripts/run_backend_tests.py | 1 + scripts/run_e2e_tests.py | 1 + scripts/run_frontend_tests.py | 1 + scripts/run_performance_tests.py | 1 + scripts/run_presubmit_checks.py | 1 + scripts/run_tests.py | 1 + scripts/setup.py | 1 + scripts/setup_gae.py | 1 + scripts/start.py | 1 + 12 files changed, 12 insertions(+) diff --git a/scripts/create_expression_parser.py b/scripts/create_expression_parser.py index e9b5a984f5ae..10dfe28df304 100644 --- a/scripts/create_expression_parser.py +++ b/scripts/create_expression_parser.py @@ -14,6 +14,7 @@ """This script produces the expression parser.""" from __future__ import absolute_import # pylint: disable=import-only-modules +from __future__ import unicode_literals # pylint: disable=import-only-modules import fileinput import os diff --git a/scripts/install_chrome_on_travis.py b/scripts/install_chrome_on_travis.py index dc34b0f5fc45..4ebb68af3fba 100644 --- a/scripts/install_chrome_on_travis.py +++ b/scripts/install_chrome_on_travis.py @@ -19,6 +19,7 @@ https://travis-ci.org/oppia/oppia/settings. 
""" from __future__ import absolute_import # pylint: disable=import-only-modules +from __future__ import unicode_literals # pylint: disable=import-only-modules import os import subprocess diff --git a/scripts/install_third_party_libs.py b/scripts/install_third_party_libs.py index 0f26a8345f02..e9bb73e7dbaf 100644 --- a/scripts/install_third_party_libs.py +++ b/scripts/install_third_party_libs.py @@ -14,6 +14,7 @@ """Installation script for Oppia third-party libraries.""" from __future__ import absolute_import # pylint: disable=import-only-modules +from __future__ import unicode_literals # pylint: disable=import-only-modules import argparse import fileinput diff --git a/scripts/run_backend_tests.py b/scripts/run_backend_tests.py index 20ccacc29b98..50d86db9300c 100644 --- a/scripts/run_backend_tests.py +++ b/scripts/run_backend_tests.py @@ -14,6 +14,7 @@ """This script runs all the (Python) backend tests, in parallel.""" from __future__ import absolute_import # pylint: disable=import-only-modules +from __future__ import unicode_literals # pylint: disable=import-only-modules import argparse import os diff --git a/scripts/run_e2e_tests.py b/scripts/run_e2e_tests.py index 1eb57fb49d31..f6b975ea51e6 100644 --- a/scripts/run_e2e_tests.py +++ b/scripts/run_e2e_tests.py @@ -14,6 +14,7 @@ """Runs the end to end tests.""" from __future__ import absolute_import # pylint: disable=import-only-modules +from __future__ import unicode_literals # pylint: disable=import-only-modules import argparse import atexit diff --git a/scripts/run_frontend_tests.py b/scripts/run_frontend_tests.py index af17ab92f696..5e25a716ebf6 100644 --- a/scripts/run_frontend_tests.py +++ b/scripts/run_frontend_tests.py @@ -14,6 +14,7 @@ """This script runs unit tests for frontend JavaScript code (using Karma).""" from __future__ import absolute_import # pylint: disable=import-only-modules +from __future__ import unicode_literals # pylint: disable=import-only-modules import argparse import os diff --git 
a/scripts/run_performance_tests.py b/scripts/run_performance_tests.py index 98c043006dcc..40249719c799 100644 --- a/scripts/run_performance_tests.py +++ b/scripts/run_performance_tests.py @@ -14,6 +14,7 @@ """Runs the performance tests.""" from __future__ import absolute_import # pylint: disable=import-only-modules +from __future__ import unicode_literals # pylint: disable=import-only-modules import argparse import atexit diff --git a/scripts/run_presubmit_checks.py b/scripts/run_presubmit_checks.py index ae651e168317..7b6bddd4c1a4 100644 --- a/scripts/run_presubmit_checks.py +++ b/scripts/run_presubmit_checks.py @@ -24,6 +24,7 @@ enables a broken build to be detected as quickly as possible. """ from __future__ import absolute_import # pylint: disable=import-only-modules +from __future__ import unicode_literals # pylint: disable=import-only-modules import argparse import sys diff --git a/scripts/run_tests.py b/scripts/run_tests.py index 129545fed501..753b13e5cca0 100644 --- a/scripts/run_tests.py +++ b/scripts/run_tests.py @@ -23,6 +23,7 @@ enables a broken build to be detected as quickly as possible. """ from __future__ import absolute_import # pylint: disable=import-only-modules +from __future__ import unicode_literals # pylint: disable=import-only-modules import python_utils diff --git a/scripts/setup.py b/scripts/setup.py index e0064afc8e98..3d7ca6feb28c 100644 --- a/scripts/setup.py +++ b/scripts/setup.py @@ -16,6 +16,7 @@ scripts. Python execution environent set up for all scripts. """ from __future__ import absolute_import # pylint: disable=import-only-modules +from __future__ import unicode_literals # pylint: disable=import-only-modules import os import shutil diff --git a/scripts/setup_gae.py b/scripts/setup_gae.py index 8c3c3ce46208..6b461bfea133 100644 --- a/scripts/setup_gae.py +++ b/scripts/setup_gae.py @@ -16,6 +16,7 @@ scripts. Python execution environment setup for scripts that require GAE. 
""" from __future__ import absolute_import # pylint: disable=import-only-modules +from __future__ import unicode_literals # pylint: disable=import-only-modules import os import sys diff --git a/scripts/start.py b/scripts/start.py index 558e0caf3141..00b4e2e4c4e8 100644 --- a/scripts/start.py +++ b/scripts/start.py @@ -17,6 +17,7 @@ server. """ from __future__ import absolute_import # pylint: disable=import-only-modules +from __future__ import unicode_literals # pylint: disable=import-only-modules import argparse import atexit From 251669ad9df8fa79fe051950bde698b88922add7 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Sun, 1 Sep 2019 04:02:22 +0530 Subject: [PATCH 104/141] fix --- .circleci/config.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.circleci/config.yml b/.circleci/config.yml index 5f34170cf60f..3bc3c8d7350f 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -41,6 +41,7 @@ jobs: - run: date +%F > date - restore_cache: <<: *restore_cache + - run: sudo pip install pyyaml - run: <<: *install_dependencies - save_cache: From b07b0d9a24b8f57f036e7cf3d93f98fcfd18dde3 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Sun, 1 Sep 2019 04:08:01 +0530 Subject: [PATCH 105/141] fix --- .circleci/config.yml | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 3bc3c8d7350f..22813cb650ee 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -15,6 +15,7 @@ anchor_for_job_defaults: &job_defaults anchor_for_installing_dependencies: &install_dependencies name: Install dependencies command: | + sudo pip install pyyaml sudo pip install future sudo pip install psutil python -m scripts.setup @@ -41,7 +42,6 @@ jobs: - run: date +%F > date - restore_cache: <<: *restore_cache - - run: sudo pip install pyyaml - run: <<: *install_dependencies - save_cache: @@ -56,7 +56,6 @@ jobs: - checkout - run: <<: *install_dependencies - - run: sudo pip install pyyaml - run: name: Run 
lint tests # All the python scripts should behave as modules. Files like the @@ -76,7 +75,6 @@ jobs: - run: date +%F > date - restore_cache: <<: *restore_cache - - run: sudo pip install pyyaml - run: name: Run typescript tests command: | @@ -92,7 +90,6 @@ jobs: - run: date +%F > date - restore_cache: <<: *restore_cache - - run: sudo pip install pyyaml - run: name: Run frontend tests command: | @@ -113,7 +110,6 @@ jobs: - run: date +%F > date - restore_cache: <<: *restore_cache - - run: sudo pip install pyyaml - run: sudo pip install webtest - run: sudo pip install configparser - run: sudo pip install coverage From 03e56b7a7050a2ab3edf4c8f5ef55aa0999f21af Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Sun, 1 Sep 2019 05:05:18 +0530 Subject: [PATCH 106/141] address comments --- scripts/common.py | 15 --------------- scripts/run_presubmit_checks.py | 13 ++++++++----- 2 files changed, 8 insertions(+), 20 deletions(-) diff --git a/scripts/common.py b/scripts/common.py index 04ebc8c90f1c..acaac1866a57 100644 --- a/scripts/common.py +++ b/scripts/common.py @@ -215,21 +215,6 @@ def kill_process(port): process.send_signal(signal.SIGTERM) -def run_command(command): - """Runs a subprocess command. - - Args: - command: str. The command to be run. - - Returns: - str. The command output. - - Raises: - CalledProcessError. Raised when the command fails. - """ - return subprocess.check_output(command.split()) - - def recursive_chown(path, uid, gid): """Changes the owner and group id of all files in a path to the numeric uid and gid. 
diff --git a/scripts/run_presubmit_checks.py b/scripts/run_presubmit_checks.py index 7b6bddd4c1a4..020dc40638e3 100644 --- a/scripts/run_presubmit_checks.py +++ b/scripts/run_presubmit_checks.py @@ -27,6 +27,7 @@ from __future__ import unicode_literals # pylint: disable=import-only-modules import argparse +import subprocess import sys import python_utils @@ -51,12 +52,14 @@ def main(argv=None): python_utils.PRINT('Linting passed.') python_utils.PRINT('') - current_branch = common.run_command('git rev-parse --abbrev-ref HEAD') + current_branch = subprocess.check_output([ + 'git', 'rev-parse', '--abbrev-ref', 'HEAD']) # If the current branch exists on remote origin, matched_branch_num=1 # else matched_branch_num=0. - matched_branch_num = common.run_command( - 'git ls-remote --heads origin %s | wc -l' % current_branch) + matched_branch_num = subprocess.check_output([ + 'git', 'ls-remote', '--heads', 'origin', current_branch, '|', 'wc', + '-l']) # Set the origin branch to develop if it's not specified. parsed_args, _ = _PARSER.parse_known_args(args=argv) @@ -69,8 +72,8 @@ def main(argv=None): python_utils.PRINT('Comparing the current branch with %s' % branch) - all_changed_files = common.run_command( - 'git diff --cached --name-only --diff-filter=ACM %s' % branch) + all_changed_files = subprocess.check_output([ + 'git', 'diff', '--cached', '--name-only', '--diff-filter=ACM', branch]) if common.FRONTEND_DIR in all_changed_files: # Run frontend unit tests. 
From 770cd4fe83ff3dc35a5b706f082c585d4c9cce3b Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Sun, 1 Sep 2019 05:29:54 +0530 Subject: [PATCH 107/141] fix --- .travis.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.travis.yml b/.travis.yml index 4577d815ba8e..20f6a005ead2 100644 --- a/.travis.yml +++ b/.travis.yml @@ -92,6 +92,7 @@ install: - pushd $TRAVIS_BUILD_DIR - python -m scripts.setup - python -m scripts.setup_gae +- chown -R $(whoami) node_modules - python -m scripts.install_third_party_libs script: From fafbde27571f094cc04bdd01782c9d0da5ad50b5 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Sun, 1 Sep 2019 05:36:17 +0530 Subject: [PATCH 108/141] fix --- .travis.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.travis.yml b/.travis.yml index 20f6a005ead2..ed2bb0211723 100644 --- a/.travis.yml +++ b/.travis.yml @@ -93,6 +93,7 @@ install: - python -m scripts.setup - python -m scripts.setup_gae - chown -R $(whoami) node_modules +- chmod -R 744 node_modules - python -m scripts.install_third_party_libs script: From 9ec9238272896b4c1a4aec7109a3f0905a199c74 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Sun, 1 Sep 2019 05:42:12 +0530 Subject: [PATCH 109/141] fix --- .travis.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index ed2bb0211723..dc2c5bb3f99e 100644 --- a/.travis.yml +++ b/.travis.yml @@ -92,8 +92,8 @@ install: - pushd $TRAVIS_BUILD_DIR - python -m scripts.setup - python -m scripts.setup_gae -- chown -R $(whoami) node_modules -- chmod -R 744 node_modules +- sudo chown -R $(whoami) node_modules +- sudo chmod -R 744 node_modules - python -m scripts.install_third_party_libs script: From 33f38a7f500a25f2170ee54d0ca56ddee3b90dcc Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Sun, 1 Sep 2019 05:52:06 +0530 Subject: [PATCH 110/141] fix --- .travis.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.travis.yml b/.travis.yml index dc2c5bb3f99e..bf4bc723d2b0 100644 
--- a/.travis.yml +++ b/.travis.yml @@ -94,6 +94,7 @@ install: - python -m scripts.setup_gae - sudo chown -R $(whoami) node_modules - sudo chmod -R 744 node_modules +- export PATH=../oppia_tools/node-10.15.3/bin:$PATH - python -m scripts.install_third_party_libs script: From 35cfe608432d3ed018125e3417c58904979c7b95 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Sun, 1 Sep 2019 19:13:03 +0530 Subject: [PATCH 111/141] fix --- .travis.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.travis.yml b/.travis.yml index bf4bc723d2b0..3a81d5afc0ac 100644 --- a/.travis.yml +++ b/.travis.yml @@ -94,6 +94,8 @@ install: - python -m scripts.setup_gae - sudo chown -R $(whoami) node_modules - sudo chmod -R 744 node_modules +- sudo chown -R $(whoami) $HOME/.config +- sudo chown -R $(whoami) $HOME/.cache - export PATH=../oppia_tools/node-10.15.3/bin:$PATH - python -m scripts.install_third_party_libs From 55a1a99963624703497e68d94e56d4f46ee4dee9 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Mon, 2 Sep 2019 16:02:41 +0530 Subject: [PATCH 112/141] address comments --- scripts/run_backend_tests.py | 6 +++++- scripts/run_e2e_tests.py | 11 ++++++++++- scripts/run_frontend_tests.py | 11 ++++++++++- scripts/run_performance_tests.py | 13 ++++++++++++- scripts/run_presubmit_checks.py | 12 +++++++++++- scripts/start.py | 7 ++++++- 6 files changed, 54 insertions(+), 6 deletions(-) diff --git a/scripts/run_backend_tests.py b/scripts/run_backend_tests.py index 50d86db9300c..e9e501cc99b3 100644 --- a/scripts/run_backend_tests.py +++ b/scripts/run_backend_tests.py @@ -30,7 +30,11 @@ from . import setup from . 
import setup_gae -_PARSER = argparse.ArgumentParser() +_PARSER = argparse.ArgumentParser(description=""" + Run this script from the oppia root folder: + + python -m scripts.run_backend_tests""") + _PARSER.add_argument( '--generate_coverage_report', help='optional; if specified, generates a coverage report', diff --git a/scripts/run_e2e_tests.py b/scripts/run_e2e_tests.py index f6b975ea51e6..ea08257ed728 100644 --- a/scripts/run_e2e_tests.py +++ b/scripts/run_e2e_tests.py @@ -35,7 +35,16 @@ from . import setup from . import setup_gae -_PARSER = argparse.ArgumentParser() +_PARSER = argparse.ArgumentParser(description=""" + Run this script from the oppia root folder: + + python -m scripts.run_e2e_tests + + The root folder MUST be named 'oppia'. + + Note: You can replace 'it' with 'fit' or 'describe' with 'fdescribe' to run + a single test or test suite.""") + _PARSER.add_argument( '--skip_install', help='optional; if specified, skips installing dependencies', diff --git a/scripts/run_frontend_tests.py b/scripts/run_frontend_tests.py index 5e25a716ebf6..48c5cbcc0b0c 100644 --- a/scripts/run_frontend_tests.py +++ b/scripts/run_frontend_tests.py @@ -29,7 +29,16 @@ from . import setup from . import setup_gae -_PARSER = argparse.ArgumentParser() +_PARSER = argparse.ArgumentParser(description=""" + Run this script from the oppia root folder: + + python -m scripts.run_frontend_tests + + The root folder MUST be named 'oppia'. + + Note: You can replace 'it' with 'fit' or 'describe' with 'fdescribe' to run + a single test or test suite.""") + _PARSER.add_argument( '--skip_install', help='optional; if specified, skips installing dependencies', diff --git a/scripts/run_performance_tests.py b/scripts/run_performance_tests.py index 40249719c799..ad712403ee7e 100644 --- a/scripts/run_performance_tests.py +++ b/scripts/run_performance_tests.py @@ -31,7 +31,18 @@ from . import setup from . 
import setup_gae -_PARSER = argparse.ArgumentParser() +_PARSER = argparse.ArgumentParser(description=""" + The root folder MUST be named 'oppia'. + + Run all tests sequentially: + python -m scripts.run_performance_tests without args in order to run all + tests sequentially. + + Run test for a specific page: + python -m scripts.run_performance_tests --test_name=page_test + + page_test is the name of the file containing that test eg. splash_test.""") + _PARSER.add_argument( '--skip_install', help='optional; if specified, skips installing dependencies', diff --git a/scripts/run_presubmit_checks.py b/scripts/run_presubmit_checks.py index 020dc40638e3..8eea56321170 100644 --- a/scripts/run_presubmit_checks.py +++ b/scripts/run_presubmit_checks.py @@ -37,7 +37,17 @@ from . import run_backend_tests from . import run_frontend_tests -_PARSER = argparse.ArgumentParser() +_PARSER = argparse.ArgumentParser(description=""" + Run this script from the oppia root folder prior to opening a PR: + python -m scripts.run_presubmit_checks + + Set the origin branch to compare against by adding + --branch=your_branch or -b=your_branch + + By default, if the current branch tip exists on remote origin, + the current branch is compared against its tip on GitHub. + Otherwise it's compared against 'develop'.""") + _PARSER.add_argument( '--branch', '-b', help='optional; if specified, the origin branch to compare against.') diff --git a/scripts/start.py b/scripts/start.py index 00b4e2e4c4e8..ef31983f3d1a 100644 --- a/scripts/start.py +++ b/scripts/start.py @@ -36,7 +36,12 @@ from . import setup from . 
import setup_gae -_PARSER = argparse.ArgumentParser() +_PARSER = argparse.ArgumentParser(description=""" + Run the script from the oppia root folder: + python -m scripts.start + + Note that the root folder MUST be named 'oppia'.""") + _PARSER.add_argument( '--save_datastore', help='optional; if specified, does not clear the datastore.', From c604eaed7324367ebb5979a304de1fa4eb3a36a5 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Mon, 2 Sep 2019 17:33:42 +0530 Subject: [PATCH 113/141] fix --- scripts/run_presubmit_checks.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/run_presubmit_checks.py b/scripts/run_presubmit_checks.py index 8eea56321170..6a41677dd3b3 100644 --- a/scripts/run_presubmit_checks.py +++ b/scripts/run_presubmit_checks.py @@ -98,7 +98,7 @@ def main(argv=None): # Run backend tests. python_utils.PRINT('Running backend tests') - run_backend_tests.main() + run_backend_tests.main(argv=[]) python_utils.PRINT('Backend tests passed.') From 90b50d8470eeb5341415a436ab12191e76faec04 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Mon, 2 Sep 2019 18:09:00 +0530 Subject: [PATCH 114/141] fix --- scripts/backend_tests.py | 4 ++-- scripts/build.py | 6 ++---- scripts/install_third_party_libs.py | 9 +++------ scripts/pre_commit_hook.py | 4 ++-- scripts/pre_commit_linter.py | 6 +++--- scripts/pre_push_hook.py | 4 ++-- scripts/run_backend_tests.py | 8 ++++---- scripts/run_e2e_tests.py | 6 +++--- scripts/run_frontend_tests.py | 6 +++--- scripts/run_performance_tests.py | 4 ++-- scripts/run_presubmit_checks.py | 6 +++--- scripts/start.py | 6 +++--- 12 files changed, 32 insertions(+), 37 deletions(-) diff --git a/scripts/backend_tests.py b/scripts/backend_tests.py index 464cbfac4de9..e317c1a12e69 100644 --- a/scripts/backend_tests.py +++ b/scripts/backend_tests.py @@ -311,7 +311,7 @@ def main(argv=None): import dev_appserver dev_appserver.fix_sys_path() - parsed_args, _ = _PARSER.parse_known_args(args=argv) + parsed_args = 
_PARSER.parse_args(args=argv) if parsed_args.test_target and parsed_args.test_path: raise Exception('At most one of test_path and test_target ' 'should be specified.') @@ -452,4 +452,4 @@ def main(argv=None): if __name__ == '__main__': - main(argv=sys.argv) + main(argv=sys.argv[1:]) diff --git a/scripts/build.py b/scripts/build.py index 97ed3b942547..051c524d3557 100644 --- a/scripts/build.py +++ b/scripts/build.py @@ -1365,9 +1365,7 @@ def main(argv=None): dest='minify_third_party_libs_only') parser.add_argument( '--enable_watcher', action='store_true', default=False) - # We use parse_known_args() to ignore the extra arguments which maybe used - # while calling this method from other Python scripts. - options, _ = parser.parse_known_args(args=argv) + options = parser.parse_args(args=argv) # Regenerate /third_party/generated from scratch. safe_delete_directory_tree(THIRD_PARTY_GENERATED_DEV_DIR) build_third_party_libs(THIRD_PARTY_GENERATED_DEV_DIR) @@ -1396,4 +1394,4 @@ def main(argv=None): # The 'no coverage' pragma is used as this line is un-testable. This is because # it will only be called when build.py is used as a script. if __name__ == '__main__': # pragma: no cover - main(argv=sys.argv) + main(argv=sys.argv[1:]) diff --git a/scripts/install_third_party_libs.py b/scripts/install_third_party_libs.py index e9bb73e7dbaf..44aa084bbda2 100644 --- a/scripts/install_third_party_libs.py +++ b/scripts/install_third_party_libs.py @@ -92,10 +92,7 @@ def install_skulpt(argv): warning saying its dist command will not work properly without GitPython, but it does actually work due to the patches. """ - - # We use parse_known_args() to ignore the extra arguments which maybe used - # while calling this method from other Python scripts. 
- parsed_args, _ = _PARSER.parse_known_args(args=argv) + parsed_args = _PARSER.parse_args(args=argv) no_skulpt = parsed_args.nojsrepl or parsed_args.noskulpt python_utils.PRINT('Checking whether Skulpt is installed in third_party') @@ -178,7 +175,7 @@ def maybe_install_dependencies( # Ensure that generated JS and CSS files are in place before running the # tests. python_utils.PRINT('Running build task with concatenation only') - build.main() + build.main(argv=[]) if run_minified_tests is True: python_utils.PRINT( @@ -247,4 +244,4 @@ def main(argv=None): if __name__ == '__main__': - main(argv=sys.argv) + main(argv=sys.argv[1:]) diff --git a/scripts/pre_commit_hook.py b/scripts/pre_commit_hook.py index 92318ca6f9a7..40a4d9f4f39d 100755 --- a/scripts/pre_commit_hook.py +++ b/scripts/pre_commit_hook.py @@ -133,7 +133,7 @@ def main(argv=None): parser = argparse.ArgumentParser() parser.add_argument('--install', action='store_true', default=False, help='Install pre_commit_hook to the .git/hooks dir') - args, _ = parser.parse_known_args(args=argv) + args = parser.parse_args(args=argv) if args.install: _install_hook() return @@ -150,4 +150,4 @@ def main(argv=None): if __name__ == '__main__': - main(argv=sys.argv) + main(argv=sys.argv[1:]) diff --git a/scripts/pre_commit_linter.py b/scripts/pre_commit_linter.py index 54f6485e6412..503f0889046b 100644 --- a/scripts/pre_commit_linter.py +++ b/scripts/pre_commit_linter.py @@ -3169,11 +3169,11 @@ def _print_complete_summary_of_errors(): python_utils.PRINT(error_messages) -def main(): +def main(argv=None): """Main method for pre commit linter script that lints Python, JavaScript, HTML, and CSS files. """ - parsed_args = _PARSER.parse_args() + parsed_args = _PARSER.parse_args(args=argv) # Default mode is non-verbose mode, if arguments contains --verbose flag it # will be made True, which will represent verbose mode. 
verbose_mode_enabled = bool(parsed_args.verbose) @@ -3220,4 +3220,4 @@ def main(): if __name__ == '__main__': - main() + main(argv=sys.argv[1:]) diff --git a/scripts/pre_push_hook.py b/scripts/pre_push_hook.py index 83239b54e52e..ad3dadbf5fd7 100755 --- a/scripts/pre_push_hook.py +++ b/scripts/pre_push_hook.py @@ -365,7 +365,7 @@ def main(argv=None): parser.add_argument('url', nargs='?', help='provided by git before push') parser.add_argument('--install', action='store_true', default=False, help='Install pre_push_hook to the .git/hooks dir') - args, _ = parser.parse_known_args(args=argv) + args = parser.parse_args(args=argv) if args.install: _install_hook() return @@ -408,4 +408,4 @@ def main(argv=None): if __name__ == '__main__': - main(argv=sys.argv) + main(argv=sys.argv[1:]) diff --git a/scripts/run_backend_tests.py b/scripts/run_backend_tests.py index e9e501cc99b3..29ac46b38f74 100644 --- a/scripts/run_backend_tests.py +++ b/scripts/run_backend_tests.py @@ -47,12 +47,12 @@ def main(argv=None): setup_gae.main() # Install third party dependencies. - install_third_party_libs.main() + install_third_party_libs.main(argv=[]) coverage_home = os.path.join(common.OPPIA_TOOLS_DIR, 'coverage-4.5.4') coverage_path = os.path.join(coverage_home, 'coverage') - parsed_args, _ = _PARSER.parse_known_args(args=argv) + parsed_args = _PARSER.parse_args(args=argv) if parsed_args.generate_coverage_report: python_utils.PRINT( 'Checking whether coverage is installed in %s' @@ -64,7 +64,7 @@ def main(argv=None): 'coverage', '4.5.4', os.path.join(common.OPPIA_TOOLS_DIR, 'coverage-4.5.4')) - build.main() + build.main(argv=[]) # Compile typescript files. 
python_utils.PRINT('Compiling typescript...') @@ -94,4 +94,4 @@ def main(argv=None): if __name__ == '__main__': - main(argv=sys.argv) + main(argv=sys.argv[1:]) diff --git a/scripts/run_e2e_tests.py b/scripts/run_e2e_tests.py index ea08257ed728..ea9cc5969183 100644 --- a/scripts/run_e2e_tests.py +++ b/scripts/run_e2e_tests.py @@ -109,7 +109,7 @@ def main(argv=None): if os.environ.get('TRAVIS'): install_chrome_on_travis.main() - parsed_args, _ = _PARSER.parse_known_args(args=argv) + parsed_args = _PARSER.parse_args(args=argv) install_third_party_libs.maybe_install_dependencies( parsed_args.skip_install, parsed_args.run_minified_tests) @@ -157,7 +157,7 @@ def main(argv=None): python_utils.PRINT( re.sub(r'\'DEV_MODE\': .*', constants_env_variable, line), end='') - build.main() + build.main(argv=[]) app_yaml_filepath = 'app_dev.yaml' # Start a selenium server using chromedriver 2.41. @@ -249,4 +249,4 @@ def main(argv=None): if __name__ == '__main__': - main(argv=sys.argv) + main(argv=sys.argv[1:]) diff --git a/scripts/run_frontend_tests.py b/scripts/run_frontend_tests.py index 48c5cbcc0b0c..14de167cacbe 100644 --- a/scripts/run_frontend_tests.py +++ b/scripts/run_frontend_tests.py @@ -55,7 +55,7 @@ def main(argv=None): setup.main() setup_gae.main() - parsed_args, _ = _PARSER.parse_known_args(args=argv) + parsed_args = _PARSER.parse_args(args=argv) install_third_party_libs.maybe_install_dependencies( parsed_args.skip_install, parsed_args.run_minified_tests) common.print_each_string_after_two_new_lines([ @@ -64,7 +64,7 @@ def main(argv=None): 'on your filesystem.', 'Running test in development environment']) - build.main() + build.main(argv=[]) subprocess.call([ os.path.join(common.NODE_MODULES_PATH, 'karma', 'bin', 'karma'), @@ -84,4 +84,4 @@ def main(argv=None): if __name__ == '__main__': - main(argv=sys.argv) + main(argv=sys.argv[1:]) diff --git a/scripts/run_performance_tests.py b/scripts/run_performance_tests.py index ad712403ee7e..44e5f9bb6811 100644 --- 
a/scripts/run_performance_tests.py +++ b/scripts/run_performance_tests.py @@ -95,7 +95,7 @@ def main(argv=None): setup.main() setup_gae.main() - parsed_args, _ = _PARSER.parse_known_args(args=argv) + parsed_args = _PARSER.parse_args(args=argv) install_third_party_libs.maybe_install_dependencies( parsed_args.skip_install, parsed_args.run_minified_tests) @@ -165,4 +165,4 @@ def main(argv=None): if __name__ == '__main__': - main(argv=sys.argv) + main(argv=sys.argv[1:]) diff --git a/scripts/run_presubmit_checks.py b/scripts/run_presubmit_checks.py index 6a41677dd3b3..fe331b93f2a9 100644 --- a/scripts/run_presubmit_checks.py +++ b/scripts/run_presubmit_checks.py @@ -58,7 +58,7 @@ def main(argv=None): # Run Javascript and Python linters. python_utils.PRINT('Linting files since the last commit') - pre_commit_linter.main() + pre_commit_linter.main(argv=[]) python_utils.PRINT('Linting passed.') python_utils.PRINT('') @@ -72,7 +72,7 @@ def main(argv=None): '-l']) # Set the origin branch to develop if it's not specified. - parsed_args, _ = _PARSER.parse_known_args(args=argv) + parsed_args = _PARSER.parse_args(args=argv) if parsed_args.branch: branch = parsed_args.branch elif matched_branch_num == '1': @@ -103,4 +103,4 @@ def main(argv=None): if __name__ == '__main__': - main(argv=sys.argv) + main(argv=sys.argv[1:]) diff --git a/scripts/start.py b/scripts/start.py index ef31983f3d1a..6374ae2b3064 100644 --- a/scripts/start.py +++ b/scripts/start.py @@ -80,7 +80,7 @@ def main(argv=None): atexit.register(cleanup) # Install third party dependencies. - install_third_party_libs.main() + install_third_party_libs.main(argv=[]) python_utils.PRINT('Oppia setup complete!') @@ -92,7 +92,7 @@ def main(argv=None): 'running at port %s.' 
% python_utils.UNICODE(PORT_NUMBER_FOR_GAE_SERVER)]) - parsed_args, _ = _PARSER.parse_known_args(args=argv) + parsed_args = _PARSER.parse_args(args=argv) clear_datastore_arg = ( '' if parsed_args.save_datastore else '--clear_datastore=true') enable_console_arg = ( @@ -206,4 +206,4 @@ def main(argv=None): if __name__ == '__main__': - main(argv=sys.argv) + main(argv=sys.argv[1:]) From 5bdc8b1e1d25ab26bff4760669d3b095c9d290d3 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Mon, 2 Sep 2019 18:57:17 +0530 Subject: [PATCH 115/141] fix --- .circleci/config.yml | 8 +----- python_utils.py | 7 +++-- scripts/install_third_party_libs.py | 42 ++++++++++++++++++++++------- 3 files changed, 39 insertions(+), 18 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 22813cb650ee..506f8603c840 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -15,11 +15,7 @@ anchor_for_job_defaults: &job_defaults anchor_for_installing_dependencies: &install_dependencies name: Install dependencies command: | - sudo pip install pyyaml - sudo pip install future - sudo pip install psutil - python -m scripts.setup - python -m scripts.setup_gae + python -m scripts.install_third_party_libs anchor_for_installing_cc_test_reporter: &install_cc name: Install codeclimate test reporter @@ -62,7 +58,6 @@ jobs: # pre_commit_linter and third_party_size_check need to import other # Python files and that is only possible if we treat it as a module. command: | - python -m scripts.install_third_party_libs python -m scripts.third_party_size_check python -m scripts.pre_commit_linter --path=. 
--verbose @@ -78,7 +73,6 @@ jobs: - run: name: Run typescript tests command: | - python -m scripts.install_third_party_libs python -m scripts.typescript_checks frontend_tests: diff --git a/python_utils.py b/python_utils.py index 5acaca1ac7ea..ccd1d4143fa1 100644 --- a/python_utils.py +++ b/python_utils.py @@ -23,13 +23,16 @@ import os import sys -import yaml - _FUTURE_PATH = os.path.join(os.getcwd(), 'third_party', 'future-0.17.1') sys.path.insert(0, _FUTURE_PATH) +_YAML_PATH = os.path.join(os.getcwd(), '..', 'oppia_tools', 'pyyaml-5.1.2') +sys.path.insert(0, _YAML_PATH) + # pylint: disable=wrong-import-position # pylint: disable=wrong-import-order +import yaml # isort:skip + import builtins # isort:skip import future.utils # isort:skip import past.builtins # isort:skip diff --git a/scripts/install_third_party_libs.py b/scripts/install_third_party_libs.py index 44aa084bbda2..62dc87c0ae1b 100644 --- a/scripts/install_third_party_libs.py +++ b/scripts/install_third_party_libs.py @@ -23,14 +23,39 @@ import subprocess import sys -import python_utils +# These libraries need to be installed before running or importing any script. +TOOLS_DIR = os.path.join('..', 'oppia_tools') +# Download and install pyyaml. +if not os.path.exists(os.path.join(TOOLS_DIR, 'pyyaml-5.1.2')): + subprocess.call([ + 'pip', 'install', 'pyyaml==5.1.2', '--target', + os.path.join(TOOLS_DIR, 'pyyaml-5.1.2')]) + +# Download and install future. +if not os.path.exists(os.path.join('third_party', 'future==0.17.1')): + subprocess.call([ + 'pip', 'install', 'future==0.17.1', '--target', + os.path.join('third_party', 'future-0.17.1')]) -from . import build -from . import common -from . import install_third_party -from . import pre_commit_hook -from . import pre_push_hook -from . import setup +# Download and install psutil. 
+if not os.path.exists(os.path.join(TOOLS_DIR, 'psutil==5.6.3')): + subprocess.call([ + 'pip', 'install', 'psutil==5.6.3', '--target', + os.path.join(TOOLS_DIR, 'psutil-5.6.3')]) + +# pylint: disable=wrong-import-position +# pylint: disable=wrong-import-order +import python_utils # isort:skip + +from . import build # isort:skip +from . import common # isort:skip +from . import install_third_party # isort:skip +from . import pre_commit_hook # isort:skip +from . import pre_push_hook # isort:skip +from . import setup # isort:skip +from . import setup_gae # isort:skip +# pylint: enable=wrong-import-order +# pylint: enable=wrong-import-position _PARSER = argparse.ArgumentParser() _PARSER.add_argument( @@ -203,8 +228,8 @@ def ensure_pip_library_is_installed(package, version, path): def main(argv=None): """Install third-party libraries for Oppia.""" setup.main() + setup_gae.main() pip_dependencies = [ - ('future', '0.17.1', common.THIRD_PARTY_DIR), ('pylint', '1.9.4', common.OPPIA_TOOLS_DIR), ('Pillow', '6.0.0', common.OPPIA_TOOLS_DIR), ('pylint-quotes', '0.1.8', common.OPPIA_TOOLS_DIR), @@ -215,7 +240,6 @@ def main(argv=None): ('browsermob-proxy', '0.8.0', common.OPPIA_TOOLS_DIR), ('selenium', '3.13.0', common.OPPIA_TOOLS_DIR), ('PyGithub', '1.43.7', common.OPPIA_TOOLS_DIR), - ('psutil', '5.6.3', common.OPPIA_TOOLS_DIR), ] for package, version, path in pip_dependencies: From 891d08821a5f70afdff379f25734f7bf03c62f1f Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Mon, 2 Sep 2019 19:08:14 +0530 Subject: [PATCH 116/141] only one backend_tests.py --- scripts/backend_tests.py | 455 ----------------------------------- scripts/run_backend_tests.py | 425 +++++++++++++++++++++++++++++++- 2 files changed, 415 insertions(+), 465 deletions(-) delete mode 100644 scripts/backend_tests.py diff --git a/scripts/backend_tests.py b/scripts/backend_tests.py deleted file mode 100644 index e317c1a12e69..000000000000 --- a/scripts/backend_tests.py +++ /dev/null @@ -1,455 +0,0 @@ -# 
Copyright 2014 The Oppia Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS-IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Script for running backend tests in parallel. - -This should not be run directly. Instead, navigate to the oppia/ folder and -execute: - - bash scripts/run_backend_tests.sh -""" -from __future__ import absolute_import # pylint: disable=import-only-modules -from __future__ import unicode_literals # pylint: disable=import-only-modules - -# Pylint has issues with the import order of argparse. 
-# pylint: disable=wrong-import-order -import argparse -import datetime -import importlib -import inspect -import os -import re -import subprocess -import sys -import threading -import time - -import python_utils -# pylint: enable=wrong-import-order - -CURR_DIR = os.path.abspath(os.getcwd()) -OPPIA_TOOLS_DIR = os.path.join(CURR_DIR, '..', 'oppia_tools') -THIRD_PARTY_DIR = os.path.join(CURR_DIR, 'third_party') - -DIRS_TO_ADD_TO_SYS_PATH = [ - os.path.join(OPPIA_TOOLS_DIR, 'pylint-1.9.4'), - os.path.join( - OPPIA_TOOLS_DIR, 'google_appengine_1.9.67', 'google_appengine'), - os.path.join(OPPIA_TOOLS_DIR, 'webtest-2.0.33'), - os.path.join( - OPPIA_TOOLS_DIR, 'google_appengine_1.9.67', 'google_appengine', - 'lib', 'webob_0_9'), - os.path.join(OPPIA_TOOLS_DIR, 'browsermob-proxy-0.7.1'), - os.path.join(OPPIA_TOOLS_DIR, 'selenium-3.13.0'), - os.path.join(OPPIA_TOOLS_DIR, 'Pillow-6.0.0'), - CURR_DIR, - os.path.join(THIRD_PARTY_DIR, 'backports.functools_lru_cache-1.5'), - os.path.join(THIRD_PARTY_DIR, 'beautifulsoup4-4.7.1'), - os.path.join(THIRD_PARTY_DIR, 'bleach-3.1.0'), - os.path.join(THIRD_PARTY_DIR, 'callbacks-0.3.0'), - os.path.join(THIRD_PARTY_DIR, 'gae-cloud-storage-1.9.22.1'), - os.path.join(THIRD_PARTY_DIR, 'gae-mapreduce-1.9.22.0'), - os.path.join(THIRD_PARTY_DIR, 'gae-pipeline-1.9.22.1'), - os.path.join(THIRD_PARTY_DIR, 'graphy-1.0.0'), - os.path.join(THIRD_PARTY_DIR, 'html5lib-python-1.0.1'), - os.path.join(THIRD_PARTY_DIR, 'mutagen-1.42.0'), - os.path.join(THIRD_PARTY_DIR, 'simplejson-3.16.0'), - os.path.join(THIRD_PARTY_DIR, 'six-1.12.0'), - os.path.join(THIRD_PARTY_DIR, 'soupsieve-1.9.1'), - os.path.join(THIRD_PARTY_DIR, 'webencodings-0.5.1'), -] - -COVERAGE_PATH = os.path.join( - os.getcwd(), '..', 'oppia_tools', 'coverage-4.5.4', 'coverage') -TEST_RUNNER_PATH = os.path.join(os.getcwd(), 'core', 'tests', 'gae_suite.py') -LOG_LOCK = threading.Lock() -ALL_ERRORS = [] -# This should be the same as core.test_utils.LOG_LINE_PREFIX. 
-LOG_LINE_PREFIX = 'LOG_INFO_TEST: ' -_LOAD_TESTS_DIR = os.path.join(os.getcwd(), 'core', 'tests', 'load_tests') - - -_PARSER = argparse.ArgumentParser() -_PARSER.add_argument( - '--generate_coverage_report', - help='optional; if specified, generates a coverage report', - action='store_true') -_PARSER.add_argument( - '--test_target', - help='optional dotted module name of the test(s) to run', - type=python_utils.UNICODE) -_PARSER.add_argument( - '--test_path', - help='optional subdirectory path containing the test(s) to run', - type=python_utils.UNICODE) -_PARSER.add_argument( - '--exclude_load_tests', - help='optional; if specified, exclude load tests from being run', - action='store_true') -_PARSER.add_argument( - '-v', - '--verbose', - help='optional; if specified, display the output of the tests being run', - action='store_true') - - -def log(message, show_time=False): - """Logs a message to the terminal. - - If show_time is True, prefixes the message with the current time. - """ - with LOG_LOCK: - if show_time: - python_utils.PRINT( - datetime.datetime.utcnow().strftime('%H:%M:%S'), message) - else: - python_utils.PRINT(message) - - -def run_shell_cmd(exe, stdout=subprocess.PIPE, stderr=subprocess.PIPE): - """Runs a shell command and captures the stdout and stderr output. - - If the cmd fails, raises Exception. Otherwise, returns a string containing - the concatenation of the stdout and stderr logs. - """ - p = subprocess.Popen(exe, stdout=stdout, stderr=stderr) - last_stdout_str, last_stderr_str = p.communicate() - # Converting to unicode to stay compatible with the rest of the strings. 
- last_stdout_str = last_stdout_str.decode(encoding='utf-8') - last_stderr_str = last_stderr_str.decode(encoding='utf-8') - last_stdout = last_stdout_str.split('\n') - - if LOG_LINE_PREFIX in last_stdout_str: - log('') - for line in last_stdout: - if line.startswith(LOG_LINE_PREFIX): - log('INFO: %s' % line[len(LOG_LINE_PREFIX):]) - log('') - - result = '%s%s' % (last_stdout_str, last_stderr_str) - - if p.returncode != 0: - raise Exception('Error %s\n%s' % (p.returncode, result)) - - return result - - -class TaskThread(threading.Thread): - """Runs a task in its own thread.""" - - def __init__(self, func, verbose, name=None): - super(TaskThread, self).__init__() - self.func = func - self.output = None - self.exception = None - self.verbose = verbose - self.name = name - self.finished = False - - def run(self): - try: - self.output = self.func() - if self.verbose: - log('LOG %s:' % self.name, show_time=True) - log(self.output) - log('----------------------------------------') - log('FINISHED %s: %.1f secs' % - (self.name, time.time() - self.start_time), show_time=True) - self.finished = True - except Exception as e: - self.exception = e - if 'KeyboardInterrupt' not in python_utils.convert_to_bytes( - self.exception): - log('ERROR %s: %.1f secs' % - (self.name, time.time() - self.start_time), show_time=True) - self.finished = True - - -class TestingTaskSpec(python_utils.OBJECT): - """Executes a set of tests given a test class name.""" - - def __init__(self, test_target, generate_coverage_report): - self.test_target = test_target - self.generate_coverage_report = generate_coverage_report - - def run(self): - """Runs all tests corresponding to the given test target.""" - test_target_flag = '--test_target=%s' % self.test_target - if self.generate_coverage_report: - exc_list = [ - 'python', COVERAGE_PATH, 'run', '-p', TEST_RUNNER_PATH, - test_target_flag] - else: - exc_list = ['python', TEST_RUNNER_PATH, test_target_flag] - - return run_shell_cmd(exc_list) - - -def 
_check_all_tasks(tasks): - """Checks the results of all tasks.""" - running_tasks_data = [] - - for task in tasks: - if task.isAlive(): - running_tasks_data.append(' %s (started %s)' % ( - task.name, - time.strftime('%H:%M:%S', time.localtime(task.start_time)) - )) - - if task.exception: - ALL_ERRORS.append(task.exception) - - if running_tasks_data: - log('----------------------------------------') - log('Tasks still running:') - for task_details in running_tasks_data: - log(task_details) - - -def _execute_tasks(tasks, batch_size=24): - """Starts all tasks and checks the results. - - Runs no more than 'batch_size' tasks at a time. - """ - remaining_tasks = [] + tasks - currently_running_tasks = set([]) - - while remaining_tasks or currently_running_tasks: - if currently_running_tasks: - for task in list(currently_running_tasks): - task.join(1) - if not task.isAlive(): - currently_running_tasks.remove(task) - - while remaining_tasks and len(currently_running_tasks) < batch_size: - task = remaining_tasks.pop() - currently_running_tasks.add(task) - task.start() - task.start_time = time.time() - - time.sleep(5) - if remaining_tasks: - log('----------------------------------------') - log('Number of unstarted tasks: %s' % len(remaining_tasks)) - _check_all_tasks(tasks) - log('----------------------------------------') - - -def _get_all_test_targets(test_path=None, include_load_tests=True): - """Returns a list of test targets for all classes under test_path - containing tests. - """ - def _get_test_target_classes(path): - """Returns a list of all test classes in a given test file path. - - Args: - path: str. The path of the test file from which all test classes - are to be extracted. - - Returns: - list. A list of all test classes in a given test file path. 
- """ - class_names = [] - test_target_path = os.path.relpath( - path, os.getcwd())[:-3].replace('/', '.') - python_module = importlib.import_module(test_target_path) - for name, clazz in inspect.getmembers( - python_module, predicate=inspect.isclass): - all_base_classes = [base_class.__name__ for base_class in - (inspect.getmro(clazz))] - # Check that it is a subclass of 'AppEngineTestBase'. - if 'AppEngineTestBase' in all_base_classes: - class_names.append(name) - - return [ - '%s.%s' % (test_target_path, class_name) - for class_name in class_names] - - base_path = os.path.join(os.getcwd(), test_path or '') - result = [] - excluded_dirs = ['.git', 'third_party', 'core/tests', 'node_modules'] - for root in os.listdir(base_path): - if any([s in root for s in excluded_dirs]): - continue - if root.endswith('_test.py'): - result = result + ( - _get_test_target_classes(os.path.join(base_path, root))) - for subroot, _, files in os.walk(os.path.join(base_path, root)): - if _LOAD_TESTS_DIR in subroot and include_load_tests: - for f in files: - if f.endswith('_test.py'): - result = result + ( - _get_test_target_classes(os.path.join(subroot, f))) - - for f in files: - if (f.endswith('_test.py') and - os.path.join('core', 'tests') not in subroot): - result = result + ( - _get_test_target_classes(os.path.join(subroot, f))) - - return result - - -def main(argv=None): - """Run the tests.""" - for directory in DIRS_TO_ADD_TO_SYS_PATH: - if not os.path.exists(os.path.dirname(directory)): - raise Exception('Directory %s does not exist.' % directory) - sys.path.insert(0, directory) - - import dev_appserver - dev_appserver.fix_sys_path() - - parsed_args = _PARSER.parse_args(args=argv) - if parsed_args.test_target and parsed_args.test_path: - raise Exception('At most one of test_path and test_target ' - 'should be specified.') - if parsed_args.test_path and '.' 
in parsed_args.test_path: - raise Exception('The delimiter in test_path should be a slash (/)') - if parsed_args.test_target and '/' in parsed_args.test_target: - raise Exception('The delimiter in test_target should be a dot (.)') - - if parsed_args.test_target: - if '_test' in parsed_args.test_target: - all_test_targets = [parsed_args.test_target] - else: - python_utils.PRINT('') - python_utils.PRINT( - '---------------------------------------------------------') - python_utils.PRINT( - 'WARNING : test_target flag should point to the test file.') - python_utils.PRINT( - '---------------------------------------------------------') - python_utils.PRINT('') - time.sleep(3) - python_utils.PRINT('Redirecting to its corresponding test file...') - all_test_targets = [parsed_args.test_target + '_test'] - else: - include_load_tests = not parsed_args.exclude_load_tests - all_test_targets = _get_all_test_targets( - test_path=parsed_args.test_path, - include_load_tests=include_load_tests) - - # Prepare tasks. - task_to_taskspec = {} - tasks = [] - for test_target in all_test_targets: - test = TestingTaskSpec( - test_target, parsed_args.generate_coverage_report) - task = TaskThread(test.run, parsed_args.verbose, name=test_target) - task_to_taskspec[task] = test - tasks.append(task) - - task_execution_failed = False - try: - _execute_tasks(tasks) - except Exception: - task_execution_failed = True - - for task in tasks: - if task.exception: - log(python_utils.convert_to_bytes(task.exception)) - - python_utils.PRINT('') - python_utils.PRINT('+------------------+') - python_utils.PRINT('| SUMMARY OF TESTS |') - python_utils.PRINT('+------------------+') - python_utils.PRINT('') - - # Check we ran all tests as expected. 
- total_count = 0 - total_errors = 0 - total_failures = 0 - for task in tasks: - spec = task_to_taskspec[task] - - if not task.finished: - python_utils.PRINT('CANCELED %s' % spec.test_target) - test_count = 0 - elif 'No tests were run' in python_utils.convert_to_bytes( - task.exception): - python_utils.PRINT( - 'ERROR %s: No tests found.' % spec.test_target) - test_count = 0 - elif task.exception: - exc_str = python_utils.convert_to_bytes(task.exception) - python_utils.PRINT(exc_str[exc_str.find('='): exc_str.rfind('-')]) - - tests_failed_regex_match = re.search( - r'Test suite failed: ([0-9]+) tests run, ([0-9]+) errors, ' - '([0-9]+) failures', - python_utils.convert_to_bytes(task.exception)) - - try: - test_count = int(tests_failed_regex_match.group(1)) - errors = int(tests_failed_regex_match.group(2)) - failures = int(tests_failed_regex_match.group(3)) - total_errors += errors - total_failures += failures - python_utils.PRINT('FAILED %s: %s errors, %s failures' % ( - spec.test_target, errors, failures)) - except AttributeError: - # There was an internal error, and the tests did not run (The - # error message did not match `tests_failed_regex_match`). - test_count = 0 - total_errors += 1 - python_utils.PRINT('') - python_utils.PRINT( - '------------------------------------------------------') - python_utils.PRINT( - ' WARNING: FAILED TO RUN %s' % spec.test_target) - python_utils.PRINT('') - python_utils.PRINT( - ' This is most likely due to an import error.') - python_utils.PRINT( - '------------------------------------------------------') - else: - try: - tests_run_regex_match = re.search( - r'Ran ([0-9]+) tests? in ([0-9\.]+)s', task.output) - test_count = int(tests_run_regex_match.group(1)) - test_time = float(tests_run_regex_match.group(2)) - python_utils.PRINT( - 'SUCCESS %s: %d tests (%.1f secs)' % - (spec.test_target, test_count, test_time)) - except Exception: - python_utils.PRINT( - 'An unexpected error occurred. 
' - 'Task output:\n%s' % task.output) - - total_count += test_count - - python_utils.PRINT('') - if total_count == 0: - raise Exception('WARNING: No tests were run.') - else: - python_utils.PRINT('Ran %s test%s in %s test class%s.' % ( - total_count, '' if total_count == 1 else 's', - len(tasks), '' if len(tasks) == 1 else 'es')) - - if total_errors or total_failures: - python_utils.PRINT( - '(%s ERRORS, %s FAILURES)' % (total_errors, total_failures)) - else: - python_utils.PRINT('All tests passed.') - - if task_execution_failed: - raise Exception('Task execution failed.') - elif total_errors or total_failures: - raise Exception( - '%s errors, %s failures' % (total_errors, total_failures)) - - -if __name__ == '__main__': - main(argv=sys.argv[1:]) diff --git a/scripts/run_backend_tests.py b/scripts/run_backend_tests.py index 29ac46b38f74..b1c0570fef1f 100644 --- a/scripts/run_backend_tests.py +++ b/scripts/run_backend_tests.py @@ -1,4 +1,4 @@ -# Copyright 2019 The Oppia Authors. All Rights Reserved. +# Copyright 2014 The Oppia Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -12,24 +12,74 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""This script runs all the (Python) backend tests, in parallel.""" +"""Script for running backend tests in parallel. + +This should not be run directly. Instead, navigate to the oppia/ folder and +execute: + + bash scripts/run_backend_tests.sh +""" from __future__ import absolute_import # pylint: disable=import-only-modules from __future__ import unicode_literals # pylint: disable=import-only-modules import argparse +import datetime +import importlib +import inspect import os +import re import subprocess import sys +import threading +import time import python_utils -from . import backend_tests from . import build from . import common from . 
import install_third_party_libs from . import setup from . import setup_gae + +DIRS_TO_ADD_TO_SYS_PATH = [ + os.path.join(common.OPPIA_TOOLS_DIR, 'pylint-1.9.4'), + os.path.join( + common.OPPIA_TOOLS_DIR, 'google_appengine_1.9.67', 'google_appengine'), + os.path.join(common.OPPIA_TOOLS_DIR, 'webtest-2.0.33'), + os.path.join( + common.OPPIA_TOOLS_DIR, 'google_appengine_1.9.67', 'google_appengine', + 'lib', 'webob_0_9'), + os.path.join(common.OPPIA_TOOLS_DIR, 'browsermob-proxy-0.7.1'), + os.path.join(common.OPPIA_TOOLS_DIR, 'selenium-3.13.0'), + os.path.join(common.OPPIA_TOOLS_DIR, 'Pillow-6.0.0'), + common.CURR_DIR, + os.path.join(common.THIRD_PARTY_DIR, 'backports.functools_lru_cache-1.5'), + os.path.join(common.THIRD_PARTY_DIR, 'beautifulsoup4-4.7.1'), + os.path.join(common.THIRD_PARTY_DIR, 'bleach-3.1.0'), + os.path.join(common.THIRD_PARTY_DIR, 'callbacks-0.3.0'), + os.path.join(common.THIRD_PARTY_DIR, 'gae-cloud-storage-1.9.22.1'), + os.path.join(common.THIRD_PARTY_DIR, 'gae-mapreduce-1.9.22.0'), + os.path.join(common.THIRD_PARTY_DIR, 'gae-pipeline-1.9.22.1'), + os.path.join(common.THIRD_PARTY_DIR, 'graphy-1.0.0'), + os.path.join(common.THIRD_PARTY_DIR, 'html5lib-python-1.0.1'), + os.path.join(common.THIRD_PARTY_DIR, 'mutagen-1.42.0'), + os.path.join(common.THIRD_PARTY_DIR, 'simplejson-3.16.0'), + os.path.join(common.THIRD_PARTY_DIR, 'six-1.12.0'), + os.path.join(common.THIRD_PARTY_DIR, 'soupsieve-1.9.1'), + os.path.join(common.THIRD_PARTY_DIR, 'webencodings-0.5.1'), +] + +COVERAGE_PATH = os.path.join( + os.getcwd(), '..', 'oppia_tools', 'coverage-4.5.4', 'coverage') +TEST_RUNNER_PATH = os.path.join(os.getcwd(), 'core', 'tests', 'gae_suite.py') +LOG_LOCK = threading.Lock() +ALL_ERRORS = [] +# This should be the same as core.test_utils.LOG_LINE_PREFIX. 
+LOG_LINE_PREFIX = 'LOG_INFO_TEST: ' +_LOAD_TESTS_DIR = os.path.join(os.getcwd(), 'core', 'tests', 'load_tests') + + _PARSER = argparse.ArgumentParser(description=""" Run this script from the oppia root folder: @@ -39,18 +89,237 @@ '--generate_coverage_report', help='optional; if specified, generates a coverage report', action='store_true') +_PARSER.add_argument( + '--test_target', + help='optional dotted module name of the test(s) to run', + type=python_utils.UNICODE) +_PARSER.add_argument( + '--test_path', + help='optional subdirectory path containing the test(s) to run', + type=python_utils.UNICODE) +_PARSER.add_argument( + '--exclude_load_tests', + help='optional; if specified, exclude load tests from being run', + action='store_true') +_PARSER.add_argument( + '-v', + '--verbose', + help='optional; if specified, display the output of the tests being run', + action='store_true') + + +def log(message, show_time=False): + """Logs a message to the terminal. + + If show_time is True, prefixes the message with the current time. + """ + with LOG_LOCK: + if show_time: + python_utils.PRINT( + datetime.datetime.utcnow().strftime('%H:%M:%S'), message) + else: + python_utils.PRINT(message) + + +def run_shell_cmd(exe, stdout=subprocess.PIPE, stderr=subprocess.PIPE): + """Runs a shell command and captures the stdout and stderr output. + + If the cmd fails, raises Exception. Otherwise, returns a string containing + the concatenation of the stdout and stderr logs. + """ + p = subprocess.Popen(exe, stdout=stdout, stderr=stderr) + last_stdout_str, last_stderr_str = p.communicate() + # Converting to unicode to stay compatible with the rest of the strings. 
+ last_stdout_str = last_stdout_str.decode(encoding='utf-8') + last_stderr_str = last_stderr_str.decode(encoding='utf-8') + last_stdout = last_stdout_str.split('\n') + + if LOG_LINE_PREFIX in last_stdout_str: + log('') + for line in last_stdout: + if line.startswith(LOG_LINE_PREFIX): + log('INFO: %s' % line[len(LOG_LINE_PREFIX):]) + log('') + + result = '%s%s' % (last_stdout_str, last_stderr_str) + + if p.returncode != 0: + raise Exception('Error %s\n%s' % (p.returncode, result)) + + return result + + +class TaskThread(threading.Thread): + """Runs a task in its own thread.""" + + def __init__(self, func, verbose, name=None): + super(TaskThread, self).__init__() + self.func = func + self.output = None + self.exception = None + self.verbose = verbose + self.name = name + self.finished = False + + def run(self): + try: + self.output = self.func() + if self.verbose: + log('LOG %s:' % self.name, show_time=True) + log(self.output) + log('----------------------------------------') + log('FINISHED %s: %.1f secs' % + (self.name, time.time() - self.start_time), show_time=True) + self.finished = True + except Exception as e: + self.exception = e + if 'KeyboardInterrupt' not in python_utils.convert_to_bytes( + self.exception): + log('ERROR %s: %.1f secs' % + (self.name, time.time() - self.start_time), show_time=True) + self.finished = True + + +class TestingTaskSpec(python_utils.OBJECT): + """Executes a set of tests given a test class name.""" + + def __init__(self, test_target, generate_coverage_report): + self.test_target = test_target + self.generate_coverage_report = generate_coverage_report + + def run(self): + """Runs all tests corresponding to the given test target.""" + test_target_flag = '--test_target=%s' % self.test_target + if self.generate_coverage_report: + exc_list = [ + 'python', COVERAGE_PATH, 'run', '-p', TEST_RUNNER_PATH, + test_target_flag] + else: + exc_list = ['python', TEST_RUNNER_PATH, test_target_flag] + + return run_shell_cmd(exc_list) + + +def 
_check_all_tasks(tasks): + """Checks the results of all tasks.""" + running_tasks_data = [] + + for task in tasks: + if task.isAlive(): + running_tasks_data.append(' %s (started %s)' % ( + task.name, + time.strftime('%H:%M:%S', time.localtime(task.start_time)) + )) + + if task.exception: + ALL_ERRORS.append(task.exception) + + if running_tasks_data: + log('----------------------------------------') + log('Tasks still running:') + for task_details in running_tasks_data: + log(task_details) + + +def _execute_tasks(tasks, batch_size=24): + """Starts all tasks and checks the results. + + Runs no more than 'batch_size' tasks at a time. + """ + remaining_tasks = [] + tasks + currently_running_tasks = set([]) + + while remaining_tasks or currently_running_tasks: + if currently_running_tasks: + for task in list(currently_running_tasks): + task.join(1) + if not task.isAlive(): + currently_running_tasks.remove(task) + + while remaining_tasks and len(currently_running_tasks) < batch_size: + task = remaining_tasks.pop() + currently_running_tasks.add(task) + task.start() + task.start_time = time.time() + + time.sleep(5) + if remaining_tasks: + log('----------------------------------------') + log('Number of unstarted tasks: %s' % len(remaining_tasks)) + _check_all_tasks(tasks) + log('----------------------------------------') + + +def _get_all_test_targets(test_path=None, include_load_tests=True): + """Returns a list of test targets for all classes under test_path + containing tests. + """ + def _get_test_target_classes(path): + """Returns a list of all test classes in a given test file path. + + Args: + path: str. The path of the test file from which all test classes + are to be extracted. + + Returns: + list. A list of all test classes in a given test file path. 
+ """ + class_names = [] + test_target_path = os.path.relpath( + path, os.getcwd())[:-3].replace('/', '.') + python_module = importlib.import_module(test_target_path) + for name, clazz in inspect.getmembers( + python_module, predicate=inspect.isclass): + all_base_classes = [base_class.__name__ for base_class in + (inspect.getmro(clazz))] + # Check that it is a subclass of 'AppEngineTestBase'. + if 'AppEngineTestBase' in all_base_classes: + class_names.append(name) + + return [ + '%s.%s' % (test_target_path, class_name) + for class_name in class_names] + + base_path = os.path.join(os.getcwd(), test_path or '') + result = [] + excluded_dirs = ['.git', 'third_party', 'core/tests', 'node_modules'] + for root in os.listdir(base_path): + if any([s in root for s in excluded_dirs]): + continue + if root.endswith('_test.py'): + result = result + ( + _get_test_target_classes(os.path.join(base_path, root))) + for subroot, _, files in os.walk(os.path.join(base_path, root)): + if _LOAD_TESTS_DIR in subroot and include_load_tests: + for f in files: + if f.endswith('_test.py'): + result = result + ( + _get_test_target_classes(os.path.join(subroot, f))) + + for f in files: + if (f.endswith('_test.py') and + os.path.join('core', 'tests') not in subroot): + result = result + ( + _get_test_target_classes(os.path.join(subroot, f))) + + return result def main(argv=None): - """Runs the backend tests.""" + """Run the tests.""" setup.main() setup_gae.main() # Install third party dependencies. install_third_party_libs.main(argv=[]) - coverage_home = os.path.join(common.OPPIA_TOOLS_DIR, 'coverage-4.5.4') - coverage_path = os.path.join(coverage_home, 'coverage') + for directory in DIRS_TO_ADD_TO_SYS_PATH: + if not os.path.exists(os.path.dirname(directory)): + raise Exception('Directory %s does not exist.' 
% directory) + sys.path.insert(0, directory) + + import dev_appserver + dev_appserver.fix_sys_path() parsed_args = _PARSER.parse_args(args=argv) if parsed_args.generate_coverage_report: @@ -77,17 +346,153 @@ def main(argv=None): os.path.join(common.NODE_MODULES_PATH, 'webpack', 'bin', 'webpack.js'), '--config', 'webpack.dev.config.ts']) - backend_tests.main(argv=argv) + if parsed_args.test_target and parsed_args.test_path: + raise Exception('At most one of test_path and test_target ' + 'should be specified.') + if parsed_args.test_path and '.' in parsed_args.test_path: + raise Exception('The delimiter in test_path should be a slash (/)') + if parsed_args.test_target and '/' in parsed_args.test_target: + raise Exception('The delimiter in test_target should be a dot (.)') + + if parsed_args.test_target: + if '_test' in parsed_args.test_target: + all_test_targets = [parsed_args.test_target] + else: + python_utils.PRINT('') + python_utils.PRINT( + '---------------------------------------------------------') + python_utils.PRINT( + 'WARNING : test_target flag should point to the test file.') + python_utils.PRINT( + '---------------------------------------------------------') + python_utils.PRINT('') + time.sleep(3) + python_utils.PRINT('Redirecting to its corresponding test file...') + all_test_targets = [parsed_args.test_target + '_test'] + else: + include_load_tests = not parsed_args.exclude_load_tests + all_test_targets = _get_all_test_targets( + test_path=parsed_args.test_path, + include_load_tests=include_load_tests) + + # Prepare tasks. 
+ task_to_taskspec = {} + tasks = [] + for test_target in all_test_targets: + test = TestingTaskSpec( + test_target, parsed_args.generate_coverage_report) + task = TaskThread(test.run, parsed_args.verbose, name=test_target) + task_to_taskspec[task] = test + tasks.append(task) + + task_execution_failed = False + try: + _execute_tasks(tasks) + except Exception: + task_execution_failed = True + + for task in tasks: + if task.exception: + log(python_utils.convert_to_bytes(task.exception)) + + python_utils.PRINT('') + python_utils.PRINT('+------------------+') + python_utils.PRINT('| SUMMARY OF TESTS |') + python_utils.PRINT('+------------------+') + python_utils.PRINT('') + + # Check we ran all tests as expected. + total_count = 0 + total_errors = 0 + total_failures = 0 + for task in tasks: + spec = task_to_taskspec[task] + + if not task.finished: + python_utils.PRINT('CANCELED %s' % spec.test_target) + test_count = 0 + elif 'No tests were run' in python_utils.convert_to_bytes( + task.exception): + python_utils.PRINT( + 'ERROR %s: No tests found.' % spec.test_target) + test_count = 0 + elif task.exception: + exc_str = python_utils.convert_to_bytes(task.exception) + python_utils.PRINT(exc_str[exc_str.find('='): exc_str.rfind('-')]) + + tests_failed_regex_match = re.search( + r'Test suite failed: ([0-9]+) tests run, ([0-9]+) errors, ' + '([0-9]+) failures', + python_utils.convert_to_bytes(task.exception)) + + try: + test_count = int(tests_failed_regex_match.group(1)) + errors = int(tests_failed_regex_match.group(2)) + failures = int(tests_failed_regex_match.group(3)) + total_errors += errors + total_failures += failures + python_utils.PRINT('FAILED %s: %s errors, %s failures' % ( + spec.test_target, errors, failures)) + except AttributeError: + # There was an internal error, and the tests did not run (The + # error message did not match `tests_failed_regex_match`). 
+ test_count = 0 + total_errors += 1 + python_utils.PRINT('') + python_utils.PRINT( + '------------------------------------------------------') + python_utils.PRINT( + ' WARNING: FAILED TO RUN %s' % spec.test_target) + python_utils.PRINT('') + python_utils.PRINT( + ' This is most likely due to an import error.') + python_utils.PRINT( + '------------------------------------------------------') + else: + try: + tests_run_regex_match = re.search( + r'Ran ([0-9]+) tests? in ([0-9\.]+)s', task.output) + test_count = int(tests_run_regex_match.group(1)) + test_time = float(tests_run_regex_match.group(2)) + python_utils.PRINT( + 'SUCCESS %s: %d tests (%.1f secs)' % + (spec.test_target, test_count, test_time)) + except Exception: + python_utils.PRINT( + 'An unexpected error occurred. ' + 'Task output:\n%s' % task.output) + + total_count += test_count + + python_utils.PRINT('') + if total_count == 0: + raise Exception('WARNING: No tests were run.') + else: + python_utils.PRINT('Ran %s test%s in %s test class%s.' 
% ( + total_count, '' if total_count == 1 else 's', + len(tasks), '' if len(tasks) == 1 else 'es')) + + if total_errors or total_failures: + python_utils.PRINT( + '(%s ERRORS, %s FAILURES)' % (total_errors, total_failures)) + else: + python_utils.PRINT('All tests passed.') + + if task_execution_failed: + raise Exception('Task execution failed.') + elif total_errors or total_failures: + raise Exception( + '%s errors, %s failures' % (total_errors, total_failures)) if parsed_args.generate_coverage_report: - subprocess.call(['python', coverage_path, 'combine']) + subprocess.call(['python', COVERAGE_PATH, 'combine']) subprocess.call([ - 'python', coverage_path, 'report', + 'python', COVERAGE_PATH, 'report', '--omit="%s*","third_party/*","/usr/share/*"' % common.OPPIA_TOOLS_DIR, '--show-missing']) python_utils.PRINT('Generating xml coverage report...') - subprocess.call(['python', coverage_path, 'xml']) + subprocess.call(['python', COVERAGE_PATH, 'xml']) python_utils.PRINT('') python_utils.PRINT('Done!') From a6e6b28b22202cc0e6d28e0633dadff02e556646 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Mon, 2 Sep 2019 19:21:57 +0530 Subject: [PATCH 117/141] address comments --- scripts/install_third_party_libs.py | 4 ++-- scripts/pylint_extensions.py | 6 ++++++ 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/scripts/install_third_party_libs.py b/scripts/install_third_party_libs.py index 62dc87c0ae1b..859cd566b5f3 100644 --- a/scripts/install_third_party_libs.py +++ b/scripts/install_third_party_libs.py @@ -32,13 +32,13 @@ os.path.join(TOOLS_DIR, 'pyyaml-5.1.2')]) # Download and install future. -if not os.path.exists(os.path.join('third_party', 'future==0.17.1')): +if not os.path.exists(os.path.join('third_party', 'future-0.17.1')): subprocess.call([ 'pip', 'install', 'future==0.17.1', '--target', os.path.join('third_party', 'future-0.17.1')]) # Download and install psutil. 
-if not os.path.exists(os.path.join(TOOLS_DIR, 'psutil==5.6.3')): +if not os.path.exists(os.path.join(TOOLS_DIR, 'psutil-5.6.3')): subprocess.call([ 'pip', 'install', 'psutil==5.6.3', '--target', os.path.join(TOOLS_DIR, 'psutil-5.6.3')]) diff --git a/scripts/pylint_extensions.py b/scripts/pylint_extensions.py index d8f1dcc8a7d7..bd71ae98d69c 100644 --- a/scripts/pylint_extensions.py +++ b/scripts/pylint_extensions.py @@ -20,7 +20,13 @@ from __future__ import absolute_import # pylint: disable=import-only-modules from __future__ import unicode_literals # pylint: disable=import-only-modules +import os import re +import sys + +_PARENT_DIR = os.path.abspath(os.path.join(os.getcwd(), os.pardir)) +_PYLINT_PATH = os.path.join(_PARENT_DIR, 'oppia_tools', 'pylint-1.9.4') +sys.path.insert(0, _PYLINT_PATH) import astroid from pylint import checkers From 28eaabeddf092ea692f1b5a051aba48290f2a02c Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Mon, 2 Sep 2019 19:34:47 +0530 Subject: [PATCH 118/141] fix --- scripts/run_e2e_tests.py | 252 --------------------------------------- 1 file changed, 252 deletions(-) delete mode 100644 scripts/run_e2e_tests.py diff --git a/scripts/run_e2e_tests.py b/scripts/run_e2e_tests.py deleted file mode 100644 index ea9cc5969183..000000000000 --- a/scripts/run_e2e_tests.py +++ /dev/null @@ -1,252 +0,0 @@ -# Copyright 2019 The Oppia Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the 'License'); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an 'AS-IS' BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Runs the end to end tests.""" -from __future__ import absolute_import # pylint: disable=import-only-modules -from __future__ import unicode_literals # pylint: disable=import-only-modules - -import argparse -import atexit -import fileinput -import os -import re -import shutil -import subprocess -import sys -import time - -import python_utils - -from . import build -from . import common -from . import install_chrome_on_travis -from . import install_third_party_libs -from . import setup -from . import setup_gae - -_PARSER = argparse.ArgumentParser(description=""" - Run this script from the oppia root folder: - - python -m scripts.run_e2e_tests - - The root folder MUST be named 'oppia'. - - Note: You can replace 'it' with 'fit' or 'describe' with 'fdescribe' to run - a single test or test suite.""") - -_PARSER.add_argument( - '--skip_install', - help='optional; if specified, skips installing dependencies', - action='store_true') -_PARSER.add_argument( - '--run_minified_tests', - help='optional; if specified, runs frontend karma tests on both minified ' - 'and non-minified code', - action='store_true') -_PARSER.add_argument( - '--prod_env', - help='optional; if specified, emulate running Oppia in a production ' - 'environment.', - action='store_true') -_PARSER.add_argument( - '--browserstack', - help='optional; if specified, run the e2e tests on browserstack.', - action='store_true') -_PARSER.add_argument( - '--suite', - help='Performs test for different suites. 
Performs a full test by default.', - default='full') -_PARSER.add_argument( - '--sharding', - help='optional; if specified, Disables parallelization of protractor tests', - action='store_true') -_PARSER.add_argument( - '--sharding_instances', - help='Sets the number of parallel browsers to open while sharding', - default='3') - -PORT_NUMBER_FOR_SELENIUM_SERVER = 4444 -PORT_NUMBER_FOR_GAE_SERVER = 9001 -USUAL_PORT_NUMBER_FOR_GAE_SERVER_IN_START = 8181 - - -def cleanup(): - """Send a kill signal to the dev server and Selenium server.""" - common.kill_process(PORT_NUMBER_FOR_SELENIUM_SERVER) - common.kill_process(PORT_NUMBER_FOR_GAE_SERVER) - - # Wait for the servers to go down; suppress 'connection refused' error - # output from nc since that is exactly what we are expecting to happen. - while common.is_port_open( - PORT_NUMBER_FOR_SELENIUM_SERVER) or common.is_port_open( - PORT_NUMBER_FOR_GAE_SERVER): - time.sleep(1) - - if os.path.isdir(os.path.join('..', 'protractor-screenshots')): - common.print_each_string_after_two_new_lines([ - 'Note: If ADD_SCREENSHOT_REPORTER is set to true in', - 'core/tests/protractor.conf.js, you can view screenshots', - 'of the failed tests in ../protractor-screenshots/']) - - python_utils.PRINT('Done!') - - -def main(argv=None): - """Runs the end to end tests.""" - setup.main() - setup_gae.main() - if os.environ.get('TRAVIS'): - install_chrome_on_travis.main() - - parsed_args = _PARSER.parse_args(args=argv) - install_third_party_libs.maybe_install_dependencies( - parsed_args.skip_install, parsed_args.run_minified_tests) - - if common.is_port_open(USUAL_PORT_NUMBER_FOR_GAE_SERVER_IN_START): - common.print_each_string_after_two_new_lines([ - 'There is already a server running on localhost:%s.' 
- % python_utils.UNICODE(USUAL_PORT_NUMBER_FOR_GAE_SERVER_IN_START), - 'Please terminate it before running the end-to-end tests.', - 'Exiting.']) - raise Exception - - if common.is_port_open(PORT_NUMBER_FOR_GAE_SERVER): - common.print_each_string_after_two_new_lines([ - 'There is already a server running on localhost:%s.' - % python_utils.UNICODE(PORT_NUMBER_FOR_GAE_SERVER), - 'Please terminate it before running the end-to-end tests.', - 'Exiting.']) - raise Exception - - # Forces the cleanup function to run on exit. - # Developers: note that at the end of this script, the cleanup() function at - # the top of the file is run. - atexit.register(cleanup) - - if parsed_args.prod_env: - dev_mode = 'false' - python_utils.PRINT('Generating files for production mode...') - constants_env_variable = '\'DEV_MODE\': false' - for line in fileinput.input( - files=[os.path.join('assets', 'constants.ts')], inplace=True): - # Inside this loop the STDOUT will be redirected to the file, - # constants.ts. The end='' is needed to avoid double line breaks. - python_utils.PRINT( - re.sub(r'\'DEV_MODE\': .*', constants_env_variable, line), - end='') - build.main(argv=['--prod_env']) - app_yaml_filepath = 'app.yaml' - else: - dev_mode = 'true' - constants_env_variable = '\'DEV_MODE\': true' - for line in fileinput.input( - files=[os.path.join('assets', 'constants.ts')], inplace=True): - # Inside this loop the STDOUT will be redirected to the file, - # constants.ts. The end='' is needed to avoid double line breaks. - python_utils.PRINT( - re.sub(r'\'DEV_MODE\': .*', constants_env_variable, line), - end='') - build.main(argv=[]) - app_yaml_filepath = 'app_dev.yaml' - - # Start a selenium server using chromedriver 2.41. - # The 'detach' option continues the flow once the server is up and runnning. - # The 'quiet' option prints only the necessary information about the server - # start-up process. 
- subprocess.call([ - os.path.join(common.NODE_MODULES_PATH, '.bin', 'webdriver-manager'), - 'update', '--versions.chrome', '2.41']) - subprocess.call([ - os.path.join(common.NODE_MODULES_PATH, '.bin', 'webdriver-manager'), - 'start', '--versions.chrome', '2.41', '--detach', '--quiet']) - - # Start a selenium process. The program sends thousands of lines of useless - # info logs to stderr so we discard them. - # TODO(jacob): Find a webdriver or selenium argument that controls log - # level. - background_processes = [] - background_processes.append(subprocess.Popen([ - os.path.join(common.NODE_MODULES_PATH, '.bin', 'webdriver-manager'), - 'start', '2>/dev/null'])) - # Start a demo server. - background_processes.append(subprocess.Popen( - 'python %s/dev_appserver.py --host=0.0.0.0 --port=%s ' - '--clear_datastore=yes --dev_appserver_log_level=critical ' - '--log_level=critical --skip_sdk_update_check=true %s' % ( - common.GOOGLE_APP_ENGINE_HOME, - python_utils.UNICODE(PORT_NUMBER_FOR_GAE_SERVER), - app_yaml_filepath), shell=True)) - - # Wait for the servers to come up. - while not common.is_port_open( - PORT_NUMBER_FOR_SELENIUM_SERVER) or not common.is_port_open( - PORT_NUMBER_FOR_GAE_SERVER): - time.sleep(1) - - # Delete outdated screenshots. - if os.path.isdir(os.path.join('..', 'protractor-screenshots')): - shutil.rmtree(os.path.join('..', 'protractor-screenshots')) - - # Run the end-to-end tests. The conditional is used to run protractor - # without any sharding parameters if it is disabled. This helps with - # isolated tests. Isolated tests do not work properly unless no sharding - # parameters are passed in at all. - # TODO(bhenning): Figure out if this is a bug with protractor. 
- if not parsed_args.browserstack: - if not parsed_args.sharding or parsed_args.sharding_instances == '1': - subprocess.call([ - os.path.join( - common.NODE_MODULES_PATH, 'protractor', 'bin', - 'protractor'), - os.path.join('core', 'tests', 'protractor.conf.js'), '--suite', - parsed_args.suite, '--params.devMode="%s"' % dev_mode]) - else: - subprocess.call([ - os.path.join( - common.NODE_MODULES_PATH, 'protractor', 'bin', - 'protractor'), - os.path.join('core', 'tests', 'protractor.conf.js'), - '--capabilities.shardTestFiles=%s' % parsed_args.sharding, - '--capabilities.maxInstances=%s' - % parsed_args.sharding_instances, '--suite', parsed_args.suite, - '--params.devMode="%s"' % dev_mode]) - else: - python_utils.PRINT('Running the tests on browserstack...') - if not parsed_args.sharding or parsed_args.sharding_instances == '1': - subprocess.call([ - os.path.join( - common.NODE_MODULES_PATH, 'protractor', 'bin', - 'protractor'), - os.path.join( - 'core', 'tests', 'protractor-browserstack.conf.js'), - '--suite', parsed_args.suite, - '--params.devMode="%s"' % dev_mode]) - else: - subprocess.call([ - os.path.join( - common.NODE_MODULES_PATH, 'protractor', 'bin', - 'protractor'), - os.path.join( - 'core', 'tests', 'protractor-browserstack.conf.js'), - '--capabilities.shardTestFiles=%s' % parsed_args.sharding, - '--capabilities.maxInstances=%s' - % parsed_args.sharding_instances, '--suite', parsed_args.suite, - '--params.devMode="%s"' % dev_mode]) - - for process in background_processes: - process.wait() - - -if __name__ == '__main__': - main(argv=sys.argv[1:]) From 6e9b99390b7ef5dbfac0148e0121601874aeaccf Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Mon, 2 Sep 2019 19:34:54 +0530 Subject: [PATCH 119/141] fix --- scripts/run_e2e_tests.sh | 227 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 227 insertions(+) create mode 100644 scripts/run_e2e_tests.sh diff --git a/scripts/run_e2e_tests.sh b/scripts/run_e2e_tests.sh new file mode 100644 index 
000000000000..ee73165dcad5 --- /dev/null +++ b/scripts/run_e2e_tests.sh @@ -0,0 +1,227 @@ +#!/usr/bin/env bash + +# Copyright 2014 The Oppia Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS-IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +########################################################################## + +# INSTRUCTIONS: +# +# Run this script from the oppia root folder: +# bash scripts/run_e2e_tests.sh +# +# Optional arguments: +# --browserstack Run the tests on browserstack using the +# protractor-browserstack.conf.js file. +# --skip-install=true/false If true, skips installing dependencies. The +# default value is false. +# --sharding=true/false Disables/Enables parallelization of protractor tests. +# --sharding-instances=# Sets the number of parallel browsers to open while +# sharding. +# --prod_env Run the tests in prod mode. Static resources are served from +# build directory and use cache slugs. +# Sharding must be disabled (either by passing in false to --sharding or 1 to +# --sharding-instances) if running any tests in isolation (fit or fdescribe). +# --suite=suite_name Performs test for different suites, here suites are the +# name of the test files present in core/tests/protractor_desktop/ and +# core/test/protractor/ dirs. e.g. for the file +# core/tests/protractor/accessibility.js use --suite=accessibility. +# For performing a full test, no argument is required. +# +# The root folder MUST be named 'oppia'. 
+# +# Note: You can replace 'it' with 'fit' or 'describe' with 'fdescribe' to run a +# single test or test suite. + +function cleanup { + # Send a kill signal to the dev server and Selenium server. The awk command + # gets just the process ID from the grepped line. + kill `ps aux | grep "[Dd]ev_appserver.py --host=0.0.0.0 --port=9001" | awk '{print $2}'` + kill `ps aux | grep node_modules/webdriver-manager/selenium | awk '{print $2}'` + + # Wait for the servers to go down; suppress "connection refused" error output + # from nc since that is exactly what we are expecting to happen. + while ( nc -vz localhost 4444 >/dev/null 2>&1 ); do sleep 1; done + while ( nc -vz localhost 9001 >/dev/null 2>&1 ); do sleep 1; done + + if [ -d "../protractor-screenshots" ]; then + echo "" + echo " Note: If ADD_SCREENSHOT_REPORTER is set to true in" + echo " core/tests/protractor.conf.js, you can view screenshots" + echo " of the failed tests in ../protractor-screenshots/" + echo "" + fi + + echo Done! +} + +if [ -z "$BASH_VERSION" ] +then + echo "" + echo " Please run me using bash: " + echo "" + echo " bash $0" + echo "" + return 1 +fi + +set -e +python -m scripts.setup +python -m scripts.setup_gae +if [ "$TRAVIS" == 'true' ]; then + python -m scripts.install_chrome_on_travis +fi + +if ( nc -vz localhost 8181 ); then + echo "" + echo " There is already a server running on localhost:8181." + echo " Please terminate it before running the end-to-end tests." + echo " Exiting." + echo "" + exit 1 +fi + +if ( nc -vz localhost 9001 ); then + echo "" + echo " There is a already a server running on localhost:9001." + echo " Please terminate it before running the end-to-end tests." + echo " Exiting." + echo "" + exit 1 +fi + + +# Forces the cleanup function to run on exit. +# Developers: note that at the end of this script, the cleanup() function at +# the top of the file is run. +trap cleanup EXIT + +# Argument passed to feconf.py to help choose production templates folder. 
+DEV_MODE=true +RUN_ON_BROWSERSTACK=False +for arg in "$@"; do + # Used to emulate running Oppia in a production environment. + if [ "$arg" == "--prod_env" ]; then + DEV_MODE=false + echo " Generating files for production mode..." + fi + + # Used to run the e2e tests on browserstack. + if [ "$arg" == "--browserstack" ]; then + RUN_ON_BROWSERSTACK=True + echo " Running the tests on browserstack..." + fi +done + +if [[ "DEV_MODE" == "true" ]]; then + constants_env_variable="\"DEV_MODE\": true" + sed -i.bak -e s/"\"DEV_MODE\": .*"/"$constants_env_variable"/ assets/constants.ts + python -m scripts.build + APP_YAML_FILEPATH="app_dev.yaml" +else + constants_env_variable="\"DEV_MODE\": false" + sed -i.bak -e s/"\"DEV_MODE\": .*"/"$constants_env_variable"/ assets/constants.ts + python -m scripts.build --prod_env + APP_YAML_FILEPATH="app.yaml" +fi + +# Delete the modified feconf.py file(-i.bak) +rm assets/constants.ts.bak + +# Start a selenium server using chromedriver 2.41. +# The 'detach' option continues the flow once the server is up and runnning. +# The 'quiet' option prints only the necessary information about the server start-up +# process. +node_modules/.bin/webdriver-manager update --versions.chrome 2.41 +node_modules/.bin/webdriver-manager start --versions.chrome 2.41 --detach --quiet + +# Start a selenium process. The program sends thousands of lines of useless +# info logs to stderr so we discard them. +# TODO(jacob): Find a webdriver or selenium argument that controls log level. +(node_modules/.bin/webdriver-manager start 2>/dev/null)& +# Start a demo server. +(python ../oppia_tools/google_appengine_1.9.67/google_appengine/dev_appserver.py --host=0.0.0.0 --port=9001 --clear_datastore=yes --dev_appserver_log_level=critical --log_level=critical --skip_sdk_update_check=true $APP_YAML_FILEPATH)& + +# Wait for the servers to come up. +while ! nc -vz localhost 4444; do sleep 1; done +while ! 
nc -vz localhost 9001; do sleep 1; done + +# Delete outdated screenshots +if [ -d "../protractor-screenshots" ]; then + rm -r ../protractor-screenshots +fi + +# Parse additional command line arguments that may be passed to protractor. +# Credit: http://stackoverflow.com/questions/192249 +# Passing different suites and sharding parameters for tests. +SUITE="full" +SHARDING=true +SHARD_INSTANCES=3 +for j in "$@"; do + # Match each space-separated argument passed to the shell file to a separate + # case label, based on a pattern. E.g. Match to -suite=*, -sharding=*, where the + # asterisk refers to any characters following the equals sign, other than + # whitespace. + case $j in + --suite=*) + # Extract the value right of the equal sign by substringing the $i variable + # at the equal sign. + # http://tldp.org/LDP/abs/html/string-manipulation.html + SUITE="${j#*=}" + # Shifts the argument parameters over by one. E.g. $2 becomes $1, etc. + shift + ;; + + --sharding=*) + SHARDING="${j#*=}" + shift + ;; + + --sharding-instances=*) + SHARD_INSTANCES="${j#*=}" + shift + ;; + + --prod_env*) + shift + ;; + + --browserstack*) + shift + ;; + + *) + echo "Error: Unknown command line option: $j" + ;; + esac +done + +# Run the end-to-end tests. The conditional is used to run protractor without +# any sharding parameters if it is disabled. This helps with isolated tests. +# Isolated tests do not work properly unless no sharding parameters are passed +# in at all. +# TODO(bhenning): Figure out if this is a bug with protractor. 
+if [ "$RUN_ON_BROWSERSTACK" == "False" ]; then + if [ "$SHARDING" = "false" ] || [ "$SHARD_INSTANCES" = "1" ]; then + node_modules/protractor/bin/protractor core/tests/protractor.conf.js --suite "$SUITE" --params.devMode="$DEV_MODE" + else + node_modules/protractor/bin/protractor core/tests/protractor.conf.js --capabilities.shardTestFiles="$SHARDING" --capabilities.maxInstances=$SHARD_INSTANCES --suite "$SUITE" --params.devMode="$DEV_MODE" + fi +else + if [ "$SHARDING" = "false" ] || [ "$SHARD_INSTANCES" = "1" ]; then + node_modules/protractor/bin/protractor core/tests/protractor-browserstack.conf.js --suite "$SUITE" --params.devMode="$DEV_MODE" + else + node_modules/protractor/bin/protractor core/tests/protractor-browserstack.conf.js --capabilities.shardTestFiles="$SHARDING" --capabilities.maxInstances=$SHARD_INSTANCES --suite "$SUITE" --params.devMode="$DEV_MODE" + fi +fi From 7bf028d423aa75525c44a8a58471991aeb396025 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Mon, 2 Sep 2019 19:38:02 +0530 Subject: [PATCH 120/141] fix --- .circleci/config.yml | 1 + .travis.yml | 57 +++++++++++++++++++------------------------- 2 files changed, 25 insertions(+), 33 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 506f8603c840..75c9978ba982 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -15,6 +15,7 @@ anchor_for_job_defaults: &job_defaults anchor_for_installing_dependencies: &install_dependencies name: Install dependencies command: | + sudo pip install pyyaml python -m scripts.install_third_party_libs anchor_for_installing_cc_test_reporter: &install_cc diff --git a/.travis.yml b/.travis.yml index 3a81d5afc0ac..86ff29050f45 100644 --- a/.travis.yml +++ b/.travis.yml @@ -85,18 +85,9 @@ before_install: - export CHROME_SOURCE_URL=https://github.com/webnicer/chrome-downloads/raw/master/x64.deb/google-chrome-stable_67.0.3396.99-1_amd64.deb - export DISPLAY=:99.0 - bash -e /etc/init.d/xvfb start -- pip install future -- pip install 
psutil install: - pushd $TRAVIS_BUILD_DIR -- python -m scripts.setup -- python -m scripts.setup_gae -- sudo chown -R $(whoami) node_modules -- sudo chmod -R 744 node_modules -- sudo chown -R $(whoami) $HOME/.config -- sudo chown -R $(whoami) $HOME/.cache -- export PATH=../oppia_tools/node-10.15.3/bin:$PATH - python -m scripts.install_third_party_libs script: @@ -111,30 +102,30 @@ script: # - if [ "$RUN_BACKEND_TESTS" == 'true' ] && [ "$REPORT_BACKEND_COVERAGE" == 'true' ] && [ "$EXCLUDE_LOAD_TESTS" == 'false' ]; then bash scripts/run_backend_tests.sh --generate_coverage_report; fi # - if [ "$RUN_BACKEND_TESTS" == 'true' ] && [ "$REPORT_BACKEND_COVERAGE" == 'false' ] && [ "$EXCLUDE_LOAD_TESTS" == 'false' ]; then bash scripts/run_backend_tests.sh; fi # Run the e2e tests in the production environment (using --prod_env). -- if [ "$RUN_E2E_TESTS_ACCESSIBILITY" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="accessibility" --prod_env; fi -- if [ "$RUN_E2E_TESTS_ADDITIONAL_EDITOR_AND_PLAYER_FEATURES" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="additionalEditorAndPlayerFeatures" --prod_env; fi -- if [ "$RUN_E2E_TESTS_COLLECTIONS" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="collections" --prod_env; fi -- if [ "$RUN_E2E_TESTS_CORE_EDITOR_AND_PLAYER_FEATURES" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="coreEditorAndPlayerFeatures" --prod_env; fi -- if [ "$RUN_E2E_TESTS_CREATOR_DASHBOARD" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="creatorDashboard" --prod_env; fi -- if [ "$RUN_E2E_TESTS_EMBEDDING" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="embedding" --prod_env; fi -- if [ "$RUN_E2E_TESTS_EXPLORATION_FEEDBACK_TAB" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="explorationFeedbackTab" --prod_env; fi -- if [ "$RUN_E2E_TESTS_EXPLORATION_HISTORY_TAB" == 'true' ]; then travis_retry python -m 
scripts.run_e2e_tests --suite="explorationHistoryTab" --prod_env; fi -- if [ "$RUN_E2E_TESTS_EXPLORATION_IMPROVEMENTS_TAB" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="explorationImprovementsTab" --prod_env; fi -- if [ "$RUN_E2E_TESTS_EXPLORATION_STATISTICS_TAB" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="explorationStatisticsTab" --prod_env; fi -- if [ "$RUN_E2E_TESTS_EXPLORATION_TRANSLATION_TAB" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="explorationTranslationTab" --prod_env; fi -- if [ "$RUN_E2E_TESTS_EXTENSIONS" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="extensions" --prod_env; fi -- if [ "$RUN_E2E_TESTS_LEARNER_DASHBOARD" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="learnerDashboard" --prod_env; fi -- if [ "$RUN_E2E_TESTS_LEARNER" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="learner" --prod_env; fi -- if [ "$RUN_E2E_TESTS_LIBRARY" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="library" --prod_env; fi -- if [ "$RUN_E2E_TESTS_NAVIGATION" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="navigation" --prod_env; fi -- if [ "$RUN_E2E_TESTS_PREFERENCES" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="preferences" --prod_env; fi -- if [ "$RUN_E2E_TESTS_PROFILE_MENU" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="profileMenu" --prod_env; fi -- if [ "$RUN_E2E_TESTS_PUBLICATION" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="publication" --prod_env; fi -- if [ "$RUN_E2E_TESTS_SKILL_EDITOR" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="skillEditor" --prod_env; fi -- if [ "$RUN_E2E_TESTS_SUBSCRIPTIONS" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="subscriptions" --prod_env; fi -- if [ "$RUN_E2E_TESTS_TOPICS_AND_SKILLS_DASHBOARD" == 'true' 
]; then travis_retry python -m scripts.run_e2e_tests --suite="topicsAndSkillsDashboard" --prod_env; fi -- if [ "$RUN_E2E_TESTS_TOPIC_AND_STORY_EDITOR" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="topicAndStoryEditor" --prod_env; fi -- if [ "$RUN_E2E_TESTS_USERS" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="users" --prod_env; fi +- if [ "$RUN_E2E_TESTS_ACCESSIBILITY" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="accessibility" --prod_env; fi +- if [ "$RUN_E2E_TESTS_ADDITIONAL_EDITOR_AND_PLAYER_FEATURES" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="additionalEditorAndPlayerFeatures" --prod_env; fi +- if [ "$RUN_E2E_TESTS_COLLECTIONS" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="collections" --prod_env; fi +- if [ "$RUN_E2E_TESTS_CORE_EDITOR_AND_PLAYER_FEATURES" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="coreEditorAndPlayerFeatures" --prod_env; fi +- if [ "$RUN_E2E_TESTS_CREATOR_DASHBOARD" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="creatorDashboard" --prod_env; fi +- if [ "$RUN_E2E_TESTS_EMBEDDING" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="embedding" --prod_env; fi +- if [ "$RUN_E2E_TESTS_EXPLORATION_FEEDBACK_TAB" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="explorationFeedbackTab" --prod_env; fi +- if [ "$RUN_E2E_TESTS_EXPLORATION_HISTORY_TAB" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="explorationHistoryTab" --prod_env; fi +- if [ "$RUN_E2E_TESTS_EXPLORATION_IMPROVEMENTS_TAB" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="explorationImprovementsTab" --prod_env; fi +- if [ "$RUN_E2E_TESTS_EXPLORATION_STATISTICS_TAB" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="explorationStatisticsTab" --prod_env; fi +- if [ "$RUN_E2E_TESTS_EXPLORATION_TRANSLATION_TAB" == 'true' 
]; then travis_retry bash scripts/run_e2e_tests.sh --suite="explorationTranslationTab" --prod_env; fi +- if [ "$RUN_E2E_TESTS_EXTENSIONS" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="extensions" --prod_env; fi +- if [ "$RUN_E2E_TESTS_LEARNER_DASHBOARD" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="learnerDashboard" --prod_env; fi +- if [ "$RUN_E2E_TESTS_LEARNER" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="learner" --prod_env; fi +- if [ "$RUN_E2E_TESTS_LIBRARY" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="library" --prod_env; fi +- if [ "$RUN_E2E_TESTS_NAVIGATION" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="navigation" --prod_env; fi +- if [ "$RUN_E2E_TESTS_PREFERENCES" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="preferences" --prod_env; fi +- if [ "$RUN_E2E_TESTS_PROFILE_MENU" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="profileMenu" --prod_env; fi +- if [ "$RUN_E2E_TESTS_PUBLICATION" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="publication" --prod_env; fi +- if [ "$RUN_E2E_TESTS_SKILL_EDITOR" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="skillEditor" --prod_env; fi +- if [ "$RUN_E2E_TESTS_SUBSCRIPTIONS" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="subscriptions" --prod_env; fi +- if [ "$RUN_E2E_TESTS_TOPICS_AND_SKILLS_DASHBOARD" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="topicsAndSkillsDashboard" --prod_env; fi +- if [ "$RUN_E2E_TESTS_TOPIC_AND_STORY_EDITOR" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="topicAndStoryEditor" --prod_env; fi +- if [ "$RUN_E2E_TESTS_USERS" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="users" --prod_env; fi # These lines are commented out because these checks are being run on CircleCI # here: 
https://circleci.com/gh/oppia/oppia # after_success: From f0ebb6041c7c56e03df95b5b64c2d8eee74c0c86 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Mon, 2 Sep 2019 20:00:15 +0530 Subject: [PATCH 121/141] fix --- scripts/install_third_party_libs.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/install_third_party_libs.py b/scripts/install_third_party_libs.py index 859cd566b5f3..133e0f859b58 100644 --- a/scripts/install_third_party_libs.py +++ b/scripts/install_third_party_libs.py @@ -196,7 +196,7 @@ def maybe_install_dependencies( """Parse additional command line arguments.""" if skip_installing_third_party_libs is False: # Install third party dependencies. - main() + main(argv=[]) # Ensure that generated JS and CSS files are in place before running the # tests. python_utils.PRINT('Running build task with concatenation only') From 8fe8d20d15f58264152c3c79ceeffc2d8d240250 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Mon, 2 Sep 2019 20:17:25 +0530 Subject: [PATCH 122/141] fix errors --- scripts/create_expression_parser.py | 4 ---- scripts/run_backend_tests.py | 3 --- scripts/start.py | 4 ---- 3 files changed, 11 deletions(-) diff --git a/scripts/create_expression_parser.py b/scripts/create_expression_parser.py index 10dfe28df304..1744f9b61453 100644 --- a/scripts/create_expression_parser.py +++ b/scripts/create_expression_parser.py @@ -24,7 +24,6 @@ import python_utils from . import common -from . import install_third_party_libs from . import setup @@ -38,9 +37,6 @@ def main(): 'core', 'templates', 'dev', 'head', 'expressions', 'ExpressionParserService.js') - # Install the basic environment, e.g. nodejs. 
- install_third_party_libs.main() - common.install_npm_library('pegjs', '0.8.0', common.OPPIA_TOOLS_DIR) subprocess.call([ diff --git a/scripts/run_backend_tests.py b/scripts/run_backend_tests.py index b1c0570fef1f..78c94c773440 100644 --- a/scripts/run_backend_tests.py +++ b/scripts/run_backend_tests.py @@ -310,9 +310,6 @@ def main(argv=None): setup.main() setup_gae.main() - # Install third party dependencies. - install_third_party_libs.main(argv=[]) - for directory in DIRS_TO_ADD_TO_SYS_PATH: if not os.path.exists(os.path.dirname(directory)): raise Exception('Directory %s does not exist.' % directory) diff --git a/scripts/start.py b/scripts/start.py index 6374ae2b3064..e8271c860c2a 100644 --- a/scripts/start.py +++ b/scripts/start.py @@ -32,7 +32,6 @@ from . import build from . import common -from . import install_third_party_libs from . import setup from . import setup_gae @@ -79,9 +78,6 @@ def main(argv=None): # Runs cleanup function on exit. atexit.register(cleanup) - # Install third party dependencies. - install_third_party_libs.main(argv=[]) - python_utils.PRINT('Oppia setup complete!') # Check that there isn't a server already running. 
From b73afbf8d9cbd3a0be9a0de37109ec3b38dab88e Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Mon, 2 Sep 2019 20:30:22 +0530 Subject: [PATCH 123/141] address comments --- scripts/build.py | 19 ++++++++++--------- scripts/run_backend_tests.py | 17 ++++++++++------- 2 files changed, 20 insertions(+), 16 deletions(-) diff --git a/scripts/build.py b/scripts/build.py index 051c524d3557..62ab1c7525fa 100644 --- a/scripts/build.py +++ b/scripts/build.py @@ -144,6 +144,15 @@ APP_DEV_YAML_FILEPATH = 'app_dev.yaml' APP_YAML_FILEPATH = 'app.yaml' +_PARSER = argparse.ArgumentParser() +_PARSER.add_argument( + '--prod_env', action='store_true', default=False, dest='prod_mode') +_PARSER.add_argument( + '--minify_third_party_libs_only', action='store_true', default=False, + dest='minify_third_party_libs_only') +_PARSER.add_argument( + '--enable_watcher', action='store_true', default=False) + def generate_app_yaml(): """Generate app.yaml from app_dev.yaml.""" @@ -1357,15 +1366,7 @@ def main(argv=None): built and stored. Depending on the options passed to the script, might also minify third-party libraries and/or generate a build directory. """ - parser = argparse.ArgumentParser() - parser.add_argument( - '--prod_env', action='store_true', default=False, dest='prod_mode') - parser.add_argument( - '--minify_third_party_libs_only', action='store_true', default=False, - dest='minify_third_party_libs_only') - parser.add_argument( - '--enable_watcher', action='store_true', default=False) - options = parser.parse_args(args=argv) + options = _PARSER.parse_args(args=argv) # Regenerate /third_party/generated from scratch. 
safe_delete_directory_tree(THIRD_PARTY_GENERATED_DEV_DIR) build_third_party_libs(THIRD_PARTY_GENERATED_DEV_DIR) diff --git a/scripts/run_backend_tests.py b/scripts/run_backend_tests.py index 78c94c773440..146f8d4002fd 100644 --- a/scripts/run_backend_tests.py +++ b/scripts/run_backend_tests.py @@ -83,20 +83,23 @@ _PARSER = argparse.ArgumentParser(description=""" Run this script from the oppia root folder: - python -m scripts.run_backend_tests""") + python -m scripts.run_backend_tests -_PARSER.add_argument( - '--generate_coverage_report', - help='optional; if specified, generates a coverage report', - action='store_true') -_PARSER.add_argument( + IMPORTANT: Only one of --test_path and --test_target should be specified""") + +_EXCLUSIVE_GROUP = _PARSER.add_mutually_exclusive_group() +_EXCLUSIVE_GROUP.add_argument( '--test_target', help='optional dotted module name of the test(s) to run', type=python_utils.UNICODE) -_PARSER.add_argument( +_EXCLUSIVE_GROUP.add_argument( '--test_path', help='optional subdirectory path containing the test(s) to run', type=python_utils.UNICODE) +_PARSER.add_argument( + '--generate_coverage_report', + help='optional; if specified, generates a coverage report', + action='store_true') _PARSER.add_argument( '--exclude_load_tests', help='optional; if specified, exclude load tests from being run', From 1b4638bba8e30e7b4fc61c121ab57dd2bf5f0931 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Mon, 2 Sep 2019 21:01:16 +0530 Subject: [PATCH 124/141] fix --- .circleci/config.yml | 2 +- .travis.yml | 48 ++++---- scripts/run_e2e_tests.py | 252 +++++++++++++++++++++++++++++++++++++++ scripts/run_e2e_tests.sh | 227 ----------------------------------- 4 files changed, 277 insertions(+), 252 deletions(-) create mode 100644 scripts/run_e2e_tests.py delete mode 100644 scripts/run_e2e_tests.sh diff --git a/.circleci/config.yml b/.circleci/config.yml index 75c9978ba982..5bfcfc8a56c0 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -88,7 
+88,7 @@ jobs: - run: name: Run frontend tests command: | - python -m scripts.run_frontend_tests --run_minified_tests + python -m scripts.run_frontend_tests --run_minified_tests --skip_install - run: name: Generate frontend coverage report command: | diff --git a/.travis.yml b/.travis.yml index 86ff29050f45..12d8f452625a 100644 --- a/.travis.yml +++ b/.travis.yml @@ -102,30 +102,30 @@ script: # - if [ "$RUN_BACKEND_TESTS" == 'true' ] && [ "$REPORT_BACKEND_COVERAGE" == 'true' ] && [ "$EXCLUDE_LOAD_TESTS" == 'false' ]; then bash scripts/run_backend_tests.sh --generate_coverage_report; fi # - if [ "$RUN_BACKEND_TESTS" == 'true' ] && [ "$REPORT_BACKEND_COVERAGE" == 'false' ] && [ "$EXCLUDE_LOAD_TESTS" == 'false' ]; then bash scripts/run_backend_tests.sh; fi # Run the e2e tests in the production environment (using --prod_env). -- if [ "$RUN_E2E_TESTS_ACCESSIBILITY" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="accessibility" --prod_env; fi -- if [ "$RUN_E2E_TESTS_ADDITIONAL_EDITOR_AND_PLAYER_FEATURES" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="additionalEditorAndPlayerFeatures" --prod_env; fi -- if [ "$RUN_E2E_TESTS_COLLECTIONS" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="collections" --prod_env; fi -- if [ "$RUN_E2E_TESTS_CORE_EDITOR_AND_PLAYER_FEATURES" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="coreEditorAndPlayerFeatures" --prod_env; fi -- if [ "$RUN_E2E_TESTS_CREATOR_DASHBOARD" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="creatorDashboard" --prod_env; fi -- if [ "$RUN_E2E_TESTS_EMBEDDING" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="embedding" --prod_env; fi -- if [ "$RUN_E2E_TESTS_EXPLORATION_FEEDBACK_TAB" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="explorationFeedbackTab" --prod_env; fi -- if [ "$RUN_E2E_TESTS_EXPLORATION_HISTORY_TAB" == 'true' ]; then travis_retry bash 
scripts/run_e2e_tests.sh --suite="explorationHistoryTab" --prod_env; fi -- if [ "$RUN_E2E_TESTS_EXPLORATION_IMPROVEMENTS_TAB" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="explorationImprovementsTab" --prod_env; fi -- if [ "$RUN_E2E_TESTS_EXPLORATION_STATISTICS_TAB" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="explorationStatisticsTab" --prod_env; fi -- if [ "$RUN_E2E_TESTS_EXPLORATION_TRANSLATION_TAB" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="explorationTranslationTab" --prod_env; fi -- if [ "$RUN_E2E_TESTS_EXTENSIONS" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="extensions" --prod_env; fi -- if [ "$RUN_E2E_TESTS_LEARNER_DASHBOARD" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="learnerDashboard" --prod_env; fi -- if [ "$RUN_E2E_TESTS_LEARNER" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="learner" --prod_env; fi -- if [ "$RUN_E2E_TESTS_LIBRARY" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="library" --prod_env; fi -- if [ "$RUN_E2E_TESTS_NAVIGATION" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="navigation" --prod_env; fi -- if [ "$RUN_E2E_TESTS_PREFERENCES" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="preferences" --prod_env; fi -- if [ "$RUN_E2E_TESTS_PROFILE_MENU" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="profileMenu" --prod_env; fi -- if [ "$RUN_E2E_TESTS_PUBLICATION" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="publication" --prod_env; fi -- if [ "$RUN_E2E_TESTS_SKILL_EDITOR" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="skillEditor" --prod_env; fi -- if [ "$RUN_E2E_TESTS_SUBSCRIPTIONS" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="subscriptions" --prod_env; fi -- if [ "$RUN_E2E_TESTS_TOPICS_AND_SKILLS_DASHBOARD" == 'true' ]; then travis_retry 
bash scripts/run_e2e_tests.sh --suite="topicsAndSkillsDashboard" --prod_env; fi -- if [ "$RUN_E2E_TESTS_TOPIC_AND_STORY_EDITOR" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="topicAndStoryEditor" --prod_env; fi -- if [ "$RUN_E2E_TESTS_USERS" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="users" --prod_env; fi +- if [ "$RUN_E2E_TESTS_ACCESSIBILITY" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="accessibility" --prod_env --skip_install; fi +- if [ "$RUN_E2E_TESTS_ADDITIONAL_EDITOR_AND_PLAYER_FEATURES" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="additionalEditorAndPlayerFeatures" --prod_env --skip_install; fi +- if [ "$RUN_E2E_TESTS_COLLECTIONS" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="collections" --prod_env --skip_install; fi +- if [ "$RUN_E2E_TESTS_CORE_EDITOR_AND_PLAYER_FEATURES" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="coreEditorAndPlayerFeatures" --prod_env --skip_install; fi +- if [ "$RUN_E2E_TESTS_CREATOR_DASHBOARD" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="creatorDashboard" --prod_env --skip_install; fi +- if [ "$RUN_E2E_TESTS_EMBEDDING" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="embedding" --prod_env --skip_install; fi +- if [ "$RUN_E2E_TESTS_EXPLORATION_FEEDBACK_TAB" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="explorationFeedbackTab" --prod_env --skip_install; fi +- if [ "$RUN_E2E_TESTS_EXPLORATION_HISTORY_TAB" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="explorationHistoryTab" --prod_env --skip_install; fi +- if [ "$RUN_E2E_TESTS_EXPLORATION_IMPROVEMENTS_TAB" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="explorationImprovementsTab" --prod_env --skip_install; fi +- if [ "$RUN_E2E_TESTS_EXPLORATION_STATISTICS_TAB" == 'true' ]; then travis_retry python -m 
scripts.run_e2e_tests --suite="explorationStatisticsTab" --prod_env --skip_install; fi +- if [ "$RUN_E2E_TESTS_EXPLORATION_TRANSLATION_TAB" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="explorationTranslationTab" --prod_env --skip_install; fi +- if [ "$RUN_E2E_TESTS_EXTENSIONS" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="extensions" --prod_env --skip_install; fi +- if [ "$RUN_E2E_TESTS_LEARNER_DASHBOARD" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="learnerDashboard" --prod_env --skip_install; fi +- if [ "$RUN_E2E_TESTS_LEARNER" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="learner" --prod_env --skip_install; fi +- if [ "$RUN_E2E_TESTS_LIBRARY" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="library" --prod_env --skip_install; fi +- if [ "$RUN_E2E_TESTS_NAVIGATION" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="navigation" --prod_env --skip_install; fi +- if [ "$RUN_E2E_TESTS_PREFERENCES" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="preferences" --prod_env --skip_install; fi +- if [ "$RUN_E2E_TESTS_PROFILE_MENU" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="profileMenu" --prod_env --skip_install; fi +- if [ "$RUN_E2E_TESTS_PUBLICATION" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="publication" --prod_env --skip_install; fi +- if [ "$RUN_E2E_TESTS_SKILL_EDITOR" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="skillEditor" --prod_env --skip_install; fi +- if [ "$RUN_E2E_TESTS_SUBSCRIPTIONS" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="subscriptions" --prod_env --skip_install; fi +- if [ "$RUN_E2E_TESTS_TOPICS_AND_SKILLS_DASHBOARD" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="topicsAndSkillsDashboard" --prod_env --skip_install; fi +- if [ 
"$RUN_E2E_TESTS_TOPIC_AND_STORY_EDITOR" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="topicAndStoryEditor" --prod_env --skip_install; fi +- if [ "$RUN_E2E_TESTS_USERS" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="users" --prod_env --skip_install; fi # These lines are commented out because these checks are being run on CircleCI # here: https://circleci.com/gh/oppia/oppia # after_success: diff --git a/scripts/run_e2e_tests.py b/scripts/run_e2e_tests.py new file mode 100644 index 000000000000..ea9cc5969183 --- /dev/null +++ b/scripts/run_e2e_tests.py @@ -0,0 +1,252 @@ +# Copyright 2019 The Oppia Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an 'AS-IS' BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Runs the end to end tests.""" +from __future__ import absolute_import # pylint: disable=import-only-modules +from __future__ import unicode_literals # pylint: disable=import-only-modules + +import argparse +import atexit +import fileinput +import os +import re +import shutil +import subprocess +import sys +import time + +import python_utils + +from . import build +from . import common +from . import install_chrome_on_travis +from . import install_third_party_libs +from . import setup +from . import setup_gae + +_PARSER = argparse.ArgumentParser(description=""" + Run this script from the oppia root folder: + + python -m scripts.run_e2e_tests + + The root folder MUST be named 'oppia'. 
+ + Note: You can replace 'it' with 'fit' or 'describe' with 'fdescribe' to run + a single test or test suite.""") + +_PARSER.add_argument( + '--skip_install', + help='optional; if specified, skips installing dependencies', + action='store_true') +_PARSER.add_argument( + '--run_minified_tests', + help='optional; if specified, runs frontend karma tests on both minified ' + 'and non-minified code', + action='store_true') +_PARSER.add_argument( + '--prod_env', + help='optional; if specified, emulate running Oppia in a production ' + 'environment.', + action='store_true') +_PARSER.add_argument( + '--browserstack', + help='optional; if specified, run the e2e tests on browserstack.', + action='store_true') +_PARSER.add_argument( + '--suite', + help='Performs test for different suites. Performs a full test by default.', + default='full') +_PARSER.add_argument( + '--sharding', + help='optional; if specified, Disables parallelization of protractor tests', + action='store_true') +_PARSER.add_argument( + '--sharding_instances', + help='Sets the number of parallel browsers to open while sharding', + default='3') + +PORT_NUMBER_FOR_SELENIUM_SERVER = 4444 +PORT_NUMBER_FOR_GAE_SERVER = 9001 +USUAL_PORT_NUMBER_FOR_GAE_SERVER_IN_START = 8181 + + +def cleanup(): + """Send a kill signal to the dev server and Selenium server.""" + common.kill_process(PORT_NUMBER_FOR_SELENIUM_SERVER) + common.kill_process(PORT_NUMBER_FOR_GAE_SERVER) + + # Wait for the servers to go down; suppress 'connection refused' error + # output from nc since that is exactly what we are expecting to happen. 
+ while common.is_port_open( + PORT_NUMBER_FOR_SELENIUM_SERVER) or common.is_port_open( + PORT_NUMBER_FOR_GAE_SERVER): + time.sleep(1) + + if os.path.isdir(os.path.join('..', 'protractor-screenshots')): + common.print_each_string_after_two_new_lines([ + 'Note: If ADD_SCREENSHOT_REPORTER is set to true in', + 'core/tests/protractor.conf.js, you can view screenshots', + 'of the failed tests in ../protractor-screenshots/']) + + python_utils.PRINT('Done!') + + +def main(argv=None): + """Runs the end to end tests.""" + setup.main() + setup_gae.main() + if os.environ.get('TRAVIS'): + install_chrome_on_travis.main() + + parsed_args = _PARSER.parse_args(args=argv) + install_third_party_libs.maybe_install_dependencies( + parsed_args.skip_install, parsed_args.run_minified_tests) + + if common.is_port_open(USUAL_PORT_NUMBER_FOR_GAE_SERVER_IN_START): + common.print_each_string_after_two_new_lines([ + 'There is already a server running on localhost:%s.' + % python_utils.UNICODE(USUAL_PORT_NUMBER_FOR_GAE_SERVER_IN_START), + 'Please terminate it before running the end-to-end tests.', + 'Exiting.']) + raise Exception + + if common.is_port_open(PORT_NUMBER_FOR_GAE_SERVER): + common.print_each_string_after_two_new_lines([ + 'There is already a server running on localhost:%s.' + % python_utils.UNICODE(PORT_NUMBER_FOR_GAE_SERVER), + 'Please terminate it before running the end-to-end tests.', + 'Exiting.']) + raise Exception + + # Forces the cleanup function to run on exit. + # Developers: note that at the end of this script, the cleanup() function at + # the top of the file is run. + atexit.register(cleanup) + + if parsed_args.prod_env: + dev_mode = 'false' + python_utils.PRINT('Generating files for production mode...') + constants_env_variable = '\'DEV_MODE\': false' + for line in fileinput.input( + files=[os.path.join('assets', 'constants.ts')], inplace=True): + # Inside this loop the STDOUT will be redirected to the file, + # constants.ts. 
The end='' is needed to avoid double line breaks. + python_utils.PRINT( + re.sub(r'\'DEV_MODE\': .*', constants_env_variable, line), + end='') + build.main(argv=['--prod_env']) + app_yaml_filepath = 'app.yaml' + else: + dev_mode = 'true' + constants_env_variable = '\'DEV_MODE\': true' + for line in fileinput.input( + files=[os.path.join('assets', 'constants.ts')], inplace=True): + # Inside this loop the STDOUT will be redirected to the file, + # constants.ts. The end='' is needed to avoid double line breaks. + python_utils.PRINT( + re.sub(r'\'DEV_MODE\': .*', constants_env_variable, line), + end='') + build.main(argv=[]) + app_yaml_filepath = 'app_dev.yaml' + + # Start a selenium server using chromedriver 2.41. + # The 'detach' option continues the flow once the server is up and runnning. + # The 'quiet' option prints only the necessary information about the server + # start-up process. + subprocess.call([ + os.path.join(common.NODE_MODULES_PATH, '.bin', 'webdriver-manager'), + 'update', '--versions.chrome', '2.41']) + subprocess.call([ + os.path.join(common.NODE_MODULES_PATH, '.bin', 'webdriver-manager'), + 'start', '--versions.chrome', '2.41', '--detach', '--quiet']) + + # Start a selenium process. The program sends thousands of lines of useless + # info logs to stderr so we discard them. + # TODO(jacob): Find a webdriver or selenium argument that controls log + # level. + background_processes = [] + background_processes.append(subprocess.Popen([ + os.path.join(common.NODE_MODULES_PATH, '.bin', 'webdriver-manager'), + 'start', '2>/dev/null'])) + # Start a demo server. + background_processes.append(subprocess.Popen( + 'python %s/dev_appserver.py --host=0.0.0.0 --port=%s ' + '--clear_datastore=yes --dev_appserver_log_level=critical ' + '--log_level=critical --skip_sdk_update_check=true %s' % ( + common.GOOGLE_APP_ENGINE_HOME, + python_utils.UNICODE(PORT_NUMBER_FOR_GAE_SERVER), + app_yaml_filepath), shell=True)) + + # Wait for the servers to come up. 
+ while not common.is_port_open( + PORT_NUMBER_FOR_SELENIUM_SERVER) or not common.is_port_open( + PORT_NUMBER_FOR_GAE_SERVER): + time.sleep(1) + + # Delete outdated screenshots. + if os.path.isdir(os.path.join('..', 'protractor-screenshots')): + shutil.rmtree(os.path.join('..', 'protractor-screenshots')) + + # Run the end-to-end tests. The conditional is used to run protractor + # without any sharding parameters if it is disabled. This helps with + # isolated tests. Isolated tests do not work properly unless no sharding + # parameters are passed in at all. + # TODO(bhenning): Figure out if this is a bug with protractor. + if not parsed_args.browserstack: + if not parsed_args.sharding or parsed_args.sharding_instances == '1': + subprocess.call([ + os.path.join( + common.NODE_MODULES_PATH, 'protractor', 'bin', + 'protractor'), + os.path.join('core', 'tests', 'protractor.conf.js'), '--suite', + parsed_args.suite, '--params.devMode="%s"' % dev_mode]) + else: + subprocess.call([ + os.path.join( + common.NODE_MODULES_PATH, 'protractor', 'bin', + 'protractor'), + os.path.join('core', 'tests', 'protractor.conf.js'), + '--capabilities.shardTestFiles=%s' % parsed_args.sharding, + '--capabilities.maxInstances=%s' + % parsed_args.sharding_instances, '--suite', parsed_args.suite, + '--params.devMode="%s"' % dev_mode]) + else: + python_utils.PRINT('Running the tests on browserstack...') + if not parsed_args.sharding or parsed_args.sharding_instances == '1': + subprocess.call([ + os.path.join( + common.NODE_MODULES_PATH, 'protractor', 'bin', + 'protractor'), + os.path.join( + 'core', 'tests', 'protractor-browserstack.conf.js'), + '--suite', parsed_args.suite, + '--params.devMode="%s"' % dev_mode]) + else: + subprocess.call([ + os.path.join( + common.NODE_MODULES_PATH, 'protractor', 'bin', + 'protractor'), + os.path.join( + 'core', 'tests', 'protractor-browserstack.conf.js'), + '--capabilities.shardTestFiles=%s' % parsed_args.sharding, + '--capabilities.maxInstances=%s' + % 
parsed_args.sharding_instances, '--suite', parsed_args.suite, + '--params.devMode="%s"' % dev_mode]) + + for process in background_processes: + process.wait() + + +if __name__ == '__main__': + main(argv=sys.argv[1:]) diff --git a/scripts/run_e2e_tests.sh b/scripts/run_e2e_tests.sh deleted file mode 100644 index ee73165dcad5..000000000000 --- a/scripts/run_e2e_tests.sh +++ /dev/null @@ -1,227 +0,0 @@ -#!/usr/bin/env bash - -# Copyright 2014 The Oppia Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS-IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -########################################################################## - -# INSTRUCTIONS: -# -# Run this script from the oppia root folder: -# bash scripts/run_e2e_tests.sh -# -# Optional arguments: -# --browserstack Run the tests on browserstack using the -# protractor-browserstack.conf.js file. -# --skip-install=true/false If true, skips installing dependencies. The -# default value is false. -# --sharding=true/false Disables/Enables parallelization of protractor tests. -# --sharding-instances=# Sets the number of parallel browsers to open while -# sharding. -# --prod_env Run the tests in prod mode. Static resources are served from -# build directory and use cache slugs. -# Sharding must be disabled (either by passing in false to --sharding or 1 to -# --sharding-instances) if running any tests in isolation (fit or fdescribe). 
-# --suite=suite_name Performs test for different suites, here suites are the -# name of the test files present in core/tests/protractor_desktop/ and -# core/test/protractor/ dirs. e.g. for the file -# core/tests/protractor/accessibility.js use --suite=accessibility. -# For performing a full test, no argument is required. -# -# The root folder MUST be named 'oppia'. -# -# Note: You can replace 'it' with 'fit' or 'describe' with 'fdescribe' to run a -# single test or test suite. - -function cleanup { - # Send a kill signal to the dev server and Selenium server. The awk command - # gets just the process ID from the grepped line. - kill `ps aux | grep "[Dd]ev_appserver.py --host=0.0.0.0 --port=9001" | awk '{print $2}'` - kill `ps aux | grep node_modules/webdriver-manager/selenium | awk '{print $2}'` - - # Wait for the servers to go down; suppress "connection refused" error output - # from nc since that is exactly what we are expecting to happen. - while ( nc -vz localhost 4444 >/dev/null 2>&1 ); do sleep 1; done - while ( nc -vz localhost 9001 >/dev/null 2>&1 ); do sleep 1; done - - if [ -d "../protractor-screenshots" ]; then - echo "" - echo " Note: If ADD_SCREENSHOT_REPORTER is set to true in" - echo " core/tests/protractor.conf.js, you can view screenshots" - echo " of the failed tests in ../protractor-screenshots/" - echo "" - fi - - echo Done! -} - -if [ -z "$BASH_VERSION" ] -then - echo "" - echo " Please run me using bash: " - echo "" - echo " bash $0" - echo "" - return 1 -fi - -set -e -python -m scripts.setup -python -m scripts.setup_gae -if [ "$TRAVIS" == 'true' ]; then - python -m scripts.install_chrome_on_travis -fi - -if ( nc -vz localhost 8181 ); then - echo "" - echo " There is already a server running on localhost:8181." - echo " Please terminate it before running the end-to-end tests." - echo " Exiting." - echo "" - exit 1 -fi - -if ( nc -vz localhost 9001 ); then - echo "" - echo " There is a already a server running on localhost:9001." 
- echo " Please terminate it before running the end-to-end tests." - echo " Exiting." - echo "" - exit 1 -fi - - -# Forces the cleanup function to run on exit. -# Developers: note that at the end of this script, the cleanup() function at -# the top of the file is run. -trap cleanup EXIT - -# Argument passed to feconf.py to help choose production templates folder. -DEV_MODE=true -RUN_ON_BROWSERSTACK=False -for arg in "$@"; do - # Used to emulate running Oppia in a production environment. - if [ "$arg" == "--prod_env" ]; then - DEV_MODE=false - echo " Generating files for production mode..." - fi - - # Used to run the e2e tests on browserstack. - if [ "$arg" == "--browserstack" ]; then - RUN_ON_BROWSERSTACK=True - echo " Running the tests on browserstack..." - fi -done - -if [[ "DEV_MODE" == "true" ]]; then - constants_env_variable="\"DEV_MODE\": true" - sed -i.bak -e s/"\"DEV_MODE\": .*"/"$constants_env_variable"/ assets/constants.ts - python -m scripts.build - APP_YAML_FILEPATH="app_dev.yaml" -else - constants_env_variable="\"DEV_MODE\": false" - sed -i.bak -e s/"\"DEV_MODE\": .*"/"$constants_env_variable"/ assets/constants.ts - python -m scripts.build --prod_env - APP_YAML_FILEPATH="app.yaml" -fi - -# Delete the modified feconf.py file(-i.bak) -rm assets/constants.ts.bak - -# Start a selenium server using chromedriver 2.41. -# The 'detach' option continues the flow once the server is up and runnning. -# The 'quiet' option prints only the necessary information about the server start-up -# process. -node_modules/.bin/webdriver-manager update --versions.chrome 2.41 -node_modules/.bin/webdriver-manager start --versions.chrome 2.41 --detach --quiet - -# Start a selenium process. The program sends thousands of lines of useless -# info logs to stderr so we discard them. -# TODO(jacob): Find a webdriver or selenium argument that controls log level. -(node_modules/.bin/webdriver-manager start 2>/dev/null)& -# Start a demo server. 
-(python ../oppia_tools/google_appengine_1.9.67/google_appengine/dev_appserver.py --host=0.0.0.0 --port=9001 --clear_datastore=yes --dev_appserver_log_level=critical --log_level=critical --skip_sdk_update_check=true $APP_YAML_FILEPATH)& - -# Wait for the servers to come up. -while ! nc -vz localhost 4444; do sleep 1; done -while ! nc -vz localhost 9001; do sleep 1; done - -# Delete outdated screenshots -if [ -d "../protractor-screenshots" ]; then - rm -r ../protractor-screenshots -fi - -# Parse additional command line arguments that may be passed to protractor. -# Credit: http://stackoverflow.com/questions/192249 -# Passing different suites and sharding parameters for tests. -SUITE="full" -SHARDING=true -SHARD_INSTANCES=3 -for j in "$@"; do - # Match each space-separated argument passed to the shell file to a separate - # case label, based on a pattern. E.g. Match to -suite=*, -sharding=*, where the - # asterisk refers to any characters following the equals sign, other than - # whitespace. - case $j in - --suite=*) - # Extract the value right of the equal sign by substringing the $i variable - # at the equal sign. - # http://tldp.org/LDP/abs/html/string-manipulation.html - SUITE="${j#*=}" - # Shifts the argument parameters over by one. E.g. $2 becomes $1, etc. - shift - ;; - - --sharding=*) - SHARDING="${j#*=}" - shift - ;; - - --sharding-instances=*) - SHARD_INSTANCES="${j#*=}" - shift - ;; - - --prod_env*) - shift - ;; - - --browserstack*) - shift - ;; - - *) - echo "Error: Unknown command line option: $j" - ;; - esac -done - -# Run the end-to-end tests. The conditional is used to run protractor without -# any sharding parameters if it is disabled. This helps with isolated tests. -# Isolated tests do not work properly unless no sharding parameters are passed -# in at all. -# TODO(bhenning): Figure out if this is a bug with protractor. 
-if [ "$RUN_ON_BROWSERSTACK" == "False" ]; then - if [ "$SHARDING" = "false" ] || [ "$SHARD_INSTANCES" = "1" ]; then - node_modules/protractor/bin/protractor core/tests/protractor.conf.js --suite "$SUITE" --params.devMode="$DEV_MODE" - else - node_modules/protractor/bin/protractor core/tests/protractor.conf.js --capabilities.shardTestFiles="$SHARDING" --capabilities.maxInstances=$SHARD_INSTANCES --suite "$SUITE" --params.devMode="$DEV_MODE" - fi -else - if [ "$SHARDING" = "false" ] || [ "$SHARD_INSTANCES" = "1" ]; then - node_modules/protractor/bin/protractor core/tests/protractor-browserstack.conf.js --suite "$SUITE" --params.devMode="$DEV_MODE" - else - node_modules/protractor/bin/protractor core/tests/protractor-browserstack.conf.js --capabilities.shardTestFiles="$SHARDING" --capabilities.maxInstances=$SHARD_INSTANCES --suite "$SUITE" --params.devMode="$DEV_MODE" - fi -fi From 64fc6c504de65e671490e84f2cbc47cc6adc5e42 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Mon, 2 Sep 2019 21:31:24 +0530 Subject: [PATCH 125/141] use None and args --- scripts/build.py | 7 +++---- scripts/install_third_party_libs.py | 21 ++++++++++----------- scripts/pre_commit_hook.py | 6 +++--- scripts/pre_commit_linter.py | 6 +++--- scripts/pre_push_hook.py | 6 +++--- scripts/pylint_extensions.py | 20 ++++++++++++-------- scripts/run_backend_tests.py | 8 ++++---- scripts/run_e2e_tests.py | 11 +++++------ scripts/run_frontend_tests.py | 11 +++++------ scripts/run_performance_tests.py | 7 +++---- scripts/run_presubmit_checks.py | 13 ++++++------- scripts/start.py | 11 +++++------ 12 files changed, 62 insertions(+), 65 deletions(-) diff --git a/scripts/build.py b/scripts/build.py index 62ab1c7525fa..553736f7876a 100644 --- a/scripts/build.py +++ b/scripts/build.py @@ -26,7 +26,6 @@ import re import shutil import subprocess -import sys import threading import python_utils @@ -1359,14 +1358,14 @@ def compile_typescript_files_continuously(project_dir): return -def main(argv=None): +def 
main(args=None): """The main method of this script. Creates a third-party directory where all the JS and CSS dependencies are built and stored. Depending on the options passed to the script, might also minify third-party libraries and/or generate a build directory. """ - options = _PARSER.parse_args(args=argv) + options = _PARSER.parse_args(args=args) # Regenerate /third_party/generated from scratch. safe_delete_directory_tree(THIRD_PARTY_GENERATED_DEV_DIR) build_third_party_libs(THIRD_PARTY_GENERATED_DEV_DIR) @@ -1395,4 +1394,4 @@ def main(argv=None): # The 'no coverage' pragma is used as this line is un-testable. This is because # it will only be called when build.py is used as a script. if __name__ == '__main__': # pragma: no cover - main(argv=sys.argv[1:]) + main(args=None) diff --git a/scripts/install_third_party_libs.py b/scripts/install_third_party_libs.py index 133e0f859b58..d3d0a85fd6fd 100644 --- a/scripts/install_third_party_libs.py +++ b/scripts/install_third_party_libs.py @@ -21,7 +21,6 @@ import os import shutil import subprocess -import sys # These libraries need to be installed before running or importing any script. TOOLS_DIR = os.path.join('..', 'oppia_tools') @@ -107,7 +106,7 @@ def pip_install(package, version, install_path): install_path]) -def install_skulpt(argv): +def install_skulpt(args): """Download and install Skulpt. Skulpt is built using a Python script included within the Skulpt repository (skulpt.py). This script normally requires GitPython, however the patches to it below @@ -117,7 +116,7 @@ def install_skulpt(argv): warning saying its dist command will not work properly without GitPython, but it does actually work due to the patches. 
""" - parsed_args = _PARSER.parse_args(args=argv) + parsed_args = _PARSER.parse_args(args=args) no_skulpt = parsed_args.nojsrepl or parsed_args.noskulpt python_utils.PRINT('Checking whether Skulpt is installed in third_party') @@ -196,16 +195,16 @@ def maybe_install_dependencies( """Parse additional command line arguments.""" if skip_installing_third_party_libs is False: # Install third party dependencies. - main(argv=[]) + main(args=[]) # Ensure that generated JS and CSS files are in place before running the # tests. python_utils.PRINT('Running build task with concatenation only') - build.main(argv=[]) + build.main(args=[]) if run_minified_tests is True: python_utils.PRINT( 'Running build task with concatenation and minification') - build.main(argv=['--prod_env']) + build.main(args=['--prod_env']) def ensure_pip_library_is_installed(package, version, path): @@ -225,7 +224,7 @@ def ensure_pip_library_is_installed(package, version, path): pip_install(package, version, exact_lib_path) -def main(argv=None): +def main(args=None): """Install third-party libraries for Oppia.""" setup.main() setup_gae.main() @@ -256,16 +255,16 @@ def main(argv=None): # 374076889. subprocess.call([common.NPM_PATH, 'dedupe']) - install_skulpt(argv) + install_skulpt(args) # Install pre-commit script. python_utils.PRINT('Installing pre-commit hook for git') - pre_commit_hook.main(argv=['--install']) + pre_commit_hook.main(args=['--install']) # Install pre-push script. 
python_utils.PRINT('Installing pre-push hook for git') - pre_push_hook.main(argv=['--install']) + pre_push_hook.main(args=['--install']) if __name__ == '__main__': - main(argv=sys.argv[1:]) + main(args=None) diff --git a/scripts/pre_commit_hook.py b/scripts/pre_commit_hook.py index 40a4d9f4f39d..518f8e3f93d9 100755 --- a/scripts/pre_commit_hook.py +++ b/scripts/pre_commit_hook.py @@ -126,14 +126,14 @@ def _revert_changes_in_package_lock_file(): raise ValueError(err_unstage_cmd) -def main(argv=None): +def main(args=None): """Main method for pre-commit hook that checks files added/modified in a commit. """ parser = argparse.ArgumentParser() parser.add_argument('--install', action='store_true', default=False, help='Install pre_commit_hook to the .git/hooks dir') - args = parser.parse_args(args=argv) + args = parser.parse_args(args=args) if args.install: _install_hook() return @@ -150,4 +150,4 @@ def main(argv=None): if __name__ == '__main__': - main(argv=sys.argv[1:]) + main(args=None) diff --git a/scripts/pre_commit_linter.py b/scripts/pre_commit_linter.py index 04a0dd4b10cd..9c1ff92e971e 100644 --- a/scripts/pre_commit_linter.py +++ b/scripts/pre_commit_linter.py @@ -3147,11 +3147,11 @@ def _print_complete_summary_of_errors(): python_utils.PRINT(error_messages) -def main(argv=None): +def main(args=None): """Main method for pre commit linter script that lints Python, JavaScript, HTML, and CSS files. """ - parsed_args = _PARSER.parse_args(args=argv) + parsed_args = _PARSER.parse_args(args=args) # Default mode is non-verbose mode, if arguments contains --verbose flag it # will be made True, which will represent verbose mode. 
verbose_mode_enabled = bool(parsed_args.verbose) @@ -3198,4 +3198,4 @@ def main(argv=None): if __name__ == '__main__': - main(argv=sys.argv[1:]) + main(args=None) diff --git a/scripts/pre_push_hook.py b/scripts/pre_push_hook.py index ad3dadbf5fd7..b4e6fb5074ae 100755 --- a/scripts/pre_push_hook.py +++ b/scripts/pre_push_hook.py @@ -356,7 +356,7 @@ def _does_diff_include_package_json(files_to_lint): return False -def main(argv=None): +def main(args=None): """Main method for pre-push hook that executes the Python/JS linters on all files that deviate from develop. """ @@ -365,7 +365,7 @@ def main(argv=None): parser.add_argument('url', nargs='?', help='provided by git before push') parser.add_argument('--install', action='store_true', default=False, help='Install pre_push_hook to the .git/hooks dir') - args = parser.parse_args(args=argv) + args = parser.parse_args(args=args) if args.install: _install_hook() return @@ -408,4 +408,4 @@ def main(argv=None): if __name__ == '__main__': - main(argv=sys.argv[1:]) + main(args=None) diff --git a/scripts/pylint_extensions.py b/scripts/pylint_extensions.py index bd71ae98d69c..fb25370606e8 100644 --- a/scripts/pylint_extensions.py +++ b/scripts/pylint_extensions.py @@ -24,18 +24,22 @@ import re import sys +import python_utils +from . import docstrings_checker + _PARENT_DIR = os.path.abspath(os.path.join(os.getcwd(), os.pardir)) _PYLINT_PATH = os.path.join(_PARENT_DIR, 'oppia_tools', 'pylint-1.9.4') sys.path.insert(0, _PYLINT_PATH) -import astroid -from pylint import checkers -from pylint import interfaces -from pylint.checkers import typecheck -from pylint.checkers import utils as checker_utils - -import python_utils # isort:skip -from . 
import docstrings_checker # isort:skip +# pylint: disable=wrong-import-order +# pylint: disable=wrong-import-position +import astroid # isort:skip +from pylint import checkers # isort:skip +from pylint import interfaces # isort:skip +from pylint.checkers import typecheck # isort:skip +from pylint.checkers import utils as checker_utils # isort:skip +# pylint: enable=wrong-import-position +# pylint: enable=wrong-import-order def read_from_node(node): diff --git a/scripts/run_backend_tests.py b/scripts/run_backend_tests.py index 146f8d4002fd..06b2ec3699eb 100644 --- a/scripts/run_backend_tests.py +++ b/scripts/run_backend_tests.py @@ -308,7 +308,7 @@ def _get_test_target_classes(path): return result -def main(argv=None): +def main(args=None): """Run the tests.""" setup.main() setup_gae.main() @@ -321,7 +321,7 @@ def main(argv=None): import dev_appserver dev_appserver.fix_sys_path() - parsed_args = _PARSER.parse_args(args=argv) + parsed_args = _PARSER.parse_args(args=args) if parsed_args.generate_coverage_report: python_utils.PRINT( 'Checking whether coverage is installed in %s' @@ -333,7 +333,7 @@ def main(argv=None): 'coverage', '4.5.4', os.path.join(common.OPPIA_TOOLS_DIR, 'coverage-4.5.4')) - build.main(argv=[]) + build.main(args=[]) # Compile typescript files. 
python_utils.PRINT('Compiling typescript...') @@ -499,4 +499,4 @@ def main(argv=None): if __name__ == '__main__': - main(argv=sys.argv[1:]) + main(args=None) diff --git a/scripts/run_e2e_tests.py b/scripts/run_e2e_tests.py index ea9cc5969183..55aad3acd9f6 100644 --- a/scripts/run_e2e_tests.py +++ b/scripts/run_e2e_tests.py @@ -23,7 +23,6 @@ import re import shutil import subprocess -import sys import time import python_utils @@ -102,14 +101,14 @@ def cleanup(): python_utils.PRINT('Done!') -def main(argv=None): +def main(args=None): """Runs the end to end tests.""" setup.main() setup_gae.main() if os.environ.get('TRAVIS'): install_chrome_on_travis.main() - parsed_args = _PARSER.parse_args(args=argv) + parsed_args = _PARSER.parse_args(args=args) install_third_party_libs.maybe_install_dependencies( parsed_args.skip_install, parsed_args.run_minified_tests) @@ -145,7 +144,7 @@ def main(argv=None): python_utils.PRINT( re.sub(r'\'DEV_MODE\': .*', constants_env_variable, line), end='') - build.main(argv=['--prod_env']) + build.main(args=['--prod_env']) app_yaml_filepath = 'app.yaml' else: dev_mode = 'true' @@ -157,7 +156,7 @@ def main(argv=None): python_utils.PRINT( re.sub(r'\'DEV_MODE\': .*', constants_env_variable, line), end='') - build.main(argv=[]) + build.main(args=[]) app_yaml_filepath = 'app_dev.yaml' # Start a selenium server using chromedriver 2.41. 
@@ -249,4 +248,4 @@ def main(argv=None): if __name__ == '__main__': - main(argv=sys.argv[1:]) + main(args=None) diff --git a/scripts/run_frontend_tests.py b/scripts/run_frontend_tests.py index 14de167cacbe..969b989c5f04 100644 --- a/scripts/run_frontend_tests.py +++ b/scripts/run_frontend_tests.py @@ -19,7 +19,6 @@ import argparse import os import subprocess -import sys import python_utils @@ -50,12 +49,12 @@ action='store_true') -def main(argv=None): +def main(args=None): """Runs the frontend tests.""" setup.main() setup_gae.main() - parsed_args = _PARSER.parse_args(args=argv) + parsed_args = _PARSER.parse_args(args=args) install_third_party_libs.maybe_install_dependencies( parsed_args.skip_install, parsed_args.run_minified_tests) common.print_each_string_after_two_new_lines([ @@ -64,7 +63,7 @@ def main(argv=None): 'on your filesystem.', 'Running test in development environment']) - build.main(argv=[]) + build.main(args=[]) subprocess.call([ os.path.join(common.NODE_MODULES_PATH, 'karma', 'bin', 'karma'), @@ -73,7 +72,7 @@ def main(argv=None): if parsed_args.run_minified_tests is True: python_utils.PRINT('Running test in production environment') - build.main(argv=['--prod_env', '--minify_third_party_libs_only']) + build.main(args=['--prod_env', '--minify_third_party_libs_only']) subprocess.call([ os.path.join(common.NODE_MODULES_PATH, 'karma', 'bin', 'karma'), @@ -84,4 +83,4 @@ def main(argv=None): if __name__ == '__main__': - main(argv=sys.argv[1:]) + main(args=None) diff --git a/scripts/run_performance_tests.py b/scripts/run_performance_tests.py index 44e5f9bb6811..1ff6301f0bc6 100644 --- a/scripts/run_performance_tests.py +++ b/scripts/run_performance_tests.py @@ -20,7 +20,6 @@ import atexit import os import subprocess -import sys import time import python_utils @@ -90,12 +89,12 @@ def run_performance_test(test_name, xvfb_prefix): '--test_target=core.tests.performance_tests.%s' % test_name]) -def main(argv=None): +def main(args=None): """Main function to run 
the performance tests.""" setup.main() setup_gae.main() - parsed_args = _PARSER.parse_args(args=argv) + parsed_args = _PARSER.parse_args(args=args) install_third_party_libs.maybe_install_dependencies( parsed_args.skip_install, parsed_args.run_minified_tests) @@ -165,4 +164,4 @@ def main(argv=None): if __name__ == '__main__': - main(argv=sys.argv[1:]) + main(args=None) diff --git a/scripts/run_presubmit_checks.py b/scripts/run_presubmit_checks.py index fe331b93f2a9..cb5007bed697 100644 --- a/scripts/run_presubmit_checks.py +++ b/scripts/run_presubmit_checks.py @@ -28,7 +28,6 @@ import argparse import subprocess -import sys import python_utils @@ -53,12 +52,12 @@ help='optional; if specified, the origin branch to compare against.') -def main(argv=None): +def main(args=None): """Run the presubmit checks.""" # Run Javascript and Python linters. python_utils.PRINT('Linting files since the last commit') - pre_commit_linter.main(argv=[]) + pre_commit_linter.main(args=[]) python_utils.PRINT('Linting passed.') python_utils.PRINT('') @@ -72,7 +71,7 @@ def main(argv=None): '-l']) # Set the origin branch to develop if it's not specified. - parsed_args = _PARSER.parse_args(args=argv) + parsed_args = _PARSER.parse_args(args=args) if parsed_args.branch: branch = parsed_args.branch elif matched_branch_num == '1': @@ -88,7 +87,7 @@ def main(argv=None): if common.FRONTEND_DIR in all_changed_files: # Run frontend unit tests. python_utils.PRINT('Running frontend unit tests') - run_frontend_tests.main(argv=['--run_minified_tests']) + run_frontend_tests.main(args=['--run_minified_tests']) python_utils.PRINT('Frontend tests passed.') else: # If files in common.FRONTEND_DIR were not changed, skip the tests. @@ -98,9 +97,9 @@ def main(argv=None): # Run backend tests. 
python_utils.PRINT('Running backend tests') - run_backend_tests.main(argv=[]) + run_backend_tests.main(args=[]) python_utils.PRINT('Backend tests passed.') if __name__ == '__main__': - main(argv=sys.argv[1:]) + main(args=None) diff --git a/scripts/start.py b/scripts/start.py index e8271c860c2a..5cce5fae9487 100644 --- a/scripts/start.py +++ b/scripts/start.py @@ -25,7 +25,6 @@ import os import re import subprocess -import sys import time import python_utils @@ -70,7 +69,7 @@ def cleanup(): time.sleep(1) -def main(argv=None): +def main(args=None): """Starts up a development server running Oppia.""" setup.main() setup_gae.main() @@ -88,7 +87,7 @@ def main(argv=None): 'running at port %s.' % python_utils.UNICODE(PORT_NUMBER_FOR_GAE_SERVER)]) - parsed_args = _PARSER.parse_args(args=argv) + parsed_args = _PARSER.parse_args(args=args) clear_datastore_arg = ( '' if parsed_args.save_datastore else '--clear_datastore=true') enable_console_arg = ( @@ -103,7 +102,7 @@ def main(argv=None): python_utils.PRINT( re.sub( r'\'DEV_MODE\': .*', constants_env_variable, line), end='') - build.main(argv=['--prod_env', '--enable_watcher']) + build.main(args=['--prod_env', '--enable_watcher']) app_yaml_filepath = 'app.yaml' else: constants_env_variable = '\'DEV_MODE\': true' @@ -114,7 +113,7 @@ def main(argv=None): python_utils.PRINT( re.sub( r'\'DEV_MODE\': .*', constants_env_variable, line), end='') - build.main(argv=['--enable_watcher']) + build.main(args=['--enable_watcher']) app_yaml_filepath = 'app_dev.yaml' # Set up a local dev instance. 
@@ -202,4 +201,4 @@ def main(argv=None): if __name__ == '__main__': - main(argv=sys.argv[1:]) + main(args=None) From 320b65ed962e72c16fc5a929c8ee3617934f569c Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Mon, 2 Sep 2019 23:22:32 +0530 Subject: [PATCH 126/141] fix permission --- scripts/common.py | 2 ++ scripts/setup.py | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/scripts/common.py b/scripts/common.py index acaac1866a57..32e9dbf41a5f 100644 --- a/scripts/common.py +++ b/scripts/common.py @@ -224,6 +224,7 @@ def recursive_chown(path, uid, gid): uid: int. Owner ID to be set. gid: int. Group ID to be set. """ + os.chown(path, uid, gid) for root, directories, filenames in os.walk(path): for directory in directories: os.chown(os.path.join(root, directory), uid, gid) @@ -238,6 +239,7 @@ def recursive_chmod(path, mode): path: str. The path for which mode would be set. mode: int. The mode to be set. """ + os.chmod(path, mode) for root, directories, filenames in os.walk(path): for directory in directories: os.chmod(os.path.join(root, directory), mode) diff --git a/scripts/setup.py b/scripts/setup.py index 3d7ca6feb28c..e0831cacd5e8 100644 --- a/scripts/setup.py +++ b/scripts/setup.py @@ -143,7 +143,7 @@ def main(): # Change ownership of node_modules. # Note: on some machines, these commands seem to take quite a long time. common.recursive_chown(common.NODE_MODULES_PATH, os.getuid(), -1) - common.recursive_chmod(common.NODE_MODULES_PATH, 744) + common.recursive_chmod(common.NODE_MODULES_PATH, 0o744) # Adjust path to support the default Chrome locations for Unix, Windows and # Mac OS. 
From 127ceada46a053ca9ded1f8c69bb82558f54254e Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Mon, 2 Sep 2019 23:42:12 +0530 Subject: [PATCH 127/141] fix --- scripts/build.py | 2 +- scripts/install_third_party_libs.py | 2 +- scripts/pre_commit_hook.py | 2 +- scripts/pre_commit_linter.py | 2 +- scripts/pre_push_hook.py | 2 +- scripts/run_backend_tests.py | 2 +- scripts/run_e2e_tests.py | 2 +- scripts/run_frontend_tests.py | 2 +- scripts/run_performance_tests.py | 2 +- scripts/run_presubmit_checks.py | 2 +- scripts/start.py | 4 +++- 11 files changed, 13 insertions(+), 11 deletions(-) diff --git a/scripts/build.py b/scripts/build.py index 553736f7876a..d24b01162575 100644 --- a/scripts/build.py +++ b/scripts/build.py @@ -1394,4 +1394,4 @@ def main(args=None): # The 'no coverage' pragma is used as this line is un-testable. This is because # it will only be called when build.py is used as a script. if __name__ == '__main__': # pragma: no cover - main(args=None) + main() diff --git a/scripts/install_third_party_libs.py b/scripts/install_third_party_libs.py index d3d0a85fd6fd..03b0f612b151 100644 --- a/scripts/install_third_party_libs.py +++ b/scripts/install_third_party_libs.py @@ -267,4 +267,4 @@ def main(args=None): if __name__ == '__main__': - main(args=None) + main() diff --git a/scripts/pre_commit_hook.py b/scripts/pre_commit_hook.py index 518f8e3f93d9..a389bfef3874 100755 --- a/scripts/pre_commit_hook.py +++ b/scripts/pre_commit_hook.py @@ -150,4 +150,4 @@ def main(args=None): if __name__ == '__main__': - main(args=None) + main() diff --git a/scripts/pre_commit_linter.py b/scripts/pre_commit_linter.py index 9c1ff92e971e..e33991283a8a 100644 --- a/scripts/pre_commit_linter.py +++ b/scripts/pre_commit_linter.py @@ -3198,4 +3198,4 @@ def main(args=None): if __name__ == '__main__': - main(args=None) + main() diff --git a/scripts/pre_push_hook.py b/scripts/pre_push_hook.py index b4e6fb5074ae..ae2cd4944e8e 100755 --- a/scripts/pre_push_hook.py +++ 
b/scripts/pre_push_hook.py @@ -408,4 +408,4 @@ def main(args=None): if __name__ == '__main__': - main(args=None) + main() diff --git a/scripts/run_backend_tests.py b/scripts/run_backend_tests.py index 06b2ec3699eb..640dc9f73c62 100644 --- a/scripts/run_backend_tests.py +++ b/scripts/run_backend_tests.py @@ -499,4 +499,4 @@ def main(args=None): if __name__ == '__main__': - main(args=None) + main() diff --git a/scripts/run_e2e_tests.py b/scripts/run_e2e_tests.py index 55aad3acd9f6..d50e94a91851 100644 --- a/scripts/run_e2e_tests.py +++ b/scripts/run_e2e_tests.py @@ -248,4 +248,4 @@ def main(args=None): if __name__ == '__main__': - main(args=None) + main() diff --git a/scripts/run_frontend_tests.py b/scripts/run_frontend_tests.py index 969b989c5f04..8a1aa80dfcc6 100644 --- a/scripts/run_frontend_tests.py +++ b/scripts/run_frontend_tests.py @@ -83,4 +83,4 @@ def main(args=None): if __name__ == '__main__': - main(args=None) + main() diff --git a/scripts/run_performance_tests.py b/scripts/run_performance_tests.py index 1ff6301f0bc6..e18221b066ca 100644 --- a/scripts/run_performance_tests.py +++ b/scripts/run_performance_tests.py @@ -164,4 +164,4 @@ def main(args=None): if __name__ == '__main__': - main(args=None) + main() diff --git a/scripts/run_presubmit_checks.py b/scripts/run_presubmit_checks.py index cb5007bed697..86694cabc156 100644 --- a/scripts/run_presubmit_checks.py +++ b/scripts/run_presubmit_checks.py @@ -102,4 +102,4 @@ def main(args=None): if __name__ == '__main__': - main(args=None) + main() diff --git a/scripts/start.py b/scripts/start.py index 5cce5fae9487..f014b8cd0ac7 100644 --- a/scripts/start.py +++ b/scripts/start.py @@ -31,6 +31,7 @@ from . import build from . import common +from . import install_third_party_libs from . import setup from . import setup_gae @@ -73,6 +74,7 @@ def main(args=None): """Starts up a development server running Oppia.""" setup.main() setup_gae.main() + install_third_party_libs.main(args=[]) # Runs cleanup function on exit. 
atexit.register(cleanup) @@ -201,4 +203,4 @@ def main(args=None): if __name__ == '__main__': - main(args=None) + main() From 69395829bdfe19864de0fc46949e40ddaacec1fe Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Tue, 3 Sep 2019 01:09:23 +0530 Subject: [PATCH 128/141] address comments --- scripts/create_expression_parser.py | 12 ++++++++++-- scripts/install_chrome_on_travis.py | 15 +++++++++++---- scripts/install_third_party.py | 7 ++++++- scripts/install_third_party_libs.py | 10 ++++++---- scripts/run_backend_tests.py | 4 ++-- scripts/run_e2e_tests.py | 6 +++--- scripts/run_frontend_tests.py | 4 ++-- scripts/run_performance_tests.py | 4 ++-- scripts/run_tests.py | 10 +++++----- scripts/setup.py | 10 +++++++++- scripts/setup_gae.py | 10 +++++++++- scripts/start.py | 4 ++-- 12 files changed, 67 insertions(+), 29 deletions(-) diff --git a/scripts/create_expression_parser.py b/scripts/create_expression_parser.py index 1744f9b61453..329624d0e0a9 100644 --- a/scripts/create_expression_parser.py +++ b/scripts/create_expression_parser.py @@ -16,6 +16,7 @@ from __future__ import absolute_import # pylint: disable=import-only-modules from __future__ import unicode_literals # pylint: disable=import-only-modules +import argparse import fileinput import os import re @@ -26,10 +27,17 @@ from . import common from . 
import setup +_PARSER = argparse.ArgumentParser(description=""" + Run this script from the oppia root folder: + python -m scripts.create_expression_parser -def main(): + The root folder MUST be named 'oppia'.""") + + +def main(args=None): """Produces the expression parser.""" - setup.main() + unused_parsed_args = _PARSER.parse_args(args=args) + setup.main(args=[]) expression_parser_definition = os.path.join( 'core', 'templates', 'dev', 'head', 'expressions', 'parser.pegjs') diff --git a/scripts/install_chrome_on_travis.py b/scripts/install_chrome_on_travis.py index 4ebb68af3fba..6b10ce9034f3 100644 --- a/scripts/install_chrome_on_travis.py +++ b/scripts/install_chrome_on_travis.py @@ -14,23 +14,30 @@ """This script should only be ran by Travis to install and provide a constant version of Chrome. -CHROME_SOURCE_URL is an environment variable set in Oppia's Travis repo -settings. It can be found under 'Environment Variables' header here: -https://travis-ci.org/oppia/oppia/settings. """ from __future__ import absolute_import # pylint: disable=import-only-modules from __future__ import unicode_literals # pylint: disable=import-only-modules +import argparse import os import subprocess import python_utils +_PARSER = argparse.ArgumentParser(description=""" + This script should only be ran by Travis to install and provide a constant + version of Chrome.""") -def main(): + +def main(args=None): """Installs and provides a constant version of Chrome.""" + unused_parsed_args = _PARSER.parse_args(args=args) home_directory = os.environ.get('HOME') oppia_dir = os.getcwd() + + # CHROME_SOURCE_URL is an environment variable set in Oppia's Travis repo + # settings. It can be found under 'Environment Variables' header here: + # https://travis-ci.org/oppia/oppia/settings. 
chrome_source_url = os.environ.get('CHROME_SOURCE_URL') travis_chrome_path = os.path.join( home_directory, '.cache/TravisChrome/', diff --git a/scripts/install_third_party.py b/scripts/install_third_party.py index 955121e428de..7452d07e55e1 100644 --- a/scripts/install_third_party.py +++ b/scripts/install_third_party.py @@ -16,6 +16,7 @@ from __future__ import absolute_import # pylint: disable=import-only-modules from __future__ import unicode_literals # pylint: disable=import-only-modules +import argparse import contextlib import json import os @@ -69,6 +70,9 @@ } } +_PARSER = argparse.ArgumentParser(description=""" + Installation script for Oppia third-party libraries.""") + def download_files(source_url_root, target_dir, source_filenames): """Downloads a group of files and saves them to a given directory. @@ -320,8 +324,9 @@ def download_manifest_files(filepath): dependency_tar_root_name, dependency_target_root_name) -def main(): +def main(args=None): """Installs all the third party libraries.""" + unused_parsed_args = _PARSER.parse_args(args=args) download_manifest_files(MANIFEST_FILE_PATH) diff --git a/scripts/install_third_party_libs.py b/scripts/install_third_party_libs.py index 03b0f612b151..b71af90c053a 100644 --- a/scripts/install_third_party_libs.py +++ b/scripts/install_third_party_libs.py @@ -56,7 +56,9 @@ # pylint: enable=wrong-import-order # pylint: enable=wrong-import-position -_PARSER = argparse.ArgumentParser() +_PARSER = argparse.ArgumentParser(description=""" + Installation script for Oppia third-party libraries.""") + _PARSER.add_argument( '--nojsrepl', help='optional; if specified, skips installation of skulpt.', @@ -226,8 +228,8 @@ def ensure_pip_library_is_installed(package, version, path): def main(args=None): """Install third-party libraries for Oppia.""" - setup.main() - setup_gae.main() + setup.main(args=[]) + setup_gae.main(args=[]) pip_dependencies = [ ('pylint', '1.9.4', common.OPPIA_TOOLS_DIR), ('Pillow', '6.0.0', 
common.OPPIA_TOOLS_DIR), @@ -246,7 +248,7 @@ def main(args=None): # Download and install required JS and zip files. python_utils.PRINT('Installing third-party JS libraries and zip files.') - install_third_party.main() + install_third_party.main(args=[]) # Install third-party node modules needed for the build process. subprocess.call([common.NPM_PATH, 'install', '--only=dev']) diff --git a/scripts/run_backend_tests.py b/scripts/run_backend_tests.py index 1ef0b2ed6ed4..5bb5f358a3cc 100644 --- a/scripts/run_backend_tests.py +++ b/scripts/run_backend_tests.py @@ -310,8 +310,8 @@ def _get_test_target_classes(path): def main(args=None): """Run the tests.""" - setup.main() - setup_gae.main() + setup.main(args=[]) + setup_gae.main(args=[]) for directory in DIRS_TO_ADD_TO_SYS_PATH: if not os.path.exists(os.path.dirname(directory)): diff --git a/scripts/run_e2e_tests.py b/scripts/run_e2e_tests.py index d50e94a91851..f23efae38f02 100644 --- a/scripts/run_e2e_tests.py +++ b/scripts/run_e2e_tests.py @@ -103,10 +103,10 @@ def cleanup(): def main(args=None): """Runs the end to end tests.""" - setup.main() - setup_gae.main() + setup.main(args=[]) + setup_gae.main(args=[]) if os.environ.get('TRAVIS'): - install_chrome_on_travis.main() + install_chrome_on_travis.main(args=[]) parsed_args = _PARSER.parse_args(args=args) install_third_party_libs.maybe_install_dependencies( diff --git a/scripts/run_frontend_tests.py b/scripts/run_frontend_tests.py index 8a1aa80dfcc6..271b9e906f4d 100644 --- a/scripts/run_frontend_tests.py +++ b/scripts/run_frontend_tests.py @@ -51,8 +51,8 @@ def main(args=None): """Runs the frontend tests.""" - setup.main() - setup_gae.main() + setup.main(args=[]) + setup_gae.main(args=[]) parsed_args = _PARSER.parse_args(args=args) install_third_party_libs.maybe_install_dependencies( diff --git a/scripts/run_performance_tests.py b/scripts/run_performance_tests.py index e18221b066ca..ab78c4b418e4 100644 --- a/scripts/run_performance_tests.py +++ 
b/scripts/run_performance_tests.py @@ -91,8 +91,8 @@ def run_performance_test(test_name, xvfb_prefix): def main(args=None): """Main function to run the performance tests.""" - setup.main() - setup_gae.main() + setup.main(args=[]) + setup_gae.main(args=[]) parsed_args = _PARSER.parse_args(args=args) install_third_party_libs.maybe_install_dependencies( diff --git a/scripts/run_tests.py b/scripts/run_tests.py index 753b13e5cca0..4003757c2a0b 100644 --- a/scripts/run_tests.py +++ b/scripts/run_tests.py @@ -36,22 +36,22 @@ def main(): """Run all the tests.""" - setup.main() - setup_gae.main() + setup.main(args=[]) + setup_gae.main(args=[]) # Run frontend unit tests. python_utils.PRINT('Running frontend unit tests') - run_frontend_tests.main() + run_frontend_tests.main(args=[]) python_utils.PRINT('Frontend tests passed.') # Run backend tests. python_utils.PRINT('Running backend tests') - run_backend_tests.main() + run_backend_tests.main(args=[]) python_utils.PRINT('Backend tests passed.') # Run end-to-end tests. python_utils.PRINT('Running end-to-end tests') - run_e2e_tests.main() + run_e2e_tests.main(args=[]) python_utils.PRINT( 'SUCCESS All frontend, backend and end-to-end tests passed!') diff --git a/scripts/setup.py b/scripts/setup.py index e0831cacd5e8..bf197d0b7efc 100644 --- a/scripts/setup.py +++ b/scripts/setup.py @@ -18,6 +18,7 @@ from __future__ import absolute_import # pylint: disable=import-only-modules from __future__ import unicode_literals # pylint: disable=import-only-modules +import argparse import os import shutil import sys @@ -27,6 +28,12 @@ from . import common +_PARSER = argparse.ArgumentParser(description=""" + This file should not be invoked directly, but called from other Python + scripts. + + Python execution environent set up for all scripts.""") + def delete_directory_tree(directory_path): """Recursively delete an existing directory tree. 
Does not do anything if @@ -78,8 +85,9 @@ def test_python_version(): raise Exception -def main(): +def main(args=None): """Runs the script to setup Oppia.""" + unused_parsed_args = _PARSER.parse_args(args=args) test_python_version() # The second option allows this script to also be run from deployment diff --git a/scripts/setup_gae.py b/scripts/setup_gae.py index 6b461bfea133..bad7ee970ae6 100644 --- a/scripts/setup_gae.py +++ b/scripts/setup_gae.py @@ -18,6 +18,7 @@ from __future__ import absolute_import # pylint: disable=import-only-modules from __future__ import unicode_literals # pylint: disable=import-only-modules +import argparse import os import sys import tarfile @@ -27,9 +28,16 @@ from . import common +_PARSER = argparse.ArgumentParser(description=""" + This file should not be invoked directly, but called from other Python + scripts. -def main(): + Python execution environment setup for scripts that require GAE.""") + + +def main(args=None): """Runs the script to setup GAE.""" + unused_parsed_args = _PARSER.parse_args(args=args) coverage_home = os.path.join(common.OPPIA_TOOLS_DIR, 'coverage-4.5.4') # Note that if the following line is changed so that it uses webob_1_1_1, diff --git a/scripts/start.py b/scripts/start.py index f014b8cd0ac7..98c495b0bcc0 100644 --- a/scripts/start.py +++ b/scripts/start.py @@ -72,8 +72,8 @@ def cleanup(): def main(args=None): """Starts up a development server running Oppia.""" - setup.main() - setup_gae.main() + setup.main(args=[]) + setup_gae.main(args=[]) install_third_party_libs.main(args=[]) # Runs cleanup function on exit. 
From 80a9599a829d297515862d012e9142e87e416d67 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Tue, 3 Sep 2019 11:10:02 +0530 Subject: [PATCH 129/141] fix e2e --- scripts/run_e2e_tests.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/run_e2e_tests.py b/scripts/run_e2e_tests.py index f23efae38f02..ad8b9a5ec97a 100644 --- a/scripts/run_e2e_tests.py +++ b/scripts/run_e2e_tests.py @@ -187,6 +187,9 @@ def main(args=None): python_utils.UNICODE(PORT_NUMBER_FOR_GAE_SERVER), app_yaml_filepath), shell=True)) + for process in background_processes: + process.wait() + # Wait for the servers to come up. while not common.is_port_open( PORT_NUMBER_FOR_SELENIUM_SERVER) or not common.is_port_open( @@ -243,9 +246,6 @@ def main(args=None): % parsed_args.sharding_instances, '--suite', parsed_args.suite, '--params.devMode="%s"' % dev_mode]) - for process in background_processes: - process.wait() - if __name__ == '__main__': main() From 5ac03b0a2f414b57925daf2e08d735ded32e7fa8 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Tue, 3 Sep 2019 16:59:49 +0530 Subject: [PATCH 130/141] address comments --- core/tests/gae_suite.py | 5 ++-- core/tests/gae_suite_test.py | 6 ++--- scripts/build.py | 14 +++++------ scripts/build_test.py | 6 ++--- scripts/clean.py | 9 +++++++- scripts/create_expression_parser.py | 8 +++---- scripts/install_chrome_on_travis.py | 5 ++-- scripts/install_third_party.py | 3 ++- scripts/install_third_party_libs.py | 10 ++++---- scripts/release_info.py | 11 ++++++--- scripts/run_backend_tests.py | 12 +++++----- scripts/run_e2e_tests.py | 36 +++++++++++++---------------- scripts/run_frontend_tests.py | 17 +++++++------- scripts/run_performance_tests.py | 21 ++++++++--------- scripts/run_presubmit_checks.py | 31 +++++++++++++------------ scripts/run_tests.py | 15 +++++++++++- scripts/setup.py | 8 +++---- scripts/setup_gae.py | 8 +++---- scripts/start.py | 11 +++++---- 19 files changed, 131 insertions(+), 105 deletions(-) diff 
--git a/core/tests/gae_suite.py b/core/tests/gae_suite.py index b40fe2082cd0..72f1b70cc7d7 100644 --- a/core/tests/gae_suite.py +++ b/core/tests/gae_suite.py @@ -81,8 +81,10 @@ def create_test_suites(test_target=None): top_level_dir=CURR_DIR)]) -def main(): +def main(args=None): """Runs the tests.""" + parsed_args = _PARSER.parse_args(args=args) + for directory in DIRS_TO_ADD_TO_SYS_PATH: if not os.path.exists(os.path.dirname(directory)): raise Exception('Directory %s does not exist.' % directory) @@ -91,7 +93,6 @@ def main(): import dev_appserver dev_appserver.fix_sys_path() - parsed_args = _PARSER.parse_args() suites = create_test_suites(test_target=parsed_args.test_target) results = [unittest.TextTestRunner(verbosity=2).run(suite) diff --git a/core/tests/gae_suite_test.py b/core/tests/gae_suite_test.py index d0196387d9f8..2f0c42360918 100644 --- a/core/tests/gae_suite_test.py +++ b/core/tests/gae_suite_test.py @@ -41,7 +41,7 @@ def test_cannot_add_directory_with_invalid_path(self): assert_raises_regexp_context_manager = self.assertRaisesRegexp( Exception, 'Directory invalid_path does not exist.') with assert_raises_regexp_context_manager, dir_to_add_swap: - gae_suite.main() + gae_suite.main(args=[]) def test_failing_tests(self): @@ -57,7 +57,7 @@ def _mock_create_test_suites(**unused_test_target): 'Test suite failed: 1 tests run, 0 errors, 1 failures.') with create_test_suites_swap, assert_raises_regexp_context_manager: - gae_suite.main() + gae_suite.main(args=[]) def test_no_tests_run_with_invalid_filename(self): @@ -72,4 +72,4 @@ def _mock_create_test_suites(**unused_test_target): Exception, 'No module named invalid_test') with create_test_suites_swap, assert_raises_regexp_context_manager: - gae_suite.main() + gae_suite.main(args=[]) diff --git a/scripts/build.py b/scripts/build.py index d24b01162575..c056d8748426 100644 --- a/scripts/build.py +++ b/scripts/build.py @@ -143,7 +143,12 @@ APP_DEV_YAML_FILEPATH = 'app_dev.yaml' APP_YAML_FILEPATH = 'app.yaml' 
-_PARSER = argparse.ArgumentParser() +_PARSER = argparse.ArgumentParser(description=""" +Creates a third-party directory where all the JS and CSS dependencies are +built and stored. Depending on the options passed to the script, might also +minify third-party libraries and/or generate a build directory. +""") + _PARSER.add_argument( '--prod_env', action='store_true', default=False, dest='prod_mode') _PARSER.add_argument( @@ -1359,12 +1364,7 @@ def compile_typescript_files_continuously(project_dir): def main(args=None): - """The main method of this script. - - Creates a third-party directory where all the JS and CSS dependencies are - built and stored. Depending on the options passed to the script, might also - minify third-party libraries and/or generate a build directory. - """ + """The main method of this script.""" options = _PARSER.parse_args(args=args) # Regenerate /third_party/generated from scratch. safe_delete_directory_tree(THIRD_PARTY_GENERATED_DEV_DIR) diff --git a/scripts/build_test.py b/scripts/build_test.py index d28c110df093..be16809ad0ea 100644 --- a/scripts/build_test.py +++ b/scripts/build_test.py @@ -920,7 +920,7 @@ def mock_compare_file_count(unused_first_dir, unused_second_dir): with ensure_files_exist_swap, build_using_webpack_swap, ( compile_typescript_files_swap), compare_file_count_swap, args_swap: - build.main() + build.main(args=[]) self.assertEqual(check_function_calls, expected_check_function_calls) @@ -950,7 +950,7 @@ def mock_compile_typescript_files_continuously(unused_project_dir): with ensure_files_exist_swap, ( compile_typescript_files_continuously_swap), args_swap: - build.main() + build.main(args=[]) self.assertEqual(check_function_calls, expected_check_function_calls) @@ -982,7 +982,7 @@ def mock_compile_typescript_files(unused_project_dir): with ensure_files_exist_swap, compile_typescript_files_swap, ( assert_raises_regexp_context_manager), args_swap: - build.main() + build.main(args=[]) self.assertEqual(check_function_calls, 
expected_check_function_calls) diff --git a/scripts/clean.py b/scripts/clean.py index be634a1f7216..778425b0f6ec 100644 --- a/scripts/clean.py +++ b/scripts/clean.py @@ -16,6 +16,7 @@ from __future__ import absolute_import # pylint: disable=import-only-modules from __future__ import unicode_literals # pylint: disable=import-only-modules +import argparse import os import shutil @@ -24,6 +25,10 @@ CURR_DIR = os.path.abspath(os.getcwd()) OPPIA_TOOLS_DIR = os.path.join(CURR_DIR, '..', 'oppia_tools') +_PARSER = argparse.ArgumentParser(description=""" +Deletes temporary and installed files. +""") + def delete_directory_tree(directory_path): """Recursively delete an existing directory tree. Does not do anything if @@ -48,8 +53,10 @@ def delete_file(filepath): os.remove(filepath) -def main(): +def main(args=None): """Runs the script to clean temporary and installed files.""" + unused_parsed_args = _PARSER.parse_args(args=args) + delete_directory_tree(OPPIA_TOOLS_DIR) delete_directory_tree('node_modules/') delete_directory_tree('third_party/') diff --git a/scripts/create_expression_parser.py b/scripts/create_expression_parser.py index 329624d0e0a9..a018d6ed6db8 100644 --- a/scripts/create_expression_parser.py +++ b/scripts/create_expression_parser.py @@ -28,10 +28,10 @@ from . import setup _PARSER = argparse.ArgumentParser(description=""" - Run this script from the oppia root folder: - python -m scripts.create_expression_parser - - The root folder MUST be named 'oppia'.""") +Run this script from the oppia root folder: + python -m scripts.create_expression_parser +The root folder MUST be named 'oppia'. 
+""") def main(args=None): diff --git a/scripts/install_chrome_on_travis.py b/scripts/install_chrome_on_travis.py index 6b10ce9034f3..cf97ca18314e 100644 --- a/scripts/install_chrome_on_travis.py +++ b/scripts/install_chrome_on_travis.py @@ -25,8 +25,9 @@ import python_utils _PARSER = argparse.ArgumentParser(description=""" - This script should only be ran by Travis to install and provide a constant - version of Chrome.""") +This script should only be ran by Travis to install and provide a constant +version of Chrome. +""") def main(args=None): diff --git a/scripts/install_third_party.py b/scripts/install_third_party.py index 7452d07e55e1..de4a271d360f 100644 --- a/scripts/install_third_party.py +++ b/scripts/install_third_party.py @@ -71,7 +71,8 @@ } _PARSER = argparse.ArgumentParser(description=""" - Installation script for Oppia third-party libraries.""") +Installation script for Oppia third-party libraries. +""") def download_files(source_url_root, target_dir, source_filenames): diff --git a/scripts/install_third_party_libs.py b/scripts/install_third_party_libs.py index b71af90c053a..553ed0fee333 100644 --- a/scripts/install_third_party_libs.py +++ b/scripts/install_third_party_libs.py @@ -57,7 +57,8 @@ # pylint: enable=wrong-import-position _PARSER = argparse.ArgumentParser(description=""" - Installation script for Oppia third-party libraries.""") +Installation script for Oppia third-party libraries. +""") _PARSER.add_argument( '--nojsrepl', @@ -108,7 +109,7 @@ def pip_install(package, version, install_path): install_path]) -def install_skulpt(args): +def install_skulpt(parsed_args): """Download and install Skulpt. Skulpt is built using a Python script included within the Skulpt repository (skulpt.py). This script normally requires GitPython, however the patches to it below @@ -118,7 +119,6 @@ def install_skulpt(args): warning saying its dist command will not work properly without GitPython, but it does actually work due to the patches. 
""" - parsed_args = _PARSER.parse_args(args=args) no_skulpt = parsed_args.nojsrepl or parsed_args.noskulpt python_utils.PRINT('Checking whether Skulpt is installed in third_party') @@ -228,6 +228,8 @@ def ensure_pip_library_is_installed(package, version, path): def main(args=None): """Install third-party libraries for Oppia.""" + parsed_args = _PARSER.parse_args(args=args) + setup.main(args=[]) setup_gae.main(args=[]) pip_dependencies = [ @@ -257,7 +259,7 @@ def main(args=None): # 374076889. subprocess.call([common.NPM_PATH, 'dedupe']) - install_skulpt(args) + install_skulpt(parsed_args) # Install pre-commit script. python_utils.PRINT('Installing pre-commit hook for git') diff --git a/scripts/release_info.py b/scripts/release_info.py index 251dde65b23b..9af7a6dbcde1 100644 --- a/scripts/release_info.py +++ b/scripts/release_info.py @@ -55,7 +55,11 @@ Log = collections.namedtuple('Log', ['sha1', 'author', 'email', 'message']) -_PARSER = argparse.ArgumentParser() +_PARSER = argparse.ArgumentParser(description=""" +Script that simplifies releases by collecting various information. +Should be run from the oppia root dir. 
+""") + _PARSER.add_argument( '--personal_access_token', help=( @@ -293,14 +297,15 @@ def _check_storage_models(current_release): return [item for item in diff_list if item.startswith('core/storage')] -def main(): +def main(args=None): """Collects necessary info and dumps it to disk.""" + parsed_args = _PARSER.parse_args(args=args) + branch_name = _get_current_branch() if not re.match(r'release-\d+\.\d+\.\d+$', branch_name): raise Exception( 'This script should only be run from the latest release branch.') - parsed_args = _PARSER.parse_args() if parsed_args.personal_access_token is None: python_utils.PRINT( 'No personal access token provided, please set up a personal ' diff --git a/scripts/run_backend_tests.py b/scripts/run_backend_tests.py index 5bb5f358a3cc..74092bf3bcb8 100644 --- a/scripts/run_backend_tests.py +++ b/scripts/run_backend_tests.py @@ -81,11 +81,10 @@ _PARSER = argparse.ArgumentParser(description=""" - Run this script from the oppia root folder: - - python -m scripts.run_backend_tests - - IMPORTANT: Only one of --test_path and --test_target should be specified""") +Run this script from the oppia root folder: + python -m scripts.run_backend_tests +IMPORTANT: Only one of --test_path and --test_target should be specified. +""") _EXCLUSIVE_GROUP = _PARSER.add_mutually_exclusive_group() _EXCLUSIVE_GROUP.add_argument( @@ -310,6 +309,8 @@ def _get_test_target_classes(path): def main(args=None): """Run the tests.""" + parsed_args = _PARSER.parse_args(args=args) + setup.main(args=[]) setup_gae.main(args=[]) @@ -321,7 +322,6 @@ def main(args=None): import dev_appserver dev_appserver.fix_sys_path() - parsed_args = _PARSER.parse_args(args=args) if parsed_args.generate_coverage_report: python_utils.PRINT( 'Checking whether coverage is installed in %s' diff --git a/scripts/run_e2e_tests.py b/scripts/run_e2e_tests.py index ad8b9a5ec97a..20dbb384c016 100644 --- a/scripts/run_e2e_tests.py +++ b/scripts/run_e2e_tests.py @@ -35,14 +35,12 @@ from . 
import setup_gae _PARSER = argparse.ArgumentParser(description=""" - Run this script from the oppia root folder: - - python -m scripts.run_e2e_tests - - The root folder MUST be named 'oppia'. - - Note: You can replace 'it' with 'fit' or 'describe' with 'fdescribe' to run - a single test or test suite.""") +Run this script from the oppia root folder: + python -m scripts.run_e2e_tests +The root folder MUST be named 'oppia'. +Note: You can replace 'it' with 'fit' or 'describe' with 'fdescribe' to run +a single test or test suite. +""") _PARSER.add_argument( '--skip_install', @@ -103,12 +101,13 @@ def cleanup(): def main(args=None): """Runs the end to end tests.""" + parsed_args = _PARSER.parse_args(args=args) + setup.main(args=[]) setup_gae.main(args=[]) if os.environ.get('TRAVIS'): install_chrome_on_travis.main(args=[]) - parsed_args = _PARSER.parse_args(args=args) install_third_party_libs.maybe_install_dependencies( parsed_args.skip_install, parsed_args.run_minified_tests) @@ -174,21 +173,18 @@ def main(args=None): # info logs to stderr so we discard them. # TODO(jacob): Find a webdriver or selenium argument that controls log # level. - background_processes = [] - background_processes.append(subprocess.Popen([ - os.path.join(common.NODE_MODULES_PATH, '.bin', 'webdriver-manager'), - 'start', '2>/dev/null'])) + subprocess.Popen( + '(%s start 2>/dev/null)&' + % os.path.join(common.NODE_MODULES_PATH, '.bin', 'webdriver-manager'), + shell=True) # Start a demo server. 
- background_processes.append(subprocess.Popen( - 'python %s/dev_appserver.py --host=0.0.0.0 --port=%s ' + subprocess.Popen( + '(python %s/dev_appserver.py --host=0.0.0.0 --port=%s ' '--clear_datastore=yes --dev_appserver_log_level=critical ' - '--log_level=critical --skip_sdk_update_check=true %s' % ( + '--log_level=critical --skip_sdk_update_check=true %s)&' % ( common.GOOGLE_APP_ENGINE_HOME, python_utils.UNICODE(PORT_NUMBER_FOR_GAE_SERVER), - app_yaml_filepath), shell=True)) - - for process in background_processes: - process.wait() + app_yaml_filepath), shell=True) # Wait for the servers to come up. while not common.is_port_open( diff --git a/scripts/run_frontend_tests.py b/scripts/run_frontend_tests.py index 271b9e906f4d..c93276cdab34 100644 --- a/scripts/run_frontend_tests.py +++ b/scripts/run_frontend_tests.py @@ -29,14 +29,12 @@ from . import setup_gae _PARSER = argparse.ArgumentParser(description=""" - Run this script from the oppia root folder: - - python -m scripts.run_frontend_tests - - The root folder MUST be named 'oppia'. - - Note: You can replace 'it' with 'fit' or 'describe' with 'fdescribe' to run - a single test or test suite.""") +Run this script from the oppia root folder: + python -m scripts.run_frontend_tests +The root folder MUST be named 'oppia'. +Note: You can replace 'it' with 'fit' or 'describe' with 'fdescribe' to run +a single test or test suite. 
+""") _PARSER.add_argument( '--skip_install', @@ -51,10 +49,11 @@ def main(args=None): """Runs the frontend tests.""" + parsed_args = _PARSER.parse_args(args=args) + setup.main(args=[]) setup_gae.main(args=[]) - parsed_args = _PARSER.parse_args(args=args) install_third_party_libs.maybe_install_dependencies( parsed_args.skip_install, parsed_args.run_minified_tests) common.print_each_string_after_two_new_lines([ diff --git a/scripts/run_performance_tests.py b/scripts/run_performance_tests.py index ab78c4b418e4..e83fca28abf2 100644 --- a/scripts/run_performance_tests.py +++ b/scripts/run_performance_tests.py @@ -31,16 +31,14 @@ from . import setup_gae _PARSER = argparse.ArgumentParser(description=""" - The root folder MUST be named 'oppia'. - - Run all tests sequentially: - python -m scripts.run_performance_tests without args in order to run all - tests sequentially. - - Run test for a specific page: - python -m scripts.run_performance_tests --test_name=page_test - - page_test is the name of the file containing that test eg. splash_test.""") +The root folder MUST be named 'oppia'. +Run all tests sequentially: + python -m scripts.run_performance_tests without args in order to run all + tests sequentially. +Run test for a specific page: + python -m scripts.run_performance_tests --test_name=page_test +page_test is the name of the file containing that test eg. splash_test. 
+""") _PARSER.add_argument( '--skip_install', @@ -91,10 +89,11 @@ def run_performance_test(test_name, xvfb_prefix): def main(args=None): """Main function to run the performance tests.""" + parsed_args = _PARSER.parse_args(args=args) + setup.main(args=[]) setup_gae.main(args=[]) - parsed_args = _PARSER.parse_args(args=args) install_third_party_libs.maybe_install_dependencies( parsed_args.skip_install, parsed_args.run_minified_tests) diff --git a/scripts/run_presubmit_checks.py b/scripts/run_presubmit_checks.py index 86694cabc156..51494f65aefd 100644 --- a/scripts/run_presubmit_checks.py +++ b/scripts/run_presubmit_checks.py @@ -17,11 +17,6 @@ - Backend Python tests Only when frontend files are changed will it run Frontend Karma unit tests. - -If any of these tests result in errors, this script will terminate. - -Note: The test scripts are arranged in increasing order of time taken. This -enables a broken build to be detected as quickly as possible. """ from __future__ import absolute_import # pylint: disable=import-only-modules from __future__ import unicode_literals # pylint: disable=import-only-modules @@ -37,15 +32,21 @@ from . import run_frontend_tests _PARSER = argparse.ArgumentParser(description=""" - Run this script from the oppia root folder prior to opening a PR: - python -m scripts.run_presubmit_checks - - Set the origin branch to compare against by adding - --branch=your_branch or -b=your_branch - - By default, if the current branch tip exists on remote origin, - the current branch is compared against its tip on GitHub. - Otherwise it's compared against 'develop'.""") +Run this script from the oppia root folder prior to opening a PR: + python -m scripts.run_presubmit_checks +Set the origin branch to compare against by adding +--branch=your_branch or -b=your_branch +By default, if the current branch tip exists on remote origin, +the current branch is compared against its tip on GitHub. +Otherwise it's compared against 'develop'. 
+This script runs the following tests in all cases. +- Javascript and Python Linting +- Backend Python tests +Only when frontend files are changed will it run Frontend Karma unit tests. +If any of these tests result in errors, this script will terminate. +Note: The test scripts are arranged in increasing order of time taken. This +enables a broken build to be detected as quickly as possible. +""") _PARSER.add_argument( '--branch', '-b', @@ -54,6 +55,7 @@ def main(args=None): """Run the presubmit checks.""" + parsed_args = _PARSER.parse_args(args=args) # Run Javascript and Python linters. python_utils.PRINT('Linting files since the last commit') @@ -71,7 +73,6 @@ def main(args=None): '-l']) # Set the origin branch to develop if it's not specified. - parsed_args = _PARSER.parse_args(args=args) if parsed_args.branch: branch = parsed_args.branch elif matched_branch_num == '1': diff --git a/scripts/run_tests.py b/scripts/run_tests.py index 4003757c2a0b..40262ee811b9 100644 --- a/scripts/run_tests.py +++ b/scripts/run_tests.py @@ -25,6 +25,8 @@ from __future__ import absolute_import # pylint: disable=import-only-modules from __future__ import unicode_literals # pylint: disable=import-only-modules +import argparse + import python_utils from . import run_backend_tests @@ -33,9 +35,20 @@ from . import setup from . import setup_gae +_PARSER = argparse.ArgumentParser(description=""" +Run this script from the oppia root folder: + python -m scripts.run_tests +This script runs all the tests, in this order: +- Frontend Karma unit tests +- Backend Python tests +- End-to-end Protractor tests +""") + -def main(): +def main(args=None): """Run all the tests.""" + unused_parsed_args = _PARSER.parse_args(args=args) + setup.main(args=[]) setup_gae.main(args=[]) diff --git a/scripts/setup.py b/scripts/setup.py index bf197d0b7efc..f16c8e150a3d 100644 --- a/scripts/setup.py +++ b/scripts/setup.py @@ -29,10 +29,10 @@ from . 
import common _PARSER = argparse.ArgumentParser(description=""" - This file should not be invoked directly, but called from other Python - scripts. - - Python execution environent set up for all scripts.""") +This file should not be invoked directly, but called from other Python +scripts. +Python execution environent set up for all scripts. +""") def delete_directory_tree(directory_path): diff --git a/scripts/setup_gae.py b/scripts/setup_gae.py index bad7ee970ae6..6b99b001ddb2 100644 --- a/scripts/setup_gae.py +++ b/scripts/setup_gae.py @@ -29,10 +29,10 @@ from . import common _PARSER = argparse.ArgumentParser(description=""" - This file should not be invoked directly, but called from other Python - scripts. - - Python execution environment setup for scripts that require GAE.""") +This file should not be invoked directly, but called from other Python +scripts. +Python execution environment setup for scripts that require GAE. +""") def main(args=None): diff --git a/scripts/start.py b/scripts/start.py index 98c495b0bcc0..31f347f388da 100644 --- a/scripts/start.py +++ b/scripts/start.py @@ -36,10 +36,10 @@ from . import setup_gae _PARSER = argparse.ArgumentParser(description=""" - Run the script from the oppia root folder: - python -m scripts.start - - Note that the root folder MUST be named 'oppia'.""") +Run the script from the oppia root folder: + python -m scripts.start +Note that the root folder MUST be named 'oppia'. +""") _PARSER.add_argument( '--save_datastore', @@ -72,6 +72,8 @@ def cleanup(): def main(args=None): """Starts up a development server running Oppia.""" + parsed_args = _PARSER.parse_args(args=args) + setup.main(args=[]) setup_gae.main(args=[]) install_third_party_libs.main(args=[]) @@ -89,7 +91,6 @@ def main(args=None): 'running at port %s.' 
% python_utils.UNICODE(PORT_NUMBER_FOR_GAE_SERVER)]) - parsed_args = _PARSER.parse_args(args=args) clear_datastore_arg = ( '' if parsed_args.save_datastore else '--clear_datastore=true') enable_console_arg = ( From 9a84e6f3686162d6340faee46907be4e203fa5e3 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Tue, 3 Sep 2019 17:01:34 +0530 Subject: [PATCH 131/141] fix --- scripts/setup.py | 6 +----- scripts/setup_gae.py | 6 +----- 2 files changed, 2 insertions(+), 10 deletions(-) diff --git a/scripts/setup.py b/scripts/setup.py index f16c8e150a3d..7df452b6f723 100644 --- a/scripts/setup.py +++ b/scripts/setup.py @@ -12,9 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""This file should not be invoked directly, but called from other Python -scripts. Python execution environent set up for all scripts. -""" +"""Python execution environent set up for all scripts.""" from __future__ import absolute_import # pylint: disable=import-only-modules from __future__ import unicode_literals # pylint: disable=import-only-modules @@ -29,8 +27,6 @@ from . import common _PARSER = argparse.ArgumentParser(description=""" -This file should not be invoked directly, but called from other Python -scripts. Python execution environent set up for all scripts. """) diff --git a/scripts/setup_gae.py b/scripts/setup_gae.py index 6b99b001ddb2..9d72854fd615 100644 --- a/scripts/setup_gae.py +++ b/scripts/setup_gae.py @@ -12,9 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""This file should not be invoked directly, but called from other Python -scripts. Python execution environment setup for scripts that require GAE. 
-""" +"""Python execution environment setup for scripts that require GAE.""" from __future__ import absolute_import # pylint: disable=import-only-modules from __future__ import unicode_literals # pylint: disable=import-only-modules @@ -29,8 +27,6 @@ from . import common _PARSER = argparse.ArgumentParser(description=""" -This file should not be invoked directly, but called from other Python -scripts. Python execution environment setup for scripts that require GAE. """) From 770c6f3d1cc4bab958e59b68922a8af930a9b06f Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Tue, 3 Sep 2019 17:59:29 +0530 Subject: [PATCH 132/141] fix --- scripts/build_test.py | 16 ++++++---------- 1 file changed, 6 insertions(+), 10 deletions(-) diff --git a/scripts/build_test.py b/scripts/build_test.py index be16809ad0ea..26cd4a883e3f 100644 --- a/scripts/build_test.py +++ b/scripts/build_test.py @@ -916,11 +916,10 @@ def mock_compare_file_count(unused_first_dir, unused_second_dir): build, 'compile_typescript_files', mock_compile_typescript_files) compare_file_count_swap = self.swap( build, '_compare_file_count', mock_compare_file_count) - args_swap = self.swap(sys, 'argv', ['build.py', '--prod_env']) with ensure_files_exist_swap, build_using_webpack_swap, ( - compile_typescript_files_swap), compare_file_count_swap, args_swap: - build.main(args=[]) + compile_typescript_files_swap), compare_file_count_swap: + build.main(args=['--prod_env']) self.assertEqual(check_function_calls, expected_check_function_calls) @@ -946,11 +945,10 @@ def mock_compile_typescript_files_continuously(unused_project_dir): compile_typescript_files_continuously_swap = self.swap( build, 'compile_typescript_files_continuously', mock_compile_typescript_files_continuously) - args_swap = self.swap(sys, 'argv', ['build.py', '--enable_watcher']) with ensure_files_exist_swap, ( - compile_typescript_files_continuously_swap), args_swap: - build.main(args=[]) + compile_typescript_files_continuously_swap): + 
build.main(args=['--enable_watcher']) self.assertEqual(check_function_calls, expected_check_function_calls) @@ -974,15 +972,13 @@ def mock_compile_typescript_files(unused_project_dir): build, '_ensure_files_exist', mock_ensure_files_exist) compile_typescript_files_swap = self.swap( build, 'compile_typescript_files', mock_compile_typescript_files) - args_swap = self.swap( - sys, 'argv', ['build.py', '--minify_third_party_libs_only']) assert_raises_regexp_context_manager = self.assertRaisesRegexp( Exception, 'minify_third_party_libs_only should not be set in non-prod mode.') with ensure_files_exist_swap, compile_typescript_files_swap, ( - assert_raises_regexp_context_manager), args_swap: - build.main(args=[]) + assert_raises_regexp_context_manager): + build.main(args=['--minify_third_party_libs_only']) self.assertEqual(check_function_calls, expected_check_function_calls) From d713208d42922ac10821145f26ecca14245bd559 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Tue, 3 Sep 2019 18:10:48 +0530 Subject: [PATCH 133/141] skip e2e --- .travis.yml | 49 ++++---- scripts/run_e2e_tests.py | 247 --------------------------------------- scripts/run_e2e_tests.sh | 227 +++++++++++++++++++++++++++++++++++ 3 files changed, 251 insertions(+), 272 deletions(-) delete mode 100644 scripts/run_e2e_tests.py create mode 100644 scripts/run_e2e_tests.sh diff --git a/.travis.yml b/.travis.yml index 12d8f452625a..7844eb141232 100644 --- a/.travis.yml +++ b/.travis.yml @@ -82,7 +82,6 @@ before_install: - pip install codecov - pip install pyyaml - export CHROME_BIN=/usr/bin/google-chrome-stable -- export CHROME_SOURCE_URL=https://github.com/webnicer/chrome-downloads/raw/master/x64.deb/google-chrome-stable_67.0.3396.99-1_amd64.deb - export DISPLAY=:99.0 - bash -e /etc/init.d/xvfb start @@ -102,30 +101,30 @@ script: # - if [ "$RUN_BACKEND_TESTS" == 'true' ] && [ "$REPORT_BACKEND_COVERAGE" == 'true' ] && [ "$EXCLUDE_LOAD_TESTS" == 'false' ]; then bash scripts/run_backend_tests.sh 
--generate_coverage_report; fi # - if [ "$RUN_BACKEND_TESTS" == 'true' ] && [ "$REPORT_BACKEND_COVERAGE" == 'false' ] && [ "$EXCLUDE_LOAD_TESTS" == 'false' ]; then bash scripts/run_backend_tests.sh; fi # Run the e2e tests in the production environment (using --prod_env). -- if [ "$RUN_E2E_TESTS_ACCESSIBILITY" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="accessibility" --prod_env --skip_install; fi -- if [ "$RUN_E2E_TESTS_ADDITIONAL_EDITOR_AND_PLAYER_FEATURES" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="additionalEditorAndPlayerFeatures" --prod_env --skip_install; fi -- if [ "$RUN_E2E_TESTS_COLLECTIONS" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="collections" --prod_env --skip_install; fi -- if [ "$RUN_E2E_TESTS_CORE_EDITOR_AND_PLAYER_FEATURES" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="coreEditorAndPlayerFeatures" --prod_env --skip_install; fi -- if [ "$RUN_E2E_TESTS_CREATOR_DASHBOARD" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="creatorDashboard" --prod_env --skip_install; fi -- if [ "$RUN_E2E_TESTS_EMBEDDING" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="embedding" --prod_env --skip_install; fi -- if [ "$RUN_E2E_TESTS_EXPLORATION_FEEDBACK_TAB" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="explorationFeedbackTab" --prod_env --skip_install; fi -- if [ "$RUN_E2E_TESTS_EXPLORATION_HISTORY_TAB" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="explorationHistoryTab" --prod_env --skip_install; fi -- if [ "$RUN_E2E_TESTS_EXPLORATION_IMPROVEMENTS_TAB" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="explorationImprovementsTab" --prod_env --skip_install; fi -- if [ "$RUN_E2E_TESTS_EXPLORATION_STATISTICS_TAB" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="explorationStatisticsTab" --prod_env --skip_install; fi -- if 
[ "$RUN_E2E_TESTS_EXPLORATION_TRANSLATION_TAB" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="explorationTranslationTab" --prod_env --skip_install; fi -- if [ "$RUN_E2E_TESTS_EXTENSIONS" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="extensions" --prod_env --skip_install; fi -- if [ "$RUN_E2E_TESTS_LEARNER_DASHBOARD" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="learnerDashboard" --prod_env --skip_install; fi -- if [ "$RUN_E2E_TESTS_LEARNER" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="learner" --prod_env --skip_install; fi -- if [ "$RUN_E2E_TESTS_LIBRARY" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="library" --prod_env --skip_install; fi -- if [ "$RUN_E2E_TESTS_NAVIGATION" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="navigation" --prod_env --skip_install; fi -- if [ "$RUN_E2E_TESTS_PREFERENCES" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="preferences" --prod_env --skip_install; fi -- if [ "$RUN_E2E_TESTS_PROFILE_MENU" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="profileMenu" --prod_env --skip_install; fi -- if [ "$RUN_E2E_TESTS_PUBLICATION" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="publication" --prod_env --skip_install; fi -- if [ "$RUN_E2E_TESTS_SKILL_EDITOR" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="skillEditor" --prod_env --skip_install; fi -- if [ "$RUN_E2E_TESTS_SUBSCRIPTIONS" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="subscriptions" --prod_env --skip_install; fi -- if [ "$RUN_E2E_TESTS_TOPICS_AND_SKILLS_DASHBOARD" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="topicsAndSkillsDashboard" --prod_env --skip_install; fi -- if [ "$RUN_E2E_TESTS_TOPIC_AND_STORY_EDITOR" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests 
--suite="topicAndStoryEditor" --prod_env --skip_install; fi -- if [ "$RUN_E2E_TESTS_USERS" == 'true' ]; then travis_retry python -m scripts.run_e2e_tests --suite="users" --prod_env --skip_install; fi +- if [ "$RUN_E2E_TESTS_ACCESSIBILITY" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="accessibility" --prod_env; fi +- if [ "$RUN_E2E_TESTS_ADDITIONAL_EDITOR_AND_PLAYER_FEATURES" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="additionalEditorAndPlayerFeatures" --prod_env; fi +- if [ "$RUN_E2E_TESTS_COLLECTIONS" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="collections" --prod_env; fi +- if [ "$RUN_E2E_TESTS_CORE_EDITOR_AND_PLAYER_FEATURES" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="coreEditorAndPlayerFeatures" --prod_env; fi +- if [ "$RUN_E2E_TESTS_CREATOR_DASHBOARD" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="creatorDashboard" --prod_env; fi +- if [ "$RUN_E2E_TESTS_EMBEDDING" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="embedding" --prod_env; fi +- if [ "$RUN_E2E_TESTS_EXPLORATION_FEEDBACK_TAB" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="explorationFeedbackTab" --prod_env; fi +- if [ "$RUN_E2E_TESTS_EXPLORATION_HISTORY_TAB" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="explorationHistoryTab" --prod_env; fi +- if [ "$RUN_E2E_TESTS_EXPLORATION_IMPROVEMENTS_TAB" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="explorationImprovementsTab" --prod_env; fi +- if [ "$RUN_E2E_TESTS_EXPLORATION_STATISTICS_TAB" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="explorationStatisticsTab" --prod_env; fi +- if [ "$RUN_E2E_TESTS_EXPLORATION_TRANSLATION_TAB" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="explorationTranslationTab" --prod_env; fi +- if [ "$RUN_E2E_TESTS_EXTENSIONS" == 'true' ]; then travis_retry bash 
scripts/run_e2e_tests.sh --suite="extensions" --prod_env; fi +- if [ "$RUN_E2E_TESTS_LEARNER_DASHBOARD" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="learnerDashboard" --prod_env; fi +- if [ "$RUN_E2E_TESTS_LEARNER" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="learner" --prod_env; fi +- if [ "$RUN_E2E_TESTS_LIBRARY" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="library" --prod_env; fi +- if [ "$RUN_E2E_TESTS_NAVIGATION" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="navigation" --prod_env; fi +- if [ "$RUN_E2E_TESTS_PREFERENCES" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="preferences" --prod_env; fi +- if [ "$RUN_E2E_TESTS_PROFILE_MENU" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="profileMenu" --prod_env; fi +- if [ "$RUN_E2E_TESTS_PUBLICATION" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="publication" --prod_env; fi +- if [ "$RUN_E2E_TESTS_SKILL_EDITOR" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="skillEditor" --prod_env; fi +- if [ "$RUN_E2E_TESTS_SUBSCRIPTIONS" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="subscriptions" --prod_env; fi +- if [ "$RUN_E2E_TESTS_TOPICS_AND_SKILLS_DASHBOARD" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="topicsAndSkillsDashboard" --prod_env; fi +- if [ "$RUN_E2E_TESTS_TOPIC_AND_STORY_EDITOR" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="topicAndStoryEditor" --prod_env; fi +- if [ "$RUN_E2E_TESTS_USERS" == 'true' ]; then travis_retry bash scripts/run_e2e_tests.sh --suite="users" --prod_env; fi # These lines are commented out because these checks are being run on CircleCI # here: https://circleci.com/gh/oppia/oppia # after_success: diff --git a/scripts/run_e2e_tests.py b/scripts/run_e2e_tests.py deleted file mode 100644 index 20dbb384c016..000000000000 --- 
a/scripts/run_e2e_tests.py +++ /dev/null @@ -1,247 +0,0 @@ -# Copyright 2019 The Oppia Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the 'License'); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an 'AS-IS' BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Runs the end to end tests.""" -from __future__ import absolute_import # pylint: disable=import-only-modules -from __future__ import unicode_literals # pylint: disable=import-only-modules - -import argparse -import atexit -import fileinput -import os -import re -import shutil -import subprocess -import time - -import python_utils - -from . import build -from . import common -from . import install_chrome_on_travis -from . import install_third_party_libs -from . import setup -from . import setup_gae - -_PARSER = argparse.ArgumentParser(description=""" -Run this script from the oppia root folder: - python -m scripts.run_e2e_tests -The root folder MUST be named 'oppia'. -Note: You can replace 'it' with 'fit' or 'describe' with 'fdescribe' to run -a single test or test suite. 
-""") - -_PARSER.add_argument( - '--skip_install', - help='optional; if specified, skips installing dependencies', - action='store_true') -_PARSER.add_argument( - '--run_minified_tests', - help='optional; if specified, runs frontend karma tests on both minified ' - 'and non-minified code', - action='store_true') -_PARSER.add_argument( - '--prod_env', - help='optional; if specified, emulate running Oppia in a production ' - 'environment.', - action='store_true') -_PARSER.add_argument( - '--browserstack', - help='optional; if specified, run the e2e tests on browserstack.', - action='store_true') -_PARSER.add_argument( - '--suite', - help='Performs test for different suites. Performs a full test by default.', - default='full') -_PARSER.add_argument( - '--sharding', - help='optional; if specified, Disables parallelization of protractor tests', - action='store_true') -_PARSER.add_argument( - '--sharding_instances', - help='Sets the number of parallel browsers to open while sharding', - default='3') - -PORT_NUMBER_FOR_SELENIUM_SERVER = 4444 -PORT_NUMBER_FOR_GAE_SERVER = 9001 -USUAL_PORT_NUMBER_FOR_GAE_SERVER_IN_START = 8181 - - -def cleanup(): - """Send a kill signal to the dev server and Selenium server.""" - common.kill_process(PORT_NUMBER_FOR_SELENIUM_SERVER) - common.kill_process(PORT_NUMBER_FOR_GAE_SERVER) - - # Wait for the servers to go down; suppress 'connection refused' error - # output from nc since that is exactly what we are expecting to happen. 
- while common.is_port_open( - PORT_NUMBER_FOR_SELENIUM_SERVER) or common.is_port_open( - PORT_NUMBER_FOR_GAE_SERVER): - time.sleep(1) - - if os.path.isdir(os.path.join('..', 'protractor-screenshots')): - common.print_each_string_after_two_new_lines([ - 'Note: If ADD_SCREENSHOT_REPORTER is set to true in', - 'core/tests/protractor.conf.js, you can view screenshots', - 'of the failed tests in ../protractor-screenshots/']) - - python_utils.PRINT('Done!') - - -def main(args=None): - """Runs the end to end tests.""" - parsed_args = _PARSER.parse_args(args=args) - - setup.main(args=[]) - setup_gae.main(args=[]) - if os.environ.get('TRAVIS'): - install_chrome_on_travis.main(args=[]) - - install_third_party_libs.maybe_install_dependencies( - parsed_args.skip_install, parsed_args.run_minified_tests) - - if common.is_port_open(USUAL_PORT_NUMBER_FOR_GAE_SERVER_IN_START): - common.print_each_string_after_two_new_lines([ - 'There is already a server running on localhost:%s.' - % python_utils.UNICODE(USUAL_PORT_NUMBER_FOR_GAE_SERVER_IN_START), - 'Please terminate it before running the end-to-end tests.', - 'Exiting.']) - raise Exception - - if common.is_port_open(PORT_NUMBER_FOR_GAE_SERVER): - common.print_each_string_after_two_new_lines([ - 'There is already a server running on localhost:%s.' - % python_utils.UNICODE(PORT_NUMBER_FOR_GAE_SERVER), - 'Please terminate it before running the end-to-end tests.', - 'Exiting.']) - raise Exception - - # Forces the cleanup function to run on exit. - # Developers: note that at the end of this script, the cleanup() function at - # the top of the file is run. - atexit.register(cleanup) - - if parsed_args.prod_env: - dev_mode = 'false' - python_utils.PRINT('Generating files for production mode...') - constants_env_variable = '\'DEV_MODE\': false' - for line in fileinput.input( - files=[os.path.join('assets', 'constants.ts')], inplace=True): - # Inside this loop the STDOUT will be redirected to the file, - # constants.ts. 
The end='' is needed to avoid double line breaks. - python_utils.PRINT( - re.sub(r'\'DEV_MODE\': .*', constants_env_variable, line), - end='') - build.main(args=['--prod_env']) - app_yaml_filepath = 'app.yaml' - else: - dev_mode = 'true' - constants_env_variable = '\'DEV_MODE\': true' - for line in fileinput.input( - files=[os.path.join('assets', 'constants.ts')], inplace=True): - # Inside this loop the STDOUT will be redirected to the file, - # constants.ts. The end='' is needed to avoid double line breaks. - python_utils.PRINT( - re.sub(r'\'DEV_MODE\': .*', constants_env_variable, line), - end='') - build.main(args=[]) - app_yaml_filepath = 'app_dev.yaml' - - # Start a selenium server using chromedriver 2.41. - # The 'detach' option continues the flow once the server is up and runnning. - # The 'quiet' option prints only the necessary information about the server - # start-up process. - subprocess.call([ - os.path.join(common.NODE_MODULES_PATH, '.bin', 'webdriver-manager'), - 'update', '--versions.chrome', '2.41']) - subprocess.call([ - os.path.join(common.NODE_MODULES_PATH, '.bin', 'webdriver-manager'), - 'start', '--versions.chrome', '2.41', '--detach', '--quiet']) - - # Start a selenium process. The program sends thousands of lines of useless - # info logs to stderr so we discard them. - # TODO(jacob): Find a webdriver or selenium argument that controls log - # level. - subprocess.Popen( - '(%s start 2>/dev/null)&' - % os.path.join(common.NODE_MODULES_PATH, '.bin', 'webdriver-manager'), - shell=True) - # Start a demo server. - subprocess.Popen( - '(python %s/dev_appserver.py --host=0.0.0.0 --port=%s ' - '--clear_datastore=yes --dev_appserver_log_level=critical ' - '--log_level=critical --skip_sdk_update_check=true %s)&' % ( - common.GOOGLE_APP_ENGINE_HOME, - python_utils.UNICODE(PORT_NUMBER_FOR_GAE_SERVER), - app_yaml_filepath), shell=True) - - # Wait for the servers to come up. 
- while not common.is_port_open( - PORT_NUMBER_FOR_SELENIUM_SERVER) or not common.is_port_open( - PORT_NUMBER_FOR_GAE_SERVER): - time.sleep(1) - - # Delete outdated screenshots. - if os.path.isdir(os.path.join('..', 'protractor-screenshots')): - shutil.rmtree(os.path.join('..', 'protractor-screenshots')) - - # Run the end-to-end tests. The conditional is used to run protractor - # without any sharding parameters if it is disabled. This helps with - # isolated tests. Isolated tests do not work properly unless no sharding - # parameters are passed in at all. - # TODO(bhenning): Figure out if this is a bug with protractor. - if not parsed_args.browserstack: - if not parsed_args.sharding or parsed_args.sharding_instances == '1': - subprocess.call([ - os.path.join( - common.NODE_MODULES_PATH, 'protractor', 'bin', - 'protractor'), - os.path.join('core', 'tests', 'protractor.conf.js'), '--suite', - parsed_args.suite, '--params.devMode="%s"' % dev_mode]) - else: - subprocess.call([ - os.path.join( - common.NODE_MODULES_PATH, 'protractor', 'bin', - 'protractor'), - os.path.join('core', 'tests', 'protractor.conf.js'), - '--capabilities.shardTestFiles=%s' % parsed_args.sharding, - '--capabilities.maxInstances=%s' - % parsed_args.sharding_instances, '--suite', parsed_args.suite, - '--params.devMode="%s"' % dev_mode]) - else: - python_utils.PRINT('Running the tests on browserstack...') - if not parsed_args.sharding or parsed_args.sharding_instances == '1': - subprocess.call([ - os.path.join( - common.NODE_MODULES_PATH, 'protractor', 'bin', - 'protractor'), - os.path.join( - 'core', 'tests', 'protractor-browserstack.conf.js'), - '--suite', parsed_args.suite, - '--params.devMode="%s"' % dev_mode]) - else: - subprocess.call([ - os.path.join( - common.NODE_MODULES_PATH, 'protractor', 'bin', - 'protractor'), - os.path.join( - 'core', 'tests', 'protractor-browserstack.conf.js'), - '--capabilities.shardTestFiles=%s' % parsed_args.sharding, - '--capabilities.maxInstances=%s' - % 
parsed_args.sharding_instances, '--suite', parsed_args.suite, - '--params.devMode="%s"' % dev_mode]) - - -if __name__ == '__main__': - main() diff --git a/scripts/run_e2e_tests.sh b/scripts/run_e2e_tests.sh new file mode 100644 index 000000000000..ee73165dcad5 --- /dev/null +++ b/scripts/run_e2e_tests.sh @@ -0,0 +1,227 @@ +#!/usr/bin/env bash + +# Copyright 2014 The Oppia Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS-IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +########################################################################## + +# INSTRUCTIONS: +# +# Run this script from the oppia root folder: +# bash scripts/run_e2e_tests.sh +# +# Optional arguments: +# --browserstack Run the tests on browserstack using the +# protractor-browserstack.conf.js file. +# --skip-install=true/false If true, skips installing dependencies. The +# default value is false. +# --sharding=true/false Disables/Enables parallelization of protractor tests. +# --sharding-instances=# Sets the number of parallel browsers to open while +# sharding. +# --prod_env Run the tests in prod mode. Static resources are served from +# build directory and use cache slugs. +# Sharding must be disabled (either by passing in false to --sharding or 1 to +# --sharding-instances) if running any tests in isolation (fit or fdescribe). +# --suite=suite_name Performs test for different suites, here suites are the +# name of the test files present in core/tests/protractor_desktop/ and +# core/test/protractor/ dirs. e.g. 
for the file +# core/tests/protractor/accessibility.js use --suite=accessibility. +# For performing a full test, no argument is required. +# +# The root folder MUST be named 'oppia'. +# +# Note: You can replace 'it' with 'fit' or 'describe' with 'fdescribe' to run a +# single test or test suite. + +function cleanup { + # Send a kill signal to the dev server and Selenium server. The awk command + # gets just the process ID from the grepped line. + kill `ps aux | grep "[Dd]ev_appserver.py --host=0.0.0.0 --port=9001" | awk '{print $2}'` + kill `ps aux | grep node_modules/webdriver-manager/selenium | awk '{print $2}'` + + # Wait for the servers to go down; suppress "connection refused" error output + # from nc since that is exactly what we are expecting to happen. + while ( nc -vz localhost 4444 >/dev/null 2>&1 ); do sleep 1; done + while ( nc -vz localhost 9001 >/dev/null 2>&1 ); do sleep 1; done + + if [ -d "../protractor-screenshots" ]; then + echo "" + echo " Note: If ADD_SCREENSHOT_REPORTER is set to true in" + echo " core/tests/protractor.conf.js, you can view screenshots" + echo " of the failed tests in ../protractor-screenshots/" + echo "" + fi + + echo Done! +} + +if [ -z "$BASH_VERSION" ] +then + echo "" + echo " Please run me using bash: " + echo "" + echo " bash $0" + echo "" + return 1 +fi + +set -e +python -m scripts.setup +python -m scripts.setup_gae +if [ "$TRAVIS" == 'true' ]; then + python -m scripts.install_chrome_on_travis +fi + +if ( nc -vz localhost 8181 ); then + echo "" + echo " There is already a server running on localhost:8181." + echo " Please terminate it before running the end-to-end tests." + echo " Exiting." + echo "" + exit 1 +fi + +if ( nc -vz localhost 9001 ); then + echo "" + echo " There is a already a server running on localhost:9001." + echo " Please terminate it before running the end-to-end tests." + echo " Exiting." + echo "" + exit 1 +fi + + +# Forces the cleanup function to run on exit. 
+# Developers: note that at the end of this script, the cleanup() function at +# the top of the file is run. +trap cleanup EXIT + +# Argument passed to feconf.py to help choose production templates folder. +DEV_MODE=true +RUN_ON_BROWSERSTACK=False +for arg in "$@"; do + # Used to emulate running Oppia in a production environment. + if [ "$arg" == "--prod_env" ]; then + DEV_MODE=false + echo " Generating files for production mode..." + fi + + # Used to run the e2e tests on browserstack. + if [ "$arg" == "--browserstack" ]; then + RUN_ON_BROWSERSTACK=True + echo " Running the tests on browserstack..." + fi +done + +if [[ "DEV_MODE" == "true" ]]; then + constants_env_variable="\"DEV_MODE\": true" + sed -i.bak -e s/"\"DEV_MODE\": .*"/"$constants_env_variable"/ assets/constants.ts + python -m scripts.build + APP_YAML_FILEPATH="app_dev.yaml" +else + constants_env_variable="\"DEV_MODE\": false" + sed -i.bak -e s/"\"DEV_MODE\": .*"/"$constants_env_variable"/ assets/constants.ts + python -m scripts.build --prod_env + APP_YAML_FILEPATH="app.yaml" +fi + +# Delete the modified feconf.py file(-i.bak) +rm assets/constants.ts.bak + +# Start a selenium server using chromedriver 2.41. +# The 'detach' option continues the flow once the server is up and runnning. +# The 'quiet' option prints only the necessary information about the server start-up +# process. +node_modules/.bin/webdriver-manager update --versions.chrome 2.41 +node_modules/.bin/webdriver-manager start --versions.chrome 2.41 --detach --quiet + +# Start a selenium process. The program sends thousands of lines of useless +# info logs to stderr so we discard them. +# TODO(jacob): Find a webdriver or selenium argument that controls log level. +(node_modules/.bin/webdriver-manager start 2>/dev/null)& +# Start a demo server. 
+(python ../oppia_tools/google_appengine_1.9.67/google_appengine/dev_appserver.py --host=0.0.0.0 --port=9001 --clear_datastore=yes --dev_appserver_log_level=critical --log_level=critical --skip_sdk_update_check=true $APP_YAML_FILEPATH)& + +# Wait for the servers to come up. +while ! nc -vz localhost 4444; do sleep 1; done +while ! nc -vz localhost 9001; do sleep 1; done + +# Delete outdated screenshots +if [ -d "../protractor-screenshots" ]; then + rm -r ../protractor-screenshots +fi + +# Parse additional command line arguments that may be passed to protractor. +# Credit: http://stackoverflow.com/questions/192249 +# Passing different suites and sharding parameters for tests. +SUITE="full" +SHARDING=true +SHARD_INSTANCES=3 +for j in "$@"; do + # Match each space-separated argument passed to the shell file to a separate + # case label, based on a pattern. E.g. Match to -suite=*, -sharding=*, where the + # asterisk refers to any characters following the equals sign, other than + # whitespace. + case $j in + --suite=*) + # Extract the value right of the equal sign by substringing the $i variable + # at the equal sign. + # http://tldp.org/LDP/abs/html/string-manipulation.html + SUITE="${j#*=}" + # Shifts the argument parameters over by one. E.g. $2 becomes $1, etc. + shift + ;; + + --sharding=*) + SHARDING="${j#*=}" + shift + ;; + + --sharding-instances=*) + SHARD_INSTANCES="${j#*=}" + shift + ;; + + --prod_env*) + shift + ;; + + --browserstack*) + shift + ;; + + *) + echo "Error: Unknown command line option: $j" + ;; + esac +done + +# Run the end-to-end tests. The conditional is used to run protractor without +# any sharding parameters if it is disabled. This helps with isolated tests. +# Isolated tests do not work properly unless no sharding parameters are passed +# in at all. +# TODO(bhenning): Figure out if this is a bug with protractor. 
+if [ "$RUN_ON_BROWSERSTACK" == "False" ]; then + if [ "$SHARDING" = "false" ] || [ "$SHARD_INSTANCES" = "1" ]; then + node_modules/protractor/bin/protractor core/tests/protractor.conf.js --suite "$SUITE" --params.devMode="$DEV_MODE" + else + node_modules/protractor/bin/protractor core/tests/protractor.conf.js --capabilities.shardTestFiles="$SHARDING" --capabilities.maxInstances=$SHARD_INSTANCES --suite "$SUITE" --params.devMode="$DEV_MODE" + fi +else + if [ "$SHARDING" = "false" ] || [ "$SHARD_INSTANCES" = "1" ]; then + node_modules/protractor/bin/protractor core/tests/protractor-browserstack.conf.js --suite "$SUITE" --params.devMode="$DEV_MODE" + else + node_modules/protractor/bin/protractor core/tests/protractor-browserstack.conf.js --capabilities.shardTestFiles="$SHARDING" --capabilities.maxInstances=$SHARD_INSTANCES --suite "$SUITE" --params.devMode="$DEV_MODE" + fi +fi From c3006d265f7b411ab042beefc400bd39e267e9dc Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Tue, 3 Sep 2019 19:04:33 +0530 Subject: [PATCH 134/141] remove yaml --- .circleci/config.yml | 1 - .travis.yml | 1 - scripts/third_party_size_check.py | 8 ++++++-- utils.py | 6 +++++- 4 files changed, 11 insertions(+), 5 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 5bfcfc8a56c0..f515e269e4e0 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -15,7 +15,6 @@ anchor_for_job_defaults: &job_defaults anchor_for_installing_dependencies: &install_dependencies name: Install dependencies command: | - sudo pip install pyyaml python -m scripts.install_third_party_libs anchor_for_installing_cc_test_reporter: &install_cc diff --git a/.travis.yml b/.travis.yml index 7844eb141232..502f800a7c10 100644 --- a/.travis.yml +++ b/.travis.yml @@ -80,7 +80,6 @@ before_install: - source /opt/jdk_switcher/jdk_switcher.sh - jdk_switcher use oraclejdk8 - pip install codecov -- pip install pyyaml - export CHROME_BIN=/usr/bin/google-chrome-stable - export DISPLAY=:99.0 - bash -e 
/etc/init.d/xvfb start diff --git a/scripts/third_party_size_check.py b/scripts/third_party_size_check.py index 4194945979e8..5c6f9e0bc29a 100644 --- a/scripts/third_party_size_check.py +++ b/scripts/third_party_size_check.py @@ -23,9 +23,13 @@ import os import sys -import yaml -import python_utils # isort:skip +import python_utils + +_YAML_PATH = os.path.join(os.getcwd(), '..', 'oppia_tools', 'pyyaml-5.1.2') +sys.path.insert(0, _YAML_PATH) + +import yaml # isort:skip #pylint: disable=wrong-import-position THIRD_PARTY_PATH = os.path.join(os.getcwd(), 'third_party') THIRD_PARTY_SIZE_LIMIT = 7000 diff --git a/utils.py b/utils.py index 7aaa8c17a3dd..915d4c90c215 100644 --- a/utils.py +++ b/utils.py @@ -26,6 +26,7 @@ import random import re import string +import sys import time import unicodedata @@ -33,7 +34,10 @@ import feconf import python_utils -import yaml +_YAML_PATH = os.path.join(os.getcwd(), '..', 'oppia_tools', 'pyyaml-5.1.2') +sys.path.insert(0, _YAML_PATH) + +import yaml # isort:skip #pylint: disable=wrong-import-position class InvalidInputException(Exception): From 2459c84b58c5e26b16a9787026af831f7cbbcd7b Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Tue, 3 Sep 2019 19:41:45 +0530 Subject: [PATCH 135/141] fix lint --- scripts/build_test.py | 1 - scripts/run_tests.py | 4 ++-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/scripts/build_test.py b/scripts/build_test.py index 26cd4a883e3f..81e2519da013 100644 --- a/scripts/build_test.py +++ b/scripts/build_test.py @@ -24,7 +24,6 @@ import os import random import subprocess -import sys import tempfile import threading diff --git a/scripts/run_tests.py b/scripts/run_tests.py index 40262ee811b9..11342b2ced8b 100644 --- a/scripts/run_tests.py +++ b/scripts/run_tests.py @@ -26,11 +26,11 @@ from __future__ import unicode_literals # pylint: disable=import-only-modules import argparse +import subprocess import python_utils from . import run_backend_tests -from . import run_e2e_tests from . 
import run_frontend_tests from . import setup from . import setup_gae @@ -64,7 +64,7 @@ def main(args=None): # Run end-to-end tests. python_utils.PRINT('Running end-to-end tests') - run_e2e_tests.main(args=[]) + subprocess.Popen('bash scripts/run_e2e_tests.sh', shell=True) python_utils.PRINT( 'SUCCESS All frontend, backend and end-to-end tests passed!') From 89f61e63de49aaa45042aa6a9db024cf79b4bd58 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Thu, 5 Sep 2019 01:22:34 +0530 Subject: [PATCH 136/141] use single quotes --- scripts/build.py | 10 ++++----- scripts/clean.py | 5 ++--- scripts/create_expression_parser.py | 9 ++++---- scripts/install_chrome_on_travis.py | 9 ++++---- scripts/install_third_party.py | 5 ++--- scripts/install_third_party_libs.py | 5 ++--- scripts/release_info.py | 7 +++--- scripts/run_backend_tests.py | 9 ++++---- scripts/run_frontend_tests.py | 13 ++++++------ scripts/run_performance_tests.py | 17 +++++++-------- scripts/run_presubmit_checks.py | 33 +++++++++++++++-------------- scripts/run_tests.py | 15 ++++++------- scripts/setup.py | 5 ++--- scripts/setup_gae.py | 6 +++--- scripts/start.py | 9 ++++---- 15 files changed, 73 insertions(+), 84 deletions(-) diff --git a/scripts/build.py b/scripts/build.py index c056d8748426..c65a49be4a90 100644 --- a/scripts/build.py +++ b/scripts/build.py @@ -143,11 +143,11 @@ APP_DEV_YAML_FILEPATH = 'app_dev.yaml' APP_YAML_FILEPATH = 'app.yaml' -_PARSER = argparse.ArgumentParser(description=""" -Creates a third-party directory where all the JS and CSS dependencies are -built and stored. Depending on the options passed to the script, might also -minify third-party libraries and/or generate a build directory. -""") +_PARSER = argparse.ArgumentParser( + description='Creates a third-party directory where all the JS and CSS ' + 'dependencies are built and stored. 
Depending on the options passed to the ' + 'script, might also minify third-party libraries and/or generate a build ' + 'directory.') _PARSER.add_argument( '--prod_env', action='store_true', default=False, dest='prod_mode') diff --git a/scripts/clean.py b/scripts/clean.py index 778425b0f6ec..4eeea439bc9a 100644 --- a/scripts/clean.py +++ b/scripts/clean.py @@ -25,9 +25,8 @@ CURR_DIR = os.path.abspath(os.getcwd()) OPPIA_TOOLS_DIR = os.path.join(CURR_DIR, '..', 'oppia_tools') -_PARSER = argparse.ArgumentParser(description=""" -Deletes temporary and installed files. -""") +_PARSER = argparse.ArgumentParser( + description='Deletes temporary and installed files.') def delete_directory_tree(directory_path): diff --git a/scripts/create_expression_parser.py b/scripts/create_expression_parser.py index a018d6ed6db8..49eb36b0a76c 100644 --- a/scripts/create_expression_parser.py +++ b/scripts/create_expression_parser.py @@ -27,11 +27,10 @@ from . import common from . import setup -_PARSER = argparse.ArgumentParser(description=""" -Run this script from the oppia root folder: - python -m scripts.create_expression_parser -The root folder MUST be named 'oppia'. -""") +_PARSER = argparse.ArgumentParser( + description='Run this script from the oppia root folder: ' + 'python -m scripts.create_expression_parser. ' + 'The root folder MUST be named \'oppia\'.') def main(args=None): diff --git a/scripts/install_chrome_on_travis.py b/scripts/install_chrome_on_travis.py index cf97ca18314e..ced69df34185 100644 --- a/scripts/install_chrome_on_travis.py +++ b/scripts/install_chrome_on_travis.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""This script should only be ran by Travis to install and provide a constant +"""This script should only be run by Travis to install and provide a constant version of Chrome. 
""" from __future__ import absolute_import # pylint: disable=import-only-modules @@ -24,10 +24,9 @@ import python_utils -_PARSER = argparse.ArgumentParser(description=""" -This script should only be ran by Travis to install and provide a constant -version of Chrome. -""") +_PARSER = argparse.ArgumentParser( + description='This script should only be run by Travis to install and ' + 'provide a constant version of Chrome.') def main(args=None): diff --git a/scripts/install_third_party.py b/scripts/install_third_party.py index de4a271d360f..12bc4c678fae 100644 --- a/scripts/install_third_party.py +++ b/scripts/install_third_party.py @@ -70,9 +70,8 @@ } } -_PARSER = argparse.ArgumentParser(description=""" -Installation script for Oppia third-party libraries. -""") +_PARSER = argparse.ArgumentParser( + description='Installation script for Oppia third-party libraries.') def download_files(source_url_root, target_dir, source_filenames): diff --git a/scripts/install_third_party_libs.py b/scripts/install_third_party_libs.py index 553ed0fee333..bdb4876b05b9 100644 --- a/scripts/install_third_party_libs.py +++ b/scripts/install_third_party_libs.py @@ -56,9 +56,8 @@ # pylint: enable=wrong-import-order # pylint: enable=wrong-import-position -_PARSER = argparse.ArgumentParser(description=""" -Installation script for Oppia third-party libraries. -""") +_PARSER = argparse.ArgumentParser( + description='Installation script for Oppia third-party libraries.') _PARSER.add_argument( '--nojsrepl', diff --git a/scripts/release_info.py b/scripts/release_info.py index 9af7a6dbcde1..5dc6be943d08 100644 --- a/scripts/release_info.py +++ b/scripts/release_info.py @@ -55,10 +55,9 @@ Log = collections.namedtuple('Log', ['sha1', 'author', 'email', 'message']) -_PARSER = argparse.ArgumentParser(description=""" -Script that simplifies releases by collecting various information. -Should be run from the oppia root dir. 
-""") +_PARSER = argparse.ArgumentParser( + description='Script that simplifies releases by collecting various ' + 'information. Should be run from the oppia root dir.') _PARSER.add_argument( '--personal_access_token', diff --git a/scripts/run_backend_tests.py b/scripts/run_backend_tests.py index 74092bf3bcb8..06f5bc7e5e48 100644 --- a/scripts/run_backend_tests.py +++ b/scripts/run_backend_tests.py @@ -80,11 +80,10 @@ _LOAD_TESTS_DIR = os.path.join(os.getcwd(), 'core', 'tests', 'load_tests') -_PARSER = argparse.ArgumentParser(description=""" -Run this script from the oppia root folder: - python -m scripts.run_backend_tests -IMPORTANT: Only one of --test_path and --test_target should be specified. -""") +_PARSER = argparse.ArgumentParser( + description='Run this script from the oppia root folder: ' + 'python -m scripts.run_backend_tests. ' + 'IMPORTANT: Only one of --test_path and --test_target should be specified.') _EXCLUSIVE_GROUP = _PARSER.add_mutually_exclusive_group() _EXCLUSIVE_GROUP.add_argument( diff --git a/scripts/run_frontend_tests.py b/scripts/run_frontend_tests.py index c93276cdab34..a8c783272ff8 100644 --- a/scripts/run_frontend_tests.py +++ b/scripts/run_frontend_tests.py @@ -28,13 +28,12 @@ from . import setup from . import setup_gae -_PARSER = argparse.ArgumentParser(description=""" -Run this script from the oppia root folder: - python -m scripts.run_frontend_tests -The root folder MUST be named 'oppia'. -Note: You can replace 'it' with 'fit' or 'describe' with 'fdescribe' to run -a single test or test suite. -""") +_PARSER = argparse.ArgumentParser( + description='Run this script from the oppia root folder: ' + 'python -m scripts.run_frontend_tests. ' + 'The root folder MUST be named \'oppia\'. 
' + 'Note: You can replace \'it\' with \'fit\' or \'describe\' with ' + '\'fdescribe\' to run a single test or test suite.') _PARSER.add_argument( '--skip_install', diff --git a/scripts/run_performance_tests.py b/scripts/run_performance_tests.py index e83fca28abf2..f6f2d9090d7f 100644 --- a/scripts/run_performance_tests.py +++ b/scripts/run_performance_tests.py @@ -30,15 +30,14 @@ from . import setup from . import setup_gae -_PARSER = argparse.ArgumentParser(description=""" -The root folder MUST be named 'oppia'. -Run all tests sequentially: - python -m scripts.run_performance_tests without args in order to run all - tests sequentially. -Run test for a specific page: - python -m scripts.run_performance_tests --test_name=page_test -page_test is the name of the file containing that test eg. splash_test. -""") +_PARSER = argparse.ArgumentParser( + description='The root folder MUST be named \'oppia\'. ' + 'Run all tests sequentially: ' + 'python -m scripts.run_performance_tests without args in order to run all ' + 'tests sequentially. ' + 'Run test for a specific page: ' + 'python -m scripts.run_performance_tests --test_name=page_test. ' + 'page_test is the name of the file containing that test eg. splash_test.') _PARSER.add_argument( '--skip_install', diff --git a/scripts/run_presubmit_checks.py b/scripts/run_presubmit_checks.py index 51494f65aefd..f979ef44ccd7 100644 --- a/scripts/run_presubmit_checks.py +++ b/scripts/run_presubmit_checks.py @@ -31,22 +31,23 @@ from . import run_backend_tests from . import run_frontend_tests -_PARSER = argparse.ArgumentParser(description=""" -Run this script from the oppia root folder prior to opening a PR: - python -m scripts.run_presubmit_checks -Set the origin branch to compare against by adding ---branch=your_branch or -b=your_branch -By default, if the current branch tip exists on remote origin, -the current branch is compared against its tip on GitHub. -Otherwise it's compared against 'develop'. 
-This script runs the following tests in all cases. -- Javascript and Python Linting -- Backend Python tests -Only when frontend files are changed will it run Frontend Karma unit tests. -If any of these tests result in errors, this script will terminate. -Note: The test scripts are arranged in increasing order of time taken. This -enables a broken build to be detected as quickly as possible. -""") +_PARSER = argparse.ArgumentParser( + description='Run this script from the oppia root folder prior to opening a ' + 'PR: ' + 'python -m scripts.run_presubmit_checks ' + 'Set the origin branch to compare against by adding ' + '--branch=your_branch or -b=your_branch ' + 'By default, if the current branch tip exists on remote origin, ' + 'the current branch is compared against its tip on GitHub. ' + 'Otherwise it\'s compared against \'develop\'. ' + 'This script runs the following tests in all cases. ' + '- Javascript and Python Linting ' + '- Backend Python tests ' + 'Only when frontend files are changed will it run Frontend Karma unit ' + 'tests. ' + 'If any of these tests result in errors, this script will terminate. ' + 'Note: The test scripts are arranged in increasing order of time taken. ' + 'This enables a broken build to be detected as quickly as possible.') _PARSER.add_argument( '--branch', '-b', diff --git a/scripts/run_tests.py b/scripts/run_tests.py index 11342b2ced8b..489d0eac145b 100644 --- a/scripts/run_tests.py +++ b/scripts/run_tests.py @@ -35,14 +35,13 @@ from . import setup from . 
import setup_gae -_PARSER = argparse.ArgumentParser(description=""" -Run this script from the oppia root folder: - python -m scripts.run_tests -This script runs all the tests, in this order: -- Frontend Karma unit tests -- Backend Python tests -- End-to-end Protractor tests -""") +_PARSER = argparse.ArgumentParser( + description='Run this script from the oppia root folder: ' + 'python -m scripts.run_tests ' + 'This script runs all the tests, in this order: ' + '- Frontend Karma unit tests ' + '- Backend Python tests ' + '- End-to-end Protractor tests ') def main(args=None): diff --git a/scripts/setup.py b/scripts/setup.py index 7df452b6f723..796273900116 100644 --- a/scripts/setup.py +++ b/scripts/setup.py @@ -26,9 +26,8 @@ from . import common -_PARSER = argparse.ArgumentParser(description=""" -Python execution environent set up for all scripts. -""") +_PARSER = argparse.ArgumentParser( + description='Python execution environent set up for all scripts.') def delete_directory_tree(directory_path): diff --git a/scripts/setup_gae.py b/scripts/setup_gae.py index 9d72854fd615..d95b9d75d186 100644 --- a/scripts/setup_gae.py +++ b/scripts/setup_gae.py @@ -26,9 +26,9 @@ from . import common -_PARSER = argparse.ArgumentParser(description=""" -Python execution environment setup for scripts that require GAE. -""") +_PARSER = argparse.ArgumentParser( + description='Python execution environment setup for scripts that require ' + 'GAE.') def main(args=None): diff --git a/scripts/start.py b/scripts/start.py index 31f347f388da..3eebd04112ef 100644 --- a/scripts/start.py +++ b/scripts/start.py @@ -35,11 +35,10 @@ from . import setup from . import setup_gae -_PARSER = argparse.ArgumentParser(description=""" -Run the script from the oppia root folder: - python -m scripts.start -Note that the root folder MUST be named 'oppia'. 
-""") +_PARSER = argparse.ArgumentParser( + description='Run the script from the oppia root folder: ' + 'python -m scripts.start ' + 'Note that the root folder MUST be named \'oppia\'.') _PARSER.add_argument( '--save_datastore', From 89afe82839a5b779b8b409a941a3a9c6aa7520f1 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Sat, 7 Sep 2019 22:52:11 +0530 Subject: [PATCH 137/141] fix conflicts --- scripts/pre_commit_hook.py | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/pre_commit_hook.py b/scripts/pre_commit_hook.py index e9aac8a0bc46..4bf02c1ee638 100755 --- a/scripts/pre_commit_hook.py +++ b/scripts/pre_commit_hook.py @@ -130,6 +130,7 @@ def main(args=None): 'yarn to add, update, or delete the packages. For more information ' 'on how to use yarn, see https://yarnpkg.com/en/docs/usage.' ) + sys.exit(1) return From 6967564aabdb33bcf896caea58d495642c64431e Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Sat, 7 Sep 2019 22:53:18 +0530 Subject: [PATCH 138/141] test --- package-lock.json | 112 ++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 112 insertions(+) create mode 100644 package-lock.json diff --git a/package-lock.json b/package-lock.json new file mode 100644 index 000000000000..3ca3d68000de --- /dev/null +++ b/package-lock.json @@ -0,0 +1,112 @@ +{ + "name": "oppia", + "version": "2.8.1", + "description": "Oppia enables the creation of interactive online lessons.", + "main": "gulpfile.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/oppia/oppia.git" + }, + "author": "", + "license": "Apache-2.0", + "bugs": { + "url": "https://github.com/oppia/oppia/issues" + }, + "homepage": "https://github.com/oppia/oppia#readme", + "devDependencies": { + "@angular/common": "^8.0.0", + "@angular/compiler": "^8.0.0", + "@angular/core": "^8.0.0", + "@angular/forms": "^8.0.0", + "@angular/platform-browser": "^8.0.0", + 
"@angular/platform-browser-dynamic": "^8.0.0", + "@angular/router": "^8.0.0", + "@angular/upgrade": "^8.0.0", + "@mapbox/stylelint-processor-arbitrary-tags": "^0.2.0", + "@types/angular": "^1.6.54", + "@types/angular-animate": "^1.5.10", + "@types/angular-mocks": "^1.7.0", + "@types/ckeditor": "^4.9.2", + "@types/d3": "^5.7.2", + "@types/google.visualization": "0.0.48", + "@types/jasmine": "^3.3.12", + "@types/jasminewd2": "^2.0.6", + "@types/jquery": "^3.3.29", + "@types/jqueryui": "^1.12.1", + "@types/leaflet": "^1.4.0", + "@types/lodash": "^4.14.136", + "@types/mathjax": "0.0.35", + "@types/mathjs": "^5.0.0", + "@types/mousetrap": "^1.6.2", + "@types/node": "^10.14.6", + "@types/q": "^1.5.1", + "@types/select2": "^4.0.48", + "@types/selenium-webdriver": "^4.0.0", + "@typescript-eslint/eslint-plugin": "^2.0.0", + "@typescript-eslint/parser": "^2.0.0", + "ajv": "^6.10.0", + "angular": "1.6.6", + "angular-route": "1.6.6", + "babel-eslint": "^10.0.1", + "bootstrap": "3.4.1", + "browserstack-local": "^1.3.7", + "cache-loader": "^3.0.1", + "clean-webpack-plugin": "^2.0.1", + "core-js": "^2.5.3", + "css-loader": "^3.1.0", + "d3": "^5.9.2", + "dotenv": "^7.0.0", + "enhanced-resolve": "^4.1.0", + "eslint": "^6.0.0", + "eslint-plugin-angular": "^4.0.1", + "eslint-plugin-html": "^6.0.0", + "fork-ts-checker-webpack-plugin": "^1.3.3", + "gulp": "^4.0.1", + "gulp-concat": "^2.6.1", + "html-webpack-plugin": "4.0.0-beta.5", + "htmllint": "^0.8.0", + "htmllint-cli": "github:oppia/htmllint-cli#01a8b74", + "is-docker": "^2.0.0", + "istanbul-instrumenter-loader": "^3.0.1", + "jasmine-core": "^3.4.0", + "jasmine-spec-reporter": "^4.2.1", + "jquery": "^3.4.0", + "karma": "^4.1.0", + "karma-chrome-launcher": "^2.2.0", + "karma-coverage": "^1.1.2", + "karma-coverage-istanbul-reporter": "^2.0.5", + "karma-jasmine": "^2.0.1", + "karma-json-fixtures-preprocessor": "^0.0.6", + "karma-ng-html2js-preprocessor": "^1.0.0", + "karma-webpack": "^4.0.0-rc.3", + "lodash": "^4.17.14", + "mathjs": 
"5.10.3", + "moment": "2.19.3", + "postcss-loader": "3.0.0", + "postcss-syntax": "^0.36.2", + "protractor": "^5.4.2", + "protractor-jasmine2-screenshot-reporter": "^0.5.0", + "raw-loader": "3.1.0", + "reflect-metadata": "^0.1.12", + "rxjs": "^6.4.0", + "sourcemapped-stacktrace": "1.1.9", + "style-loader": "^0.23.1", + "stylelint": "^10.0.1", + "stylelint-config-standard": "^18.3.0", + "thread-loader": "^2.1.2", + "ts-loader": "^5.4.4", + "typescript": "^3.4.5", + "uglify-js": "^3.5.8", + "underscore-template-loader": "^1.0.0", + "webdriver-manager": "^12.1.4", + "webpack": "^4.30.0", + "webpack-cli": "^3.3.1", + "webpack-merge": "^4.2.1", + "yargs": "^13.2.2", + "zone.js": "^0.9.1" + }, + "keywords": [] +} From 6ba9902ef66e89257ba7a64234ab2768a50b3fcf Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Sat, 7 Sep 2019 23:09:52 +0530 Subject: [PATCH 139/141] fix linr --- package-lock.json | 112 ------------------------------------- scripts/pre_commit_hook.py | 2 +- scripts/setup.py | 2 +- 3 files changed, 2 insertions(+), 114 deletions(-) delete mode 100644 package-lock.json diff --git a/package-lock.json b/package-lock.json deleted file mode 100644 index 3ca3d68000de..000000000000 --- a/package-lock.json +++ /dev/null @@ -1,112 +0,0 @@ -{ - "name": "oppia", - "version": "2.8.1", - "description": "Oppia enables the creation of interactive online lessons.", - "main": "gulpfile.js", - "scripts": { - "test": "echo \"Error: no test specified\" && exit 1" - }, - "repository": { - "type": "git", - "url": "git+https://github.com/oppia/oppia.git" - }, - "author": "", - "license": "Apache-2.0", - "bugs": { - "url": "https://github.com/oppia/oppia/issues" - }, - "homepage": "https://github.com/oppia/oppia#readme", - "devDependencies": { - "@angular/common": "^8.0.0", - "@angular/compiler": "^8.0.0", - "@angular/core": "^8.0.0", - "@angular/forms": "^8.0.0", - "@angular/platform-browser": "^8.0.0", - "@angular/platform-browser-dynamic": "^8.0.0", - "@angular/router": 
"^8.0.0", - "@angular/upgrade": "^8.0.0", - "@mapbox/stylelint-processor-arbitrary-tags": "^0.2.0", - "@types/angular": "^1.6.54", - "@types/angular-animate": "^1.5.10", - "@types/angular-mocks": "^1.7.0", - "@types/ckeditor": "^4.9.2", - "@types/d3": "^5.7.2", - "@types/google.visualization": "0.0.48", - "@types/jasmine": "^3.3.12", - "@types/jasminewd2": "^2.0.6", - "@types/jquery": "^3.3.29", - "@types/jqueryui": "^1.12.1", - "@types/leaflet": "^1.4.0", - "@types/lodash": "^4.14.136", - "@types/mathjax": "0.0.35", - "@types/mathjs": "^5.0.0", - "@types/mousetrap": "^1.6.2", - "@types/node": "^10.14.6", - "@types/q": "^1.5.1", - "@types/select2": "^4.0.48", - "@types/selenium-webdriver": "^4.0.0", - "@typescript-eslint/eslint-plugin": "^2.0.0", - "@typescript-eslint/parser": "^2.0.0", - "ajv": "^6.10.0", - "angular": "1.6.6", - "angular-route": "1.6.6", - "babel-eslint": "^10.0.1", - "bootstrap": "3.4.1", - "browserstack-local": "^1.3.7", - "cache-loader": "^3.0.1", - "clean-webpack-plugin": "^2.0.1", - "core-js": "^2.5.3", - "css-loader": "^3.1.0", - "d3": "^5.9.2", - "dotenv": "^7.0.0", - "enhanced-resolve": "^4.1.0", - "eslint": "^6.0.0", - "eslint-plugin-angular": "^4.0.1", - "eslint-plugin-html": "^6.0.0", - "fork-ts-checker-webpack-plugin": "^1.3.3", - "gulp": "^4.0.1", - "gulp-concat": "^2.6.1", - "html-webpack-plugin": "4.0.0-beta.5", - "htmllint": "^0.8.0", - "htmllint-cli": "github:oppia/htmllint-cli#01a8b74", - "is-docker": "^2.0.0", - "istanbul-instrumenter-loader": "^3.0.1", - "jasmine-core": "^3.4.0", - "jasmine-spec-reporter": "^4.2.1", - "jquery": "^3.4.0", - "karma": "^4.1.0", - "karma-chrome-launcher": "^2.2.0", - "karma-coverage": "^1.1.2", - "karma-coverage-istanbul-reporter": "^2.0.5", - "karma-jasmine": "^2.0.1", - "karma-json-fixtures-preprocessor": "^0.0.6", - "karma-ng-html2js-preprocessor": "^1.0.0", - "karma-webpack": "^4.0.0-rc.3", - "lodash": "^4.17.14", - "mathjs": "5.10.3", - "moment": "2.19.3", - "postcss-loader": "3.0.0", - 
"postcss-syntax": "^0.36.2", - "protractor": "^5.4.2", - "protractor-jasmine2-screenshot-reporter": "^0.5.0", - "raw-loader": "3.1.0", - "reflect-metadata": "^0.1.12", - "rxjs": "^6.4.0", - "sourcemapped-stacktrace": "1.1.9", - "style-loader": "^0.23.1", - "stylelint": "^10.0.1", - "stylelint-config-standard": "^18.3.0", - "thread-loader": "^2.1.2", - "ts-loader": "^5.4.4", - "typescript": "^3.4.5", - "uglify-js": "^3.5.8", - "underscore-template-loader": "^1.0.0", - "webdriver-manager": "^12.1.4", - "webpack": "^4.30.0", - "webpack-cli": "^3.3.1", - "webpack-merge": "^4.2.1", - "yargs": "^13.2.2", - "zone.js": "^0.9.1" - }, - "keywords": [] -} diff --git a/scripts/pre_commit_hook.py b/scripts/pre_commit_hook.py index 4bf02c1ee638..2c2aedd36f1a 100755 --- a/scripts/pre_commit_hook.py +++ b/scripts/pre_commit_hook.py @@ -120,7 +120,7 @@ def main(args=None): python_utils.PRINT('Running pre-commit check for package-lock.json ...') if _does_diff_include_package_lock_file() and ( - not _does_current_folder_contain_have_package_lock_file()): + _does_current_folder_contain_have_package_lock_file()): # The following message is necessary since there git commit aborts # quietly when the status is non-zero. python_utils.PRINT('-----------COMMIT ABORTED-----------') diff --git a/scripts/setup.py b/scripts/setup.py index 6a90838e481a..434e19415956 100644 --- a/scripts/setup.py +++ b/scripts/setup.py @@ -155,7 +155,7 @@ def main(args=None): if not os.path.exists(common.YARN_PATH): python_utils.PRINT('Removing package-lock.json') clean.delete_file('package-lock.json') - python_utils.print_each_string_after_two_new_lines([ + common.print_each_string_after_two_new_lines([ 'Installing yarn', 'WARNING: Please note that Oppia uses Yarn to manage node packages', 'do *NOT* use npm. 
For more information on how to use yarn,', From 943759bbdf77be42fd307e3603c064e1c3dab590 Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Sun, 8 Sep 2019 22:24:39 +0530 Subject: [PATCH 140/141] add tests --- scripts/common.py | 21 ------- scripts/common_test.py | 92 +++++++++++++++++++++++++++++ scripts/install_third_party_libs.py | 6 -- scripts/pre_commit_linter.py | 1 - scripts/run_performance_tests.py | 19 +++--- 5 files changed, 102 insertions(+), 37 deletions(-) diff --git a/scripts/common.py b/scripts/common.py index ebd6756a5637..8692d89beeaa 100644 --- a/scripts/common.py +++ b/scripts/common.py @@ -18,19 +18,11 @@ import contextlib import os -import signal import socket import subprocess -import sys import python_utils -PARENT_DIR = os.path.abspath(os.path.join(os.getcwd(), os.pardir)) -PSUTIL_PATH = os.path.join(PARENT_DIR, 'oppia_tools', 'psutil-5.6.3') -sys.path.insert(0, PSUTIL_PATH) - -import psutil # isort:skip # pylint: disable=wrong-import-position - RELEASE_BRANCH_NAME_PREFIX = 'release-' CURR_DIR = os.path.abspath(os.getcwd()) OPPIA_TOOLS_DIR = os.path.join(CURR_DIR, '..', 'oppia_tools') @@ -216,19 +208,6 @@ def is_port_open(port): return bool(not s.connect_ex(('localhost', port))) -def kill_process(port): - """Kills a process that is listening to a specific port. - Credits: https://stackoverflow.com/a/20691431/11755830 - - Args: - port: int. The port number. - """ - for process in psutil.process_iter(): - for conns in process.connections(kind='inet'): - if conns.laddr.port == port: - process.send_signal(signal.SIGTERM) - - def recursive_chown(path, uid, gid): """Changes the owner and group id of all files in a path to the numeric uid and gid. 
diff --git a/scripts/common_test.py b/scripts/common_test.py index a4572d37720e..602fc1755b21 100644 --- a/scripts/common_test.py +++ b/scripts/common_test.py @@ -18,8 +18,15 @@ from __future__ import absolute_import # pylint: disable=import-only-modules from __future__ import unicode_literals # pylint: disable=import-only-modules +import contextlib +import http.server import os +import shutil +import socketserver +import stat import subprocess +import sys +import tempfile from core.tests import test_utils import python_utils @@ -255,3 +262,88 @@ def mock_get_remote_alias(unused_url): common .ensure_release_scripts_folder_exists_and_is_up_to_date( )) + + def test_is_port_open(self): + self.assertFalse(common.is_port_open(4444)) + + handler = http.server.SimpleHTTPRequestHandler + httpd = socketserver.TCPServer(('', 4444), handler) + + self.assertTrue(common.is_port_open(4444)) + httpd.server_close() + + def test_permissions_of_file(self): + root_temp_dir = tempfile.mkdtemp() + temp_dirpath = tempfile.mkdtemp(dir=root_temp_dir) + temp_file = tempfile.NamedTemporaryFile(dir=temp_dirpath) + temp_file.name = 'temp_file' + temp_file_path = os.path.join(temp_dirpath, 'temp_file') + with python_utils.open_file(temp_file_path, 'w') as f: + f.write('content') + + common.recursive_chown(root_temp_dir, os.getuid(), -1) + common.recursive_chmod(root_temp_dir, 0o744) + + for root, directories, filenames in os.walk(root_temp_dir): + for directory in directories: + self.assertEqual( + oct(stat.S_IMODE( + os.stat(os.path.join(root, directory)).st_mode)), + '0744') + self.assertEqual( + os.stat(os.path.join(root, directory)).st_uid, os.getuid()) + + for filename in filenames: + self.assertEqual( + oct(stat.S_IMODE( + os.stat(os.path.join(root, filename)).st_mode)), '0744') + self.assertEqual( + os.stat(os.path.join(root, filename)).st_uid, os.getuid()) + + shutil.rmtree(root_temp_dir) + + def test_print_each_string_after_two_new_lines(self): + @contextlib.contextmanager + def 
_redirect_stdout(new_target): + """Redirect stdout to the new target. + + Args: + new_target: TextIOWrapper. The new target to which stdout is + redirected. + + Yields: + TextIOWrapper. The new target. + """ + old_target = sys.stdout + sys.stdout = new_target + try: + yield new_target + finally: + sys.stdout = old_target + + target_stdout = python_utils.string_io() + with _redirect_stdout(target_stdout): + common.print_each_string_after_two_new_lines([ + 'These', 'are', 'sample', 'strings.']) + + self.assertEqual( + target_stdout.getvalue(), 'These\n\nare\n\nsample\n\nstrings.\n\n') + + def test_install_npm_library(self): + + def _mock_subprocess_call(unused_command): + """Mocks subprocess.call() to create a temporary file instead of the + actual npm library. + """ + temp_file = tempfile.NamedTemporaryFile() + temp_file.name = 'temp_file' + with python_utils.open_file('temp_file', 'w') as f: + f.write('content') + + self.assertTrue(os.path.exists('temp_file')) + temp_file.close() + + self.assertFalse(os.path.exists('temp_file')) + + with self.swap(subprocess, 'call', _mock_subprocess_call): + common.install_npm_library('library_name', 'version', 'path') diff --git a/scripts/install_third_party_libs.py b/scripts/install_third_party_libs.py index 598ccb74a5da..733f3b9dfb8f 100644 --- a/scripts/install_third_party_libs.py +++ b/scripts/install_third_party_libs.py @@ -36,12 +36,6 @@ 'pip', 'install', 'future==0.17.1', '--target', os.path.join('third_party', 'future-0.17.1')]) -# Download and install psutil. 
-if not os.path.exists(os.path.join(TOOLS_DIR, 'psutil-5.6.3')): - subprocess.call([ - 'pip', 'install', 'psutil==5.6.3', '--target', - os.path.join(TOOLS_DIR, 'psutil-5.6.3')]) - # pylint: disable=wrong-import-position # pylint: disable=wrong-import-order import python_utils # isort:skip diff --git a/scripts/pre_commit_linter.py b/scripts/pre_commit_linter.py index 88445d8d6649..0512e0d942fd 100644 --- a/scripts/pre_commit_linter.py +++ b/scripts/pre_commit_linter.py @@ -567,7 +567,6 @@ os.path.join(_PARENT_DIR, 'oppia_tools', 'pylint-quotes-0.1.8'), os.path.join(_PARENT_DIR, 'oppia_tools', 'selenium-3.13.0'), os.path.join(_PARENT_DIR, 'oppia_tools', 'PyGithub-1.43.7'), - os.path.join(_PARENT_DIR, 'oppia_tools', 'psutil-5.6.3'), os.path.join(_PARENT_DIR, 'oppia_tools', 'Pillow-6.0.0'), os.path.join('third_party', 'backports.functools_lru_cache-1.5'), os.path.join('third_party', 'beautifulsoup4-4.7.1'), diff --git a/scripts/run_performance_tests.py b/scripts/run_performance_tests.py index f6f2d9090d7f..48a52d514852 100644 --- a/scripts/run_performance_tests.py +++ b/scripts/run_performance_tests.py @@ -19,6 +19,7 @@ import argparse import atexit import os +import signal import subprocess import time @@ -57,9 +58,9 @@ USUAL_PORT_NUMBER_FOR_GAE_SERVER_IN_START = 8181 -def cleanup(): +def cleanup(pid): """Send a kill signal to the dev server.""" - common.kill_process(PORT_NUMBER_FOR_GAE_SERVER) + os.kill(pid, signal.SIGTERM) # Wait for the servers to go down; suppress 'connection refused' error # output from nc since that is exactly what we are expecting to happen. @@ -104,17 +105,12 @@ def main(args=None): 'Exiting.']) raise Exception - # Forces the cleanup function to run on exit. - # Developers: note that at the end of this script, the cleanup() function at - # the top of the file is run. 
- atexit.register(cleanup) - browsermob_proxy_path = os.path.join( common.OPPIA_TOOLS_DIR, 'browsermob-proxy-2.1.1', 'bin', 'browsermob-proxy') # Change execute status of browsermob-proxy. - common.recursive_chmod(browsermob_proxy_path, 744) + common.recursive_chmod(browsermob_proxy_path, 0o744) # Start a demo server. background_process = subprocess.Popen( @@ -124,6 +120,11 @@ def main(args=None): common.GOOGLE_APP_ENGINE_HOME, python_utils.UNICODE(PORT_NUMBER_FOR_GAE_SERVER)), shell=True) + # Forces the cleanup function to run on exit. + # Developers: note that at the end of this script, the cleanup() function at + # the top of the file is run. + atexit.register(cleanup, background_process.pid) + # Wait for the servers to come up. while not common.is_port_open(PORT_NUMBER_FOR_GAE_SERVER): time.sleep(1) @@ -154,7 +155,7 @@ def main(args=None): run_performance_test('profile_page_test', xvfb_prefix) run_performance_test('splash_test', xvfb_prefix) - common.recursive_chmod(browsermob_proxy_path, 644) + common.recursive_chmod(browsermob_proxy_path, 0o644) clean.delete_file('bmp.log') clean.delete_file('server.log') From 911d84ab18caea4656763f04a0812aa6d84de01f Mon Sep 17 00:00:00 2001 From: Rishav Chakraborty Date: Sun, 8 Sep 2019 22:32:30 +0530 Subject: [PATCH 141/141] use triple strings --- scripts/build.py | 10 ++++----- scripts/clean.py | 5 +++-- scripts/create_expression_parser.py | 9 ++++---- scripts/install_chrome_on_travis.py | 7 +++--- scripts/install_third_party.py | 5 +++-- scripts/install_third_party_libs.py | 5 +++-- scripts/run_backend_tests.py | 9 ++++---- scripts/run_frontend_tests.py | 13 ++++++------ scripts/run_performance_tests.py | 17 ++++++++------- scripts/run_presubmit_checks.py | 33 ++++++++++++++--------------- scripts/run_tests.py | 15 +++++++------ scripts/setup.py | 5 +++-- scripts/setup_gae.py | 6 +++--- scripts/start.py | 9 ++++---- 14 files changed, 79 insertions(+), 69 deletions(-) diff --git a/scripts/build.py b/scripts/build.py index 
5ae02ecbc2d0..f75be4909a68 100644 --- a/scripts/build.py +++ b/scripts/build.py @@ -143,11 +143,11 @@ APP_DEV_YAML_FILEPATH = 'app_dev.yaml' APP_YAML_FILEPATH = 'app.yaml' -_PARSER = argparse.ArgumentParser( - description='Creates a third-party directory where all the JS and CSS ' - 'dependencies are built and stored. Depending on the options passed to the ' - 'script, might also minify third-party libraries and/or generate a build ' - 'directory.') +_PARSER = argparse.ArgumentParser(description=""" +Creates a third-party directory where all the JS and CSS dependencies are +built and stored. Depending on the options passed to the script, might also +minify third-party libraries and/or generate a build directory. +""") _PARSER.add_argument( '--prod_env', action='store_true', default=False, dest='prod_mode') diff --git a/scripts/clean.py b/scripts/clean.py index 4eeea439bc9a..778425b0f6ec 100644 --- a/scripts/clean.py +++ b/scripts/clean.py @@ -25,8 +25,9 @@ CURR_DIR = os.path.abspath(os.getcwd()) OPPIA_TOOLS_DIR = os.path.join(CURR_DIR, '..', 'oppia_tools') -_PARSER = argparse.ArgumentParser( - description='Deletes temporary and installed files.') +_PARSER = argparse.ArgumentParser(description=""" +Deletes temporary and installed files. +""") def delete_directory_tree(directory_path): diff --git a/scripts/create_expression_parser.py b/scripts/create_expression_parser.py index 49eb36b0a76c..a018d6ed6db8 100644 --- a/scripts/create_expression_parser.py +++ b/scripts/create_expression_parser.py @@ -27,10 +27,11 @@ from . import common from . import setup -_PARSER = argparse.ArgumentParser( - description='Run this script from the oppia root folder: ' - 'python -m scripts.create_expression_parser. ' - 'The root folder MUST be named \'oppia\'.') +_PARSER = argparse.ArgumentParser(description=""" +Run this script from the oppia root folder: + python -m scripts.create_expression_parser +The root folder MUST be named 'oppia'. 
+""") def main(args=None): diff --git a/scripts/install_chrome_on_travis.py b/scripts/install_chrome_on_travis.py index ced69df34185..fad933830b14 100644 --- a/scripts/install_chrome_on_travis.py +++ b/scripts/install_chrome_on_travis.py @@ -24,9 +24,10 @@ import python_utils -_PARSER = argparse.ArgumentParser( - description='This script should only be run by Travis to install and ' - 'provide a constant version of Chrome.') +_PARSER = argparse.ArgumentParser(description=""" +This script should only be run by Travis to install and provide a constant +version of Chrome. +""") def main(args=None): diff --git a/scripts/install_third_party.py b/scripts/install_third_party.py index 12bc4c678fae..de4a271d360f 100644 --- a/scripts/install_third_party.py +++ b/scripts/install_third_party.py @@ -70,8 +70,9 @@ } } -_PARSER = argparse.ArgumentParser( - description='Installation script for Oppia third-party libraries.') +_PARSER = argparse.ArgumentParser(description=""" +Installation script for Oppia third-party libraries. +""") def download_files(source_url_root, target_dir, source_filenames): diff --git a/scripts/install_third_party_libs.py b/scripts/install_third_party_libs.py index 733f3b9dfb8f..5320d8f2eee0 100644 --- a/scripts/install_third_party_libs.py +++ b/scripts/install_third_party_libs.py @@ -50,8 +50,9 @@ # pylint: enable=wrong-import-order # pylint: enable=wrong-import-position -_PARSER = argparse.ArgumentParser( - description='Installation script for Oppia third-party libraries.') +_PARSER = argparse.ArgumentParser(description=""" +Installation script for Oppia third-party libraries. 
+""") _PARSER.add_argument( '--nojsrepl', diff --git a/scripts/run_backend_tests.py b/scripts/run_backend_tests.py index 06f5bc7e5e48..74092bf3bcb8 100644 --- a/scripts/run_backend_tests.py +++ b/scripts/run_backend_tests.py @@ -80,10 +80,11 @@ _LOAD_TESTS_DIR = os.path.join(os.getcwd(), 'core', 'tests', 'load_tests') -_PARSER = argparse.ArgumentParser( - description='Run this script from the oppia root folder: ' - 'python -m scripts.run_backend_tests. ' - 'IMPORTANT: Only one of --test_path and --test_target should be specified.') +_PARSER = argparse.ArgumentParser(description=""" +Run this script from the oppia root folder: + python -m scripts.run_backend_tests +IMPORTANT: Only one of --test_path and --test_target should be specified. +""") _EXCLUSIVE_GROUP = _PARSER.add_mutually_exclusive_group() _EXCLUSIVE_GROUP.add_argument( diff --git a/scripts/run_frontend_tests.py b/scripts/run_frontend_tests.py index a8c783272ff8..c93276cdab34 100644 --- a/scripts/run_frontend_tests.py +++ b/scripts/run_frontend_tests.py @@ -28,12 +28,13 @@ from . import setup from . import setup_gae -_PARSER = argparse.ArgumentParser( - description='Run this script from the oppia root folder: ' - 'python -m scripts.run_frontend_tests. ' - 'The root folder MUST be named \'oppia\'. ' - 'Note: You can replace \'it\' with \'fit\' or \'describe\' with ' - '\'fdescribe\' to run a single test or test suite.') +_PARSER = argparse.ArgumentParser(description=""" +Run this script from the oppia root folder: + python -m scripts.run_frontend_tests +The root folder MUST be named 'oppia'. +Note: You can replace 'it' with 'fit' or 'describe' with 'fdescribe' to run +a single test or test suite. +""") _PARSER.add_argument( '--skip_install', diff --git a/scripts/run_performance_tests.py b/scripts/run_performance_tests.py index 48a52d514852..518f9f61d1fd 100644 --- a/scripts/run_performance_tests.py +++ b/scripts/run_performance_tests.py @@ -31,14 +31,15 @@ from . import setup from . 
import setup_gae -_PARSER = argparse.ArgumentParser( - description='The root folder MUST be named \'oppia\'. ' - 'Run all tests sequentially: ' - 'python -m scripts.run_performance_tests without args in order to run all ' - 'tests sequentially. ' - 'Run test for a specific page: ' - 'python -m scripts.run_performance_tests --test_name=page_test. ' - 'page_test is the name of the file containing that test eg. splash_test.') +_PARSER = argparse.ArgumentParser(description=""" +The root folder MUST be named 'oppia'. +Run all tests sequentially: + python -m scripts.run_performance_tests without args in order to run all + tests sequentially. +Run test for a specific page: + python -m scripts.run_performance_tests --test_name=page_test +page_test is the name of the file containing that test e.g. splash_test. +""") _PARSER.add_argument( '--skip_install', diff --git a/scripts/run_presubmit_checks.py b/scripts/run_presubmit_checks.py index f979ef44ccd7..51494f65aefd 100644 --- a/scripts/run_presubmit_checks.py +++ b/scripts/run_presubmit_checks.py @@ -31,23 +31,22 @@ from . import run_backend_tests from . import run_frontend_tests -_PARSER = argparse.ArgumentParser( - description='Run this script from the oppia root folder prior to opening a ' - 'PR: ' - 'python -m scripts.run_presubmit_checks ' - 'Set the origin branch to compare against by adding ' - '--branch=your_branch or -b=your_branch ' - 'By default, if the current branch tip exists on remote origin, ' - 'the current branch is compared against its tip on GitHub. ' - 'Otherwise it\'s compared against \'develop\'. ' - 'This script runs the following tests in all cases. ' - '- Javascript and Python Linting ' - '- Backend Python tests ' - 'Only when frontend files are changed will it run Frontend Karma unit ' - 'tests. ' - 'If any of these tests result in errors, this script will terminate. ' - 'Note: The test scripts are arranged in increasing order of time taken. 
' - 'This enables a broken build to be detected as quickly as possible.') +_PARSER = argparse.ArgumentParser(description=""" +Run this script from the oppia root folder prior to opening a PR: + python -m scripts.run_presubmit_checks +Set the origin branch to compare against by adding +--branch=your_branch or -b=your_branch +By default, if the current branch tip exists on remote origin, +the current branch is compared against its tip on GitHub. +Otherwise it's compared against 'develop'. +This script runs the following tests in all cases. +- JavaScript and Python Linting +- Backend Python tests +Only when frontend files are changed will it run Frontend Karma unit tests. +If any of these tests result in errors, this script will terminate. +Note: The test scripts are arranged in increasing order of time taken. This +enables a broken build to be detected as quickly as possible. +""") _PARSER.add_argument( '--branch', '-b', diff --git a/scripts/run_tests.py b/scripts/run_tests.py index 489d0eac145b..11342b2ced8b 100644 --- a/scripts/run_tests.py +++ b/scripts/run_tests.py @@ -35,13 +35,14 @@ from . import setup from . import setup_gae -_PARSER = argparse.ArgumentParser( - description='Run this script from the oppia root folder: ' - 'python -m scripts.run_tests ' - 'This script runs all the tests, in this order: ' - '- Frontend Karma unit tests ' - '- Backend Python tests ' - '- End-to-end Protractor tests ') +_PARSER = argparse.ArgumentParser(description=""" +Run this script from the oppia root folder: + python -m scripts.run_tests +This script runs all the tests, in this order: +- Frontend Karma unit tests +- Backend Python tests +- End-to-end Protractor tests +""") def main(args=None): diff --git a/scripts/setup.py b/scripts/setup.py index 434e19415956..bb2efbd9f99d 100644 --- a/scripts/setup.py +++ b/scripts/setup.py @@ -27,8 +27,9 @@ from . import clean from . 
import common -_PARSER = argparse.ArgumentParser( - description='Python execution environent set up for all scripts.') +_PARSER = argparse.ArgumentParser(description=""" +Python execution environment set up for all scripts. +""") def delete_directory_tree(directory_path): diff --git a/scripts/setup_gae.py b/scripts/setup_gae.py index d95b9d75d186..9d72854fd615 100644 --- a/scripts/setup_gae.py +++ b/scripts/setup_gae.py @@ -26,9 +26,9 @@ from . import common -_PARSER = argparse.ArgumentParser( - description='Python execution environment setup for scripts that require ' - 'GAE.') +_PARSER = argparse.ArgumentParser(description=""" +Python execution environment setup for scripts that require GAE. +""") def main(args=None): diff --git a/scripts/start.py b/scripts/start.py index 3eebd04112ef..31f347f388da 100644 --- a/scripts/start.py +++ b/scripts/start.py @@ -35,10 +35,11 @@ from . import setup from . import setup_gae -_PARSER = argparse.ArgumentParser( - description='Run the script from the oppia root folder: ' - 'python -m scripts.start ' - 'Note that the root folder MUST be named \'oppia\'.') +_PARSER = argparse.ArgumentParser(description=""" +Run the script from the oppia root folder: + python -m scripts.start +Note that the root folder MUST be named 'oppia'. +""") _PARSER.add_argument( '--save_datastore',