Commit 2aa412e2 authored by Gmodena's avatar Gmodena
Browse files

Build.yml now uses make commands

parent 4516a4a0
...@@ -17,23 +17,19 @@ jobs: ...@@ -17,23 +17,19 @@ jobs:
uses: actions/setup-python@v1 uses: actions/setup-python@v1
with: with:
python-version: ${{ matrix.python-version }} python-version: ${{ matrix.python-version }}
- name: Install build dependencies - name: Install dependencies
run: | run: |
pip install -r requirements.txt make venv
pip install -e .
- name: Lint code with flake8
run: |
pip install flake8==3.8
# stop the build if there are Python syntax errors or undefined names in *.py files
flake8 *.py etl/ tests/ --count --select=E9,F63,F7,F82 --show-source --statistics
# exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
flake8 *.py etl/ tests/ --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
- uses: olafurpg/setup-scala@v10 - uses: olafurpg/setup-scala@v10
with: with:
java-version: adopt@1.8 java-version: adopt@1.8
- name: Install Spark - name: Install Apache Spark
run: | run: |
# This command will install vanilla spark under ./spark-2.4.7-bin-hadoop2.7
make install_spark make install_spark
- name: Test with pytest - name: Test with pytest
run: | run: |
export SPARK_HOME=$(pwd)/spark-2.4.7-bin-hadoop2.7
export PYTHONPATH=${SPARK_HOME}/python:${SPARK_HOME}/python/lib/py4j-0.10.7-src.zip:${PYTHONPATH}
export PATH=${PATH}:${SPARK_HOME}/bin:${SPARK_HOME}/sbin
make test make test
spark_version = 2.4.7 spark_version := 2.4.7
hadoop_version = 2.7 hadoop_version := 2.7
spark_home = spark-${spark_version}-bin-hadoop${hadoop_version} spark_home := spark-${spark_version}-bin-hadoop${hadoop_version}
spark_tgz_url = http://apachemirror.wuchna.com/spark/spark-${spark_version}/${spark_home}.tgz spark_tgz_url := http://apachemirror.wuchna.com/spark/spark-${spark_version}/${spark_home}.tgz
venv: requirements.txt venv: requirements.txt
test -d venv || python3 -m venv venv test -d venv || python -m venv venv
. venv/bin/activate; pip3 install -Ur requirements.txt; . venv/bin/activate; pip install -Ur requirements.txt;
install_spark: install_spark:
test -d ${spark_home} || (wget ${spark_tgz_url}; tar -xzvf ${spark_home}.tgz) test -d ${spark_home} || (wget ${spark_tgz_url}; tar -xzvf ${spark_home}.tgz)
clean_spark: clean_spark:
rm -r ${spark_home} rm -r ${spark_home}; rm -rf ${spark_home}.tgz
test: install_spark venv flake8: venv
. venv/bin/activate
# stop the build if there are Python syntax errors or undefined names in *.py files
flake8 *.py etl/ tests/ --count --select=E9,F63,F7,F82 --show-source --statistics
# exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
flake8 *.py etl/ tests/ --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
test: venv
. venv/bin/activate; pytest --cov etl tests/ . venv/bin/activate; pytest --cov etl tests/
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment