Skip to content

Commit 219c506

Browse files
hgrif authored and bolkedebruin committed
[AIRFLOW-1094] Run unit tests under contrib in Travis
Rename all unit tests under tests/contrib to start with test_* and fix broken unit tests so that they run for the Python 2 and 3 builds. Closes #2234 from hgrif/AIRFLOW-1094
1 parent 74c1ce2 commit 219c506

34 files changed

+176
-119
lines changed

airflow/contrib/operators/ecs_operator.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -89,7 +89,7 @@ def execute(self, context):
8989

9090
def _wait_for_task_ended(self):
9191
waiter = self.client.get_waiter('tasks_stopped')
92-
waiter.config.max_attempts = sys.maxint # timeout is managed by airflow
92+
waiter.config.max_attempts = sys.maxsize # timeout is managed by airflow
9393
waiter.wait(
9494
cluster=self.cluster,
9595
tasks=[self.arn]

airflow/hooks/__init__.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -48,6 +48,7 @@
4848
'samba_hook': ['SambaHook'],
4949
'sqlite_hook': ['SqliteHook'],
5050
'S3_hook': ['S3Hook'],
51+
'zendesk_hook': ['ZendeskHook'],
5152
'http_hook': ['HttpHook'],
5253
'druid_hook': ['DruidHook'],
5354
'jdbc_hook': ['JdbcHook'],

airflow/hooks/zendesk_hook.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,7 @@
2121
import logging
2222
import time
2323
from zdesk import Zendesk, RateLimitError, ZendeskError
24-
from airflow.hooks import BaseHook
24+
from airflow.hooks.base_hook import BaseHook
2525

2626

2727
class ZendeskHook(BaseHook):

scripts/ci/requirements.txt

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,7 @@ azure-storage>=0.34.0
33
bcrypt
44
bleach
55
boto
6+
boto3
67
celery
78
cgroupspy
89
chartkick
@@ -11,6 +12,7 @@ coverage
1112
coveralls
1213
croniter
1314
cryptography
15+
datadog
1416
dill
1517
distributed
1618
docker-py
@@ -25,6 +27,7 @@ Flask-WTF
2527
flower
2628
freezegun
2729
future
30+
google-api-python-client>=1.5.0,<1.6.0
2831
gunicorn
2932
hdfs
3033
hive-thrift-py
@@ -37,6 +40,7 @@ ldap3
3740
lxml
3841
markdown
3942
mock
43+
moto
4044
mysqlclient
4145
nose
4246
nose-exclude
@@ -69,3 +73,4 @@ statsd
6973
thrift
7074
thrift_sasl
7175
unicodecsv
76+
zdesk
Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -110,7 +110,7 @@ def test_invalid_source_format(self):
110110
hook.BigQueryBaseCursor("test", "test").run_load("test.test", "test_schema.json", ["test_data.json"], source_format="json")
111111

112112
# since we passed 'json' in, and it's not valid, make sure it's present in the error string.
113-
self.assertIn("json", str(context.exception))
113+
self.assertIn("JSON", str(context.exception))
114114

115115

116116
class TestBigQueryBaseCursor(unittest.TestCase):
File renamed without changes.
File renamed without changes.

tests/contrib/hooks/spark_submit_hook.py renamed to tests/contrib/hooks/test_spark_submit_hook.py

Lines changed: 20 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -12,16 +12,19 @@
1212
# See the License for the specific language governing permissions and
1313
# limitations under the License.
1414
#
15-
import os
15+
import sys
1616
import unittest
17+
from io import StringIO
18+
19+
import mock
1720

1821
from airflow import configuration, models
1922
from airflow.utils import db
20-
from airflow.exceptions import AirflowException
2123
from airflow.contrib.hooks.spark_submit_hook import SparkSubmitHook
2224

2325

2426
class TestSparkSubmitHook(unittest.TestCase):
27+
2528
_spark_job_file = 'test_application.py'
2629
_config = {
2730
'conf': {
@@ -43,6 +46,11 @@ class TestSparkSubmitHook(unittest.TestCase):
4346
}
4447

4548
def setUp(self):
49+
50+
if sys.version_info[0] == 3:
51+
raise unittest.SkipTest('TestSparkSubmitHook won\'t work with '
52+
'python3. No need to test anything here')
53+
4654
configuration.load_test_config()
4755
db.merge_conn(
4856
models.Connection(
@@ -97,13 +105,17 @@ def test_build_command(self):
97105
if self._config['verbose']:
98106
assert "--verbose" in cmd
99107

100-
def test_submit(self):
108+
@mock.patch('airflow.contrib.hooks.spark_submit_hook.subprocess')
109+
def test_submit(self, mock_process):
110+
# We don't have spark-submit available, and this is hard to mock, so let's
111+
# just use this simple mock.
112+
mock_Popen = mock_process.Popen.return_value
113+
mock_Popen.stdout = StringIO(u'stdout')
114+
mock_Popen.stderr = StringIO(u'stderr')
115+
mock_Popen.returncode = None
116+
mock_Popen.communicate.return_value = ['extra stdout', 'extra stderr']
101117
hook = SparkSubmitHook()
102-
103-
# We don't have spark-submit available, and this is hard to mock, so just accept
104-
# an exception for now.
105-
with self.assertRaises(AirflowException):
106-
hook.submit(self._spark_job_file)
118+
hook.submit(self._spark_job_file)
107119

108120
def test_resolve_connection(self):
109121

0 commit comments

Comments (0)