Skip to content

Navigation Menu

Sign in
Appearance settings

Search code, repositories, users, issues, pull requests...

Provide feedback

We read every piece of feedback, and take your input very seriously.

Saved searches

Use saved searches to filter your results more quickly

Appearance settings

Commit 822a332

Browse files
authored
Rename composer quickstart to hadoop tutorial (GoogleCloudPlatform#2245)
1 parent 950d97a commit 822a332
Copy full SHA for 822a332

File tree

Expand file tree / Collapse file tree

2 files changed

+11
-11
lines changed
Filter options
Expand file tree / Collapse file tree

2 files changed

+11
-11
lines changed

‎composer/workflows/quickstart.py renamed to ‎composer/workflows/hadoop_tutorial.py

Copy file name to clipboard. Expand all lines: composer/workflows/hadoop_tutorial.py
+10 −10 — Lines changed: 10 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@
1212
# See the License for the specific language governing permissions and
1313
# limitations under the License.
1414

15-
# [START composer_quickstart]
15+
# [START composer_hadoop_tutorial]
1616
"""Example Airflow DAG that creates a Cloud Dataproc cluster, runs the Hadoop
1717
wordcount example, and deletes the cluster.
1818
@@ -62,20 +62,20 @@
6262
'project_id': models.Variable.get('gcp_project')
6363
}
6464

65-
# [START composer_quickstart_schedule]
65+
# [START composer_hadoop_schedule]
6666
with models.DAG(
67-
'composer_sample_quickstart',
67+
'composer_hadoop_tutorial',
6868
# Continue to run DAG once per day
6969
schedule_interval=datetime.timedelta(days=1),
7070
default_args=default_dag_args) as dag:
71-
# [END composer_quickstart_schedule]
71+
# [END composer_hadoop_schedule]
7272

7373
# Create a Cloud Dataproc cluster.
7474
create_dataproc_cluster = dataproc_operator.DataprocClusterCreateOperator(
7575
task_id='create_dataproc_cluster',
7676
# Give the cluster a unique name by appending the date scheduled.
7777
# See https://airflow.apache.org/code.html#default-variables
78-
cluster_name='quickstart-cluster-{{ ds_nodash }}',
78+
cluster_name='composer-hadoop-tutorial-cluster-{{ ds_nodash }}',
7979
num_workers=2,
8080
zone=models.Variable.get('gce_zone'),
8181
master_machine_type='n1-standard-1',
@@ -86,20 +86,20 @@
8686
run_dataproc_hadoop = dataproc_operator.DataProcHadoopOperator(
8787
task_id='run_dataproc_hadoop',
8888
main_jar=WORDCOUNT_JAR,
89-
cluster_name='quickstart-cluster-{{ ds_nodash }}',
89+
cluster_name='composer-hadoop-tutorial-cluster-{{ ds_nodash }}',
9090
arguments=wordcount_args)
9191

9292
# Delete Cloud Dataproc cluster.
9393
delete_dataproc_cluster = dataproc_operator.DataprocClusterDeleteOperator(
9494
task_id='delete_dataproc_cluster',
95-
cluster_name='quickstart-cluster-{{ ds_nodash }}',
95+
cluster_name='composer-hadoop-tutorial-cluster-{{ ds_nodash }}',
9696
# Setting trigger_rule to ALL_DONE causes the cluster to be deleted
9797
# even if the Dataproc job fails.
9898
trigger_rule=trigger_rule.TriggerRule.ALL_DONE)
9999

100-
# [START composer_quickstart_steps]
100+
# [START composer_hadoop_steps]
101101
# Define DAG dependencies.
102102
create_dataproc_cluster >> run_dataproc_hadoop >> delete_dataproc_cluster
103-
# [END composer_quickstart_steps]
103+
# [END composer_hadoop_steps]
104104

105-
# [END composer_quickstart]
105+
# [END composer_hadoop]

‎composer/workflows/quickstart_test.py renamed to ‎composer/workflows/hadoop_tutorial_test.py

Copy file name to clipboard. Expand all lines: composer/workflows/hadoop_tutorial_test.py
+1 −1 — Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -27,5 +27,5 @@ def test_dag_import():
2727
models.Variable.set('gcs_bucket', 'example_bucket')
2828
models.Variable.set('gcp_project', 'example-project')
2929
models.Variable.set('gce_zone', 'us-central1-f')
30-
from . import quickstart as module
30+
from . import hadoop_tutorial as module
3131
unit_testing.assert_has_valid_dag(module)

0 commit comments

Comments (0)
Morty Proxy This is a proxified and sanitized view of the page, visit original site.