diff --git a/teste_dag.py b/teste_dag.py
index aaa690adcccbe604f5fb89003abb1d39c208de53..1ef297da044891d8896224c11e7656b99710f473 100644
--- a/teste_dag.py
+++ b/teste_dag.py
@@ -17,32 +17,31 @@ repo_home = '/opt/airflow/dags/repo'
         "input_file": Param("fichas_cadastro_individual_5000.jsonl", type="string"),
     },
 )
-def process_records():
-    @task
-    def task_spark(params: dict):
-        return SparkSubmitOperator(
-            task_id="landing_to_bronze",
-            application=f"{repo_home}/gen_bronze.py",
-            application_args=[
-                params["input_file"]
-            ],
-            packages="org.apache.hadoop:hadoop-aws:3.3.4"
-        )
-
-    @task
-    def task_dbt_setup():
-        return BashOperator(
-            task_id="dbt_setup",
-            bash_command=f"rm -rf /tmp/proj_teste && cp -r {repo_home}/proj_teste /tmp/proj_teste",
-        )
-
-    @task
-    def task_bronze_to_gold():
-        return BashOperator(
-            task_id="bronze_to_silver_to_gold",
-            cwd="/tmp/proj_teste",
-            bash_command="dbt deps && dbt build",
-        )
+def process_records():
+    # Ingest the raw JSONL file into the bronze layer. The input file name is
+    # resolved from dag params at *runtime* via Jinja templating (application_args
+    # is a templated field), so a per-run override is honoured instead of the
+    # value being frozen at DAG-parse time.
+    task_spark = SparkSubmitOperator(
+        task_id="landing_to_bronze",
+        application=f"{repo_home}/gen_bronze.py",
+        application_args=[
+            "{{ params.input_file }}"
+        ],
+        packages="org.apache.hadoop:hadoop-aws:3.3.4"
+    )
+
+    # Copy the dbt project into a writable scratch location before building.
+    task_dbt_setup = BashOperator(
+        task_id="dbt_setup",
+        bash_command=f"rm -rf /tmp/proj_teste && cp -r {repo_home}/proj_teste /tmp/proj_teste",
+    )
+
+    # Run dbt to materialise the silver and gold models from the bronze layer.
+    task_bronze_to_gold = BashOperator(
+        task_id="bronze_to_silver_to_gold",
+        cwd="/tmp/proj_teste",
+        bash_command="dbt deps && dbt build",
+    )
 
     task_spark >> task_dbt_setup >> task_bronze_to_gold
 