Commit 0dff2977 authored by edvs19

teste

parent 517416ad
@@ -12,38 +12,38 @@ from airflow.providers.apache.spark.operators.spark_submit import SparkSubmitOperator
repo_home = '/opt/airflow/dags/repo'
@dag(
    params={
        "input_file": Param(
            "nofile.jsonl",
            "jsonl to be used as input in landing to bronze",
            str
        )
    }
)
def process_records(params: dict):
    """
    DAG that runs the landing -> bronze -> silver -> gold flow
    """
    task_1 = SparkSubmitOperator(
        application=f"{repo_home}/gen_bronze.py",
        task_id="landing_to_bronze",
        packages="org.apache.hadoop:hadoop-aws:3.3.4"
    )
    task_2 = BashOperator(
        task_id="dbt_setup",
        bash_command=f"rm -rf /tmp/proj_teste && cp -r {repo_home}/proj_teste /tmp/proj_teste",
    )
    task_3 = BashOperator(
        task_id="bronze_to_silver_to_gold",
        cwd="/tmp/proj_teste",
        bash_command="dbt deps && dbt build",
    )
    task_1 >> task_2 >> task_3
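Inside this hunk the input_file param is declared on the DAG but never handed to the Spark job, so gen_bronze.py presumably falls back to a default of its own. Below is a minimal sketch of one way the param could be forwarded, as a drop-in alternative for the task_1 definition above; it assumes application_args is a templated field of the installed Spark provider and that gen_bronze.py reads the file name from its command line, neither of which is shown in this commit:

    # Sketch only: forward the DAG-level "input_file" param to the Spark job.
    # "{{ params.input_file }}" is rendered by Airflow's Jinja templating at run time;
    # gen_bronze.py reading it as its first command-line argument is an assumption.
    task_1 = SparkSubmitOperator(
        application=f"{repo_home}/gen_bronze.py",
        task_id="landing_to_bronze",
        packages="org.apache.hadoop:hadoop-aws:3.3.4",
        application_args=["{{ params.input_file }}"],
    )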
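Nothing in the hunk invokes the decorated function, so the DAG object is presumably created further down the file, outside the lines shown. A minimal sketch of that registration plus an optional local run, stated as an assumption rather than something visible in the diff; the empty dict only satisfies the function's required params argument, and sample.jsonl is a made-up file name:

# Assumption: a module-level call like this exists below the hunk so the
# scheduler can discover the DAG defined by process_records().
processing_dag = process_records(params={})

# Optional local smoke test (Airflow 2.5+), overriding the input_file param.
if __name__ == "__main__":
    processing_dag.test(run_conf={"input_file": "sample.jsonl"})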