Tags: python, airflow, airflow-scheduler

How do we import DAGs in Airflow?


Hi, I'm new to Airflow. I'm trying to run my own custom JAR, generated with Talend Open Studio Big Data, as a DAG. When I add the DAG file via the terminal, no error shows up, but the DAG does not appear in the DAG list in the Airflow UI.

Here is the code of my .py file:

from airflow import DAG
from airflow.operators.bash_operator import BashOperator
from datetime import datetime
from airflow.utils.email import send_email

import os
import sys


bib_app = "/home/user/Docs/JObforAirflow/test/test_run.sh"
default_args = {
    'owner': 'yabid',
    'depends_on_past': False,
    'start_date': datetime(2019, 4, 29),
    'email': ['[email protected]'],
    'email_on_failure': True,
    'email_on_success': True,
    'provide_context': True
}

args = {
    'owner': 'yabid',
    'email': ['[email protected]'],
    'start_date': datetime(2019, 4, 25),
    'provide_context': True
}

dag = DAG('run_jar', default_args=default_args)

t1 = BashOperator(
    task_id='dependency',
    bash_command= bib_app,
    dag=dag)


t2 = BashOperator(
    task_id='t2',
    dag=dag,
    bash_command='java -cp /home/user/Docs/JObforAirflow/test/jobbatch.jar'
)

t1.set_upstream(t2)

Solution

  • Did you copy this DAG file to ~/airflow/dags?

    All your *.py DAG files need to be copied to AIRFLOW_HOME/dags, where AIRFLOW_HOME is ~/airflow by default (a quick way to verify that the file is picked up is shown after the code below).

    from airflow import DAG
    from airflow.operators.bash_operator import BashOperator
    from datetime import datetime


    # Shell script that launches the Talend-generated job
    bib_app = "/home/user/Docs/JObforAirflow/test/test_run.sh"

    default_args = {
        'owner': 'yabid',
        'depends_on_past': False,
        'start_date': datetime(2019, 4, 25),
        'email': ['[email protected]'],
        'email_on_failure': True,
        'email_on_success': True,   # note: not a standard BaseOperator argument
        'provide_context': True
    }


    dag = DAG('run_jar', default_args=default_args)

    t1 = BashOperator(
        task_id='dependency',
        bash_command=bib_app,
        dag=dag)


    t2 = BashOperator(
        task_id='t2',
        dag=dag,
        bash_command='java -cp /home/user/Docs/JObforAirflow/test/jobbatch.jar')

    # t1 runs first, then t2 (the question's t1.set_upstream(t2) made t2 run first)
    t1 >> t2
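
  • To confirm that Airflow actually picks the file up, you can parse the dags folder yourself with DagBag and check for import errors. This is a minimal sketch: it assumes the file above was saved under ~/airflow/dags and that you run it in the same Python environment the scheduler uses.

    from airflow.models import DagBag

    # Parses every .py file under the configured dags_folder
    dagbag = DagBag()

    # Any exception raised while importing a DAG file shows up here
    print(dagbag.import_errors)

    # The DAG id defined above should be in the bag if the file was parsed
    print('run_jar' in dagbag.dags)

    If 'run_jar' is missing and there is no import error, double-check that dags_folder in airflow.cfg points at the directory you copied the file to.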