Я пытаюсь выполнить spark-submit из Airflow в режиме кластера (cluster mode) и хочу указать свойства log4j в операторе отправки (SparkSubmitOperator)
task_id='spark_submit_job',
conn_id='spark_default',
files='/usr/hdp/current/spark-client/conf/hive-site.xml',
jars='/usr/hdp/current/spark-client/lib/datanucleus-api-jdo-3.2.6.jar,/usr/hdp/current/spark-client/lib/datanucleus-rdbms-3.2.9.jar,/usr/hdp/current/spark-client/lib/datanucleus-core-3.2.10.jar',
java_class='com.xxx.eim.job.SubmitSparkJob',
application='/root/airflow/code/eimdataprocessor.jar',
total_executor_cores='4',
executor_cores='4',
executor_memory='5g',
num_executors='4',
name='airflow-spark-example',
verbose=False,
driver_memory='10g',
application_args=["XXX"],
conf={'master':'yarn',
'spark.yarn.queue'='priority',
'spark.app.name'='XXX',
'spark.dynamicAllocation.enabled'='true'},
'spark.local.dir'='/opt/eim',
'spark.shuffle.service.enabled'='true',
'spark.hadoop.mapreduce.fileoutputcommitter.cleanup-failures.ignored'='true',
'spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version'='2'
},
dag=dag)