Convert the DynamicFrame to a Spark DataFrame, add a new column with the current timestamp, and then convert it back to a DynamicFrame before writing. In Scala that looks like this:
import com.amazonaws.services.glue.DynamicFrame
import org.apache.spark.sql.functions._
...
// Convert to a DataFrame, add the TimeStamp column, then wrap it back into a DynamicFrame
val timestampedDf = dropnullfields3.toDF().withColumn("TimeStamp", current_timestamp())
val timestamped4 = DynamicFrame(timestampedDf, glueContext)
Here is how your Python code should look:
import sys
from awsglue.transforms import *
from awsglue.utils import getResolvedOptions
from pyspark.context import SparkContext
from awsglue.context import GlueContext
from awsglue.dynamicframe import DynamicFrame
from awsglue.job import Job
from pyspark.sql.functions import current_timestamp
## @params: [TempDir, JOB_NAME]
args = getResolvedOptions(sys.argv, ['TempDir','JOB_NAME'])
sc = SparkContext()
glueContext = GlueContext(sc)
spark = glueContext.spark_session
job = Job(glueContext)
job.init(args['JOB_NAME'], args)
datasource0 = glueContext.create_dynamic_frame.from_catalog(database = "sampledb", table_name = "abs", transformation_ctx = "datasource0")
applymapping1 = ApplyMapping.apply(frame = datasource0, mappings = [("ColumnA", "char", "ColumnA", "char"), ("ColumnB", "char", "ColumnB", "char")], transformation_ctx = "applymapping1")
resolvechoice2 = ResolveChoice.apply(frame = applymapping1, choice = "make_cols", transformation_ctx = "resolvechoice2")
dropnullfields3 = DropNullFields.apply(frame = resolvechoice2, transformation_ctx = "dropnullfields3")
# Convert to a Spark DataFrame, add a TimeStamp column with the current timestamp,
# then convert back to a DynamicFrame so it can be written with the Glue sink
timestampedDf = dropnullfields3.toDF().withColumn("TimeStamp", current_timestamp())
timestamped4 = DynamicFrame.fromDF(timestampedDf, glueContext, "timestamped4")
datasink4 = glueContext.write_dynamic_frame.from_jdbc_conf(frame = timestamped4, catalog_connection = "TESTDB", connection_options = {"dbtable": "TABLEA", "database": "anasightprd01"}, redshift_tmp_dir = args["TempDir"], transformation_ctx = "datasink4")
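If you want to confirm the column was added before the write, and you use job bookmarks, you would normally also finish the script with job.commit(). A minimal sketch, assuming the frame and column names from the script above:

# Inspect the schema and a few rows to confirm the TimeStamp column exists
timestamped4.printSchema()
timestamped4.toDF().select("ColumnA", "TimeStamp").show(5, truncate=False)

# Commit so Glue job bookmarks record this run
job.commit()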