I'm running into an error in my Python Lambda function. I've tried a number of things, but the error persists. How can I fix it?
The code ran fine for many months ... and then suddenly stopped working.
The error message:
Traceback (most recent call last):
  File "/var/task/lambda_function.py", line 35, in lambda_handler
    nt("Lambda Function Name : " + context.function_name)
NameError: global name 'nt' is not defined
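From what I can tell, a NameError like this means the name nt is never defined or imported anywhere before line 35 runs. A minimal snippet (the function and argument here are just for illustration) reproduces the same class of error:

def handler():
    # 'nt' is never defined or imported anywhere, so calling it raises
    # NameError: global name 'nt' is not defined  (Python 2 wording)
    nt("hello")

handler()

The full code is below; the line the traceback points at is the first line of lambda_handler.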
from __future__ import print_function
# imports
import boto3
import time
import os
import urllib
# environment variables
# REGION NAME
REGION = os.environ['REGION']
# SNS topic to post email alerts to
SNS_TOPIC_ARN = os.environ['SNS_TOPIC_ARN']
# S3 table location
#S3_TABLE_LOCATION = os.environ['S3_TABLE_LOCATION']
# Hive Database
HIVE_DATABASE = os.environ['HIVE_DATABASE']
# Hive Table
HIVE_TABLE = os.environ['HIVE_TABLE']
# DEBUG
DEBUG = os.environ['DEBUG']
# Clients
sns_client = boto3.client('sns')
s3_client = boto3.client('s3')
athena_client = boto3.client('athena')
def lambda_handler(event, context):
    nt("Lambda Function Name : " + context.function_name)
    bucket = event['Records'][0]['s3']['bucket']['name']
    key = urllib.unquote_plus(event['Records'][0]['s3']['object']['key'].encode('utf8'))
    source = key.split("/")[0]
    print("Source variable value in athena_add_partition : " + source)
    # route by the top-level prefix of the S3 key
    if str(source) == "prd_parquet":
        return partition_antigo_doo(event, context)
    elif str(source) == "DATA_LAKE":
        return partition_datalake(event, context)
def partition_datalake(event, context):
    bucket = event['Records'][0]['s3']['bucket']['name']
    key = urllib.unquote_plus(event['Records'][0]['s3']['object']['key'].encode('utf8'))
    source = key.split("/")[0]
    table = key.split("/")[1]
    print("Table variable value in athena_add_partition : " + table)
    if str(table) == "API_0000_CKF_V1_CHECKLISTFACIL_HODOS":
        return partition_checklistfacil(event, context)
    elif str(table) == "BIGQUERY_0000_GA_V1_ANALYTICS_SESSAO":
        return partition_bigquery_ga_sessao(event, context)
    elif str(table) == "BIGQUERY_0000_GA_V1_ANALYTICS_PEDIDO":
        return partition_bigquery_ga_pedido(event, context)
    elif str(table) == "ARCH_0000_V1_NPS_LOJAS_FISICAS":
        return partition_arch_nps_lojas_fisicas(event, context)
    elif str(table) == "ARCH_0000_V1_NPS_ECOMM_OUTROS":
        return partition_arch_nps_ecomm_outros(event, context)
    elif str(table) == "ARCH_0000_V1_NPS_ECOMM_CLICK":
        return partition_arch_nps_ecomm_click(event, context)
    elif str(table) == "ARCH_0000_V1_NPS_CONTAGENS":
        return partition_arch_nps_contagens(event, context)
    elif str(table) == "ARCH_0000_API_LINX_TRACKING":
        return partition_arch_0000_api_linx_tracking(event, context)
def partition_checklistfacil(event, context):
    bucket = event['Records'][0]['s3']['bucket']['name']
    key = urllib.unquote_plus(event['Records'][0]['s3']['object']['key'].encode('utf8'))
    source = key.split("/")[0]
    table = key.split("/")[1]
    year = key.split("/")[2].split("=")[1]
    month = key.split("/")[3].split("=")[1]
    day = key.split("/")[4].split("=")[1]
    filename = key.split("/")[5]
    query = 'ALTER TABLE api_0000_ckf_v1_checklistfacil_hodos ADD IF NOT EXISTS PARTITION (ano=' + str(year) + ', mes=' + str(month) + ', dia=' + str(day) + ') ' + 'LOCATION ' + "'" + 's3://' + str(bucket) + "/" + str(source) + "/" + str(table) + "/ano=" + str(year) + "/mes=" + str(month) + "/dia=" + str(day) + "/" + "'" + ';'
    if DEBUG == "true":
        msg = "### Debug mode enabled ## " + '\n'
        print(msg)
        print("HIVE_DATABASE: centauro_spectrum_db")
        print("HIVE_TABLE: api_0000_ckf_v1_checklistfacil_hodos")
        print(str(year))
        print(str(month))
        print(str(day))
        print("Athena Query: " + str(query))
    response = athena_client.start_query_execution(
        QueryString=query,
        QueryExecutionContext={'Database': 'centauro_spectrum_db'},
        ResultConfiguration={'OutputLocation': 's3://it.centauro.app.doo.failed/lambda_add_partition'},
        WorkGroup='w_lambda')
    return response
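The partition_* functions below all build an ALTER TABLE ... ADD IF NOT EXISTS PARTITION statement the same way (the antigo_doo and linx_tracking ones use partition_0..partition_3 plus an hour segment instead of ano/mes/dia). For illustration, with placeholder values such as bucket my-example-bucket and key DATA_LAKE/API_0000_CKF_V1_CHECKLISTFACIL_HODOS/ano=2020/mes=01/dia=15/part-00000.parquet, the generated query would be:

ALTER TABLE api_0000_ckf_v1_checklistfacil_hodos ADD IF NOT EXISTS PARTITION (ano=2020, mes=01, dia=15) LOCATION 's3://my-example-bucket/DATA_LAKE/API_0000_CKF_V1_CHECKLISTFACIL_HODOS/ano=2020/mes=01/dia=15/';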
def partition_bigquery_ga_sessao(event, context):
    bucket = event['Records'][0]['s3']['bucket']['name']
    key = urllib.unquote_plus(event['Records'][0]['s3']['object']['key'].encode('utf8'))
    source = key.split("/")[0]
    table = key.split("/")[1]
    year = key.split("/")[2].split("=")[1]
    month = key.split("/")[3].split("=")[1]
    day = key.split("/")[4].split("=")[1]
    filename = key.split("/")[5]
    query = 'ALTER TABLE bigquery_0000_ga_v1_analytics_sessao ADD IF NOT EXISTS PARTITION (ano=' + str(year) + ', mes=' + str(month) + ', dia=' + str(day) + ') ' + 'LOCATION ' + "'" + 's3://' + str(bucket) + "/" + str(source) + "/" + str(table) + "/ano=" + str(year) + "/mes=" + str(month) + "/dia=" + str(day) + "/" + "'" + ';'
    if DEBUG == "true":
        msg = "### Debug mode enabled ## " + '\n'
        print(msg)
        print("HIVE_DATABASE: centauro_spectrum_db")
        print("HIVE_TABLE: bigquery_0000_ga_v1_analytics_sessao")
        print(str(year))
        print(str(month))
        print(str(day))
        print("Athena Query: " + str(query))
    response = athena_client.start_query_execution(
        QueryString=query,
        QueryExecutionContext={'Database': 'centauro_spectrum_db'},
        ResultConfiguration={'OutputLocation': 's3://it.centauro.app.doo.failed/lambda_add_partition'},
        WorkGroup='w_lambda')
    return response
def partition_bigquery_ga_pedido(event, context):
    bucket = event['Records'][0]['s3']['bucket']['name']
    key = urllib.unquote_plus(event['Records'][0]['s3']['object']['key'].encode('utf8'))
    source = key.split("/")[0]
    table = key.split("/")[1]
    year = key.split("/")[2].split("=")[1]
    month = key.split("/")[3].split("=")[1]
    day = key.split("/")[4].split("=")[1]
    filename = key.split("/")[5]
    query = 'ALTER TABLE bigquery_0000_ga_v1_analytics_pedido ADD IF NOT EXISTS PARTITION (ano=' + str(year) + ', mes=' + str(month) + ', dia=' + str(day) + ') ' + 'LOCATION ' + "'" + 's3://' + str(bucket) + "/" + str(source) + "/" + str(table) + "/ano=" + str(year) + "/mes=" + str(month) + "/dia=" + str(day) + "/" + "'" + ';'
    if DEBUG == "true":
        msg = "### Debug mode enabled ## " + '\n'
        print(msg)
        print("HIVE_DATABASE: centauro_spectrum_db")
        print("HIVE_TABLE: bigquery_0000_ga_v1_analytics_pedido")
        print(str(year))
        print(str(month))
        print(str(day))
        print("Athena Query: " + str(query))
    response = athena_client.start_query_execution(
        QueryString=query,
        QueryExecutionContext={'Database': 'centauro_spectrum_db'},
        ResultConfiguration={'OutputLocation': 's3://it.centauro.app.doo.failed/lambda_add_partition'},
        WorkGroup='w_lambda')
    return response
def partition_antigo_doo(event, context):
    print("Lambda Function Name : " + context.function_name)
    bucket = event['Records'][0]['s3']['bucket']['name']
    key = urllib.unquote_plus(event['Records'][0]['s3']['object']['key'].encode('utf8'))
    source = key.split("/")[0]
    year = key.split("/")[1]
    month = key.split("/")[2]
    day = key.split("/")[3]
    hour = key.split("/")[4]
    filename = key.split("/")[5]
    query = 'ALTER TABLE ' + str(HIVE_TABLE) + ' ADD IF NOT EXISTS PARTITION (partition_0=' + str(year) + ', partition_1=' + str(month) + ',partition_2=' + str(day) + ',partition_3=' + str(hour) + ') ' + 'LOCATION ' + "'" + 's3://' + str(bucket) + "/" + str(source) + "/" + str(year) + "/" + str(month) + "/" + str(day) + "/" + str(hour) + "/" + "'" + ';'
    if DEBUG == "true":
        msg = "### Debug mode enabled ## " + '\n'
        print(msg)
        print("HIVE_DATABASE: " + str(HIVE_DATABASE))
        print("HIVE_TABLE: " + str(HIVE_TABLE))
        print("year: " + str(year))
        print("month: " + str(month))
        print("day: " + str(day))
        print("hour: " + str(hour))
        print("Athena Query: " + str(query))
    response = athena_client.start_query_execution(
        QueryString=query,
        QueryExecutionContext={'Database': HIVE_DATABASE},
        ResultConfiguration={'OutputLocation': 's3://it.centauro.app.doo.failed/lambda_add_partition'},
        WorkGroup='w_lambda')
    return response
def partition_arch_nps_lojas_fisicas(event, context):
    bucket = event['Records'][0]['s3']['bucket']['name']
    key = urllib.unquote_plus(event['Records'][0]['s3']['object']['key'].encode('utf8'))
    source = key.split("/")[0]
    table = key.split("/")[1]
    year = key.split("/")[2].split("=")[1]
    month = key.split("/")[3].split("=")[1]
    day = key.split("/")[4].split("=")[1]
    filename = key.split("/")[5]
    query = 'ALTER TABLE arch_0000_v1_nps_lojas_fisicas ADD IF NOT EXISTS PARTITION (ano=' + str(year) + ', mes=' + str(month) + ', dia=' + str(day) + ') ' + 'LOCATION ' + "'" + 's3://' + str(bucket) + "/" + str(source) + "/" + str(table) + "/ano=" + str(year) + "/mes=" + str(month) + "/dia=" + str(day) + "/" + "'" + ';'
    if DEBUG == "true":
        msg = "### Debug mode enabled ## " + '\n'
        print(msg)
        print("HIVE_DATABASE: centauro_spectrum_db")
        print("HIVE_TABLE: arch_0000_v1_nps_lojas_fisicas")
        print(str(year))
        print(str(month))
        print(str(day))
        print("Athena Query: " + str(query))
    response = athena_client.start_query_execution(
        QueryString=query,
        QueryExecutionContext={'Database': 'centauro_spectrum_db'},
        ResultConfiguration={'OutputLocation': 's3://it.centauro.app.doo.failed/lambda_add_partition'},
        WorkGroup='w_lambda')
    return response
def partition_arch_nps_ecomm_outros(event, context):
    bucket = event['Records'][0]['s3']['bucket']['name']
    key = urllib.unquote_plus(event['Records'][0]['s3']['object']['key'].encode('utf8'))
    source = key.split("/")[0]
    table = key.split("/")[1]
    year = key.split("/")[2].split("=")[1]
    month = key.split("/")[3].split("=")[1]
    day = key.split("/")[4].split("=")[1]
    filename = key.split("/")[5]
    query = 'ALTER TABLE arch_0000_v1_nps_ecomm_outros ADD IF NOT EXISTS PARTITION (ano=' + str(year) + ', mes=' + str(month) + ', dia=' + str(day) + ') ' + 'LOCATION ' + "'" + 's3://' + str(bucket) + "/" + str(source) + "/" + str(table) + "/ano=" + str(year) + "/mes=" + str(month) + "/dia=" + str(day) + "/" + "'" + ';'
    if DEBUG == "true":
        msg = "### Debug mode enabled ## " + '\n'
        print(msg)
        print("HIVE_DATABASE: centauro_spectrum_db")
        print("HIVE_TABLE: arch_0000_v1_nps_ecomm_outros")
        print(str(year))
        print(str(month))
        print(str(day))
        print("Athena Query: " + str(query))
    response = athena_client.start_query_execution(
        QueryString=query,
        QueryExecutionContext={'Database': 'centauro_spectrum_db'},
        ResultConfiguration={'OutputLocation': 's3://it.centauro.app.doo.failed/lambda_add_partition'},
        WorkGroup='w_lambda')
    return response
def partition_arch_nps_ecomm_click(event, context):
    bucket = event['Records'][0]['s3']['bucket']['name']
    key = urllib.unquote_plus(event['Records'][0]['s3']['object']['key'].encode('utf8'))
    source = key.split("/")[0]
    table = key.split("/")[1]
    year = key.split("/")[2].split("=")[1]
    month = key.split("/")[3].split("=")[1]
    day = key.split("/")[4].split("=")[1]
    filename = key.split("/")[5]
    query = 'ALTER TABLE arch_0000_v1_nps_ecomm_click ADD IF NOT EXISTS PARTITION (ano=' + str(year) + ', mes=' + str(month) + ', dia=' + str(day) + ') ' + 'LOCATION ' + "'" + 's3://' + str(bucket) + "/" + str(source) + "/" + str(table) + "/ano=" + str(year) + "/mes=" + str(month) + "/dia=" + str(day) + "/" + "'" + ';'
    if DEBUG == "true":
        msg = "### Debug mode enabled ## " + '\n'
        print(msg)
        print("HIVE_DATABASE: centauro_spectrum_db")
        print("HIVE_TABLE: arch_0000_v1_nps_ecomm_click")
        print(str(year))
        print(str(month))
        print(str(day))
        print("Athena Query: " + str(query))
    response = athena_client.start_query_execution(
        QueryString=query,
        QueryExecutionContext={'Database': 'centauro_spectrum_db'},
        ResultConfiguration={'OutputLocation': 's3://it.centauro.app.doo.failed/lambda_add_partition'},
        WorkGroup='w_lambda')
    return response
def partition_arch_nps_contagens(event, context):
    bucket = event['Records'][0]['s3']['bucket']['name']
    key = urllib.unquote_plus(event['Records'][0]['s3']['object']['key'].encode('utf8'))
    source = key.split("/")[0]
    table = key.split("/")[1]
    year = key.split("/")[2].split("=")[1]
    month = key.split("/")[3].split("=")[1]
    day = key.split("/")[4].split("=")[1]
    filename = key.split("/")[5]
    query = 'ALTER TABLE arch_0000_v1_nps_contagens ADD IF NOT EXISTS PARTITION (ano=' + str(year) + ', mes=' + str(month) + ', dia=' + str(day) + ') ' + 'LOCATION ' + "'" + 's3://' + str(bucket) + "/" + str(source) + "/" + str(table) + "/ano=" + str(year) + "/mes=" + str(month) + "/dia=" + str(day) + "/" + "'" + ';'
    if DEBUG == "true":
        msg = "### Debug mode enabled ## " + '\n'
        print(msg)
        print("HIVE_DATABASE: centauro_spectrum_db")
        print("HIVE_TABLE: arch_0000_v1_nps_contagens")
        print(str(year))
        print(str(month))
        print(str(day))
        print("Athena Query: " + str(query))
    response = athena_client.start_query_execution(
        QueryString=query,
        QueryExecutionContext={'Database': 'centauro_spectrum_db'},
        ResultConfiguration={'OutputLocation': 's3://it.centauro.app.doo.failed/lambda_add_partition'},
        WorkGroup='w_lambda')
    return response
def partition_arch_0000_api_linx_tracking(event, context):
    print("Lambda Function Name : " + context.function_name)
    bucket = event['Records'][0]['s3']['bucket']['name']
    key = urllib.unquote_plus(event['Records'][0]['s3']['object']['key'].encode('utf8'))
    source = key.split("/")[0]
    table = key.split("/")[1]
    year = key.split("/")[2]
    month = key.split("/")[3]
    day = key.split("/")[4]
    hour = key.split("/")[5]
    filename = key.split("/")[6]
    query = 'ALTER TABLE arch_0000_api_linx_tracking ADD IF NOT EXISTS PARTITION (partition_0=' + str(year) + ', partition_1=' + str(month) + ',partition_2=' + str(day) + ',partition_3=' + str(hour) + ') ' + 'LOCATION ' + "'" + 's3://' + str(bucket) + "/" + str(source) + "/" + str(table) + "/" + str(year) + "/" + str(month) + "/" + str(day) + "/" + str(hour) + "/" + "'" + ';'
    if DEBUG == "true":
        msg = "### Debug mode enabled ## " + '\n'
        print(msg)
        print("HIVE_DATABASE: centauro_spectrum_db")
        print("HIVE_TABLE: arch_0000_api_linx_tracking")
        print("year: " + str(year))
        print("month: " + str(month))
        print("day: " + str(day))
        print("hour: " + str(hour))
        print("Athena Query: " + str(query))
    response = athena_client.start_query_execution(
        QueryString=query,
        QueryExecutionContext={'Database': 'centauro_spectrum_db'},
        ResultConfiguration={'OutputLocation': 's3://it.centauro.app.doo.failed/lambda_add_partition'},
        WorkGroup='w_lambda')
    return response
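For reference, the function is triggered by S3 object-created notifications, so a minimal test event matching what the code reads looks like this (bucket and key values are placeholders, not real data):

# Hypothetical test event; only the fields the handler actually reads
test_event = {
    'Records': [{
        's3': {
            'bucket': {'name': 'my-example-bucket'},
            'object': {'key': 'DATA_LAKE/ARCH_0000_V1_NPS_CONTAGENS/ano=2020/mes=01/dia=15/part-00000.csv'}
        }
    }]
}
# lambda_handler(test_event, context) would route this key to
# partition_arch_nps_contagens, but with the current code it fails
# immediately on the nt(...) line with the NameError above.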