Yes, you can do this with `Client.load_table_from_dataframe`:
import random

from google.cloud import bigquery


def save_df_to_bq(args, dataframe, csv_name):
    print("Length of the incoming dataframe:", len(dataframe))
    print(dataframe.iloc[[0]])  # peek at the first row

    client = bigquery.Client()
    # dataset_id and project_id must be defined elsewhere (e.g. taken from args)
    dataset_ref = client.dataset(dataset_id, project_id)

    # Replace 'dttype' with real BigQuery types such as 'STRING', 'INTEGER', or 'FLOAT'.
    schema = [
        bigquery.SchemaField('col_a', 'dttype'),
        bigquery.SchemaField('col_b', 'dttype'),
        bigquery.SchemaField('col_c', 'dttype'),
    ]

    # Generate a random numeric ID for the destination table.
    acc_table_id = str(random.randint(100000, 100000 * 1000000))

    # Optional: create the table explicitly with the schema first.
    # load_table_from_dataframe creates the table for you if it does not exist.
    # table_ref = dataset_ref.table(acc_table_id)
    # table = bigquery.Table(table_ref, schema=schema)
    # table = client.create_table(table)  # API request
    # print("Table created..")

    acc_table_ref = dataset_ref.table(acc_table_id)

    # Requires pyarrow to serialize the dataframe for the load job.
    job = client.load_table_from_dataframe(dataframe, acc_table_ref, location="US")
    job.result()  # wait for the load job to finish
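
For completeness, a minimal usage sketch. The project, dataset, and column names here are hypothetical placeholders; the columns just need to match whatever schema your table uses, and `args` is unused in the function above so `None` is passed:

import pandas as pd

project_id = 'my-project'    # hypothetical project ID
dataset_id = 'my_dataset'    # hypothetical dataset ID

# Hypothetical sample data; column names should match your schema.
df = pd.DataFrame({
    'col_a': ['x', 'y'],
    'col_b': [1, 2],
    'col_c': [0.5, 1.5],
})

save_df_to_bq(None, df, 'sample.csv')

Note that `load_table_from_dataframe` needs the `pyarrow` package installed, and the client picks up credentials from `GOOGLE_APPLICATION_CREDENTIALS` as usual.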