Получение ошибки 400 при импорте Sqoop из PostgreSQL в S3 - PullRequest
0 голосов
/ 13 января 2020

Я использую приведенную ниже команду Sqoop для импорта данных из PostgreSQL в S3:

sqoop import -Dfs.s3a.access.key=****** -Dfs.s3a.secret.key=******* --connect jdbc:postgresql://localhost:5432/acko_apps --username root --password root --table datamigrator_globalbusinessfiletracking --target-dir s3a:///

но я получаю ошибку 400 (Bad Request) от Amazon S3:

20/01/13 09:15:43 ERROR sqoop.Sqoop: Got exception running Sqoop: com.amazonaws.services.s3.model.AmazonS3Exception: Status Code: 400, AWS Service: Amazon S3, AWS Request ID: F98FE1639E39AC4D, AWS Error Code: null, AWS Error Message: Bad Request, S3 Extended Request ID: l+4A7xP/YyIwDoJquudTd5tmGHmZw2G85Ov5e9tbZJCCH+wltr8PzMqB5kbPX6qvaWK9ug+J+po=
com.amazonaws.services.s3.model.AmazonS3Exception: Status Code: 400, AWS Service: Amazon S3, AWS Request ID: F98FE1639E39AC4D, AWS Error Code: null, AWS Error Message: Bad Request, S3 Extended Request ID: l+4A7xP/YyIwDoJquudTd5tmGHmZw2G85Ov5e9tbZJCCH+wltr8PzMqB5kbPX6qvaWK9ug+J+po=
    at com.amazonaws.http.AmazonHttpClient.handleErrorResponse(AmazonHttpClient.java:798)
    at com.amazonaws.http.AmazonHttpClient.executeHelper(AmazonHttpClient.java:421)
    at com.amazonaws.http.AmazonHttpClient.execute(AmazonHttpClient.java:232)
    at com.amazonaws.services.s3.AmazonS3Client.invoke(AmazonS3Client.java:3528)
    at com.amazonaws.services.s3.AmazonS3Client.headBucket(AmazonS3Client.java:1031)
    at com.amazonaws.services.s3.AmazonS3Client.doesBucketExist(AmazonS3Client.java:994)
    at org.apache.hadoop.fs.s3a.S3AFileSystem.initialize(S3AFileSystem.java:297)
    at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:2653)
    at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:92)
    at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:2687)
    at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:2669)
    at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:371)
    at org.apache.hadoop.fs.Path.getFileSystem(Path.java:295)
    at org.apache.hadoop.mapreduce.lib.output.FileOutputFormat.setOutputPath(FileOutputFormat.java:160)
    at org.apache.sqoop.mapreduce.ImportJobBase.configureOutputFormat(ImportJobBase.java:156)
    at org.apache.sqoop.mapreduce.ImportJobBase.runImport(ImportJobBase.java:259)
    at org.apache.sqoop.manager.SqlManager.importTable(SqlManager.java:673)
    at org.apache.sqoop.manager.PostgresqlManager.importTable(PostgresqlManager.java:127)
    at org.apache.sqoop.tool.ImportTool.importTable(ImportTool.java:497)
    at org.apache.sqoop.tool.ImportTool.run(ImportTool.java:605)
    at org.apache.sqoop.Sqoop.run(Sqoop.java:143)
    at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:70)
    at org.apache.sqoop.Sqoop.runSqoop(Sqoop.java:179)
    at org.apache.sqoop.Sqoop.runTool(Sqoop.java:218)
    at org.apache.sqoop.Sqoop.runTool(Sqoop.java:227)
    at org.apache.sqoop.Sqoop.main(Sqoop.java:236)
...