
I get a 400 Bad Request error:

Exception in thread "main" com.google.api.client.googleapis.json.GoogleJsonResponseException: 400 Bad Request

{
  "code" : 400,
  "errors" : [ {
    "domain" : "global",
    "message" : "Bad Request",
    "reason" : "badRequest"
  } ],
  "message" : "Bad Request"
}

I'm not sure how to see the full request, but using Job's toPrettyString() method I get:

{configuration=
  {load=
    {
      createDisposition=CREATE_IF_NEEDED, 
      destinationTable={
        datasetId=vcf1, 
        projectId=x8-alien-rainfall-3, 
        tableId=NewTable
      }, 
      encoding=UTF-8,
      maxBadRecords=10, 
      schema={
        fields=[
          {name=sample_id, type=String}, 
          {name=chromosome, type=String}, 
          {name=start_pos, type=Integer}, 
          {name=end_pos, type=Integer}, 
          {name=reference, type=String}, 
          {name=observed, type=String}, 
          {name=quality, type=Float}, 
          {name=filter, type=String}, 
          {name=zygosity, type=String}, 
          {name=refGene_function, type=String}
        ]
      }, 
      skipLeadingRows=1, 
      sourceUris=[gs://vcfs/test_exome_part1.csv]
    }
  }, 
  jobReference={projectId=x8-alien-rainfall-3}
}

I set up my request using the instructions here: Load data from Google Cloud Storage to BigQuery using Java. The actual code is shown below:

  public static void loadCsvAsNewTable(Bigquery bigquery,
                                Integer skipLeadingRows,
                                Integer maxBadRecords)
      throws IOException {

    String encoding = "UTF-8";
    String csvFile = "gs://vcfs/test_exome_part1.csv";
    String datasetId = "vcf1";
    String tableId = "NewTable";

    Job insertJob = new Job();
    insertJob.setJobReference(new JobReference().setProjectId(PROJECT_ID));
    JobConfiguration config = new JobConfiguration();
    JobConfigurationLoad loadConfig = new JobConfigurationLoad();
    config.setLoad(loadConfig);

    List<String> sources = new ArrayList<String>();
    sources.add(csvFile);
    loadConfig.setSourceUris(sources);

    TableReference destinationTable = new TableReference();
    destinationTable.setDatasetId(datasetId);
    destinationTable.setTableId(tableId);
    destinationTable.setProjectId(PROJECT_ID);
    loadConfig.setDestinationTable(destinationTable);
    loadConfig.setSchema(tableSchema());
    loadConfig.setCreateDisposition("CREATE_IF_NEEDED");

    if (skipLeadingRows != null) {
      loadConfig.setSkipLeadingRows(skipLeadingRows);
    }
    if (maxBadRecords != null) {
      loadConfig.setMaxBadRecords(maxBadRecords);
    }

    loadConfig.setEncoding(encoding);
    config.setLoad(loadConfig);
    insertJob.setConfiguration(config);

    System.out.println(insertJob.toPrettyString());

    Insert insert = bigquery.jobs().insert(PROJECT_ID, insertJob);
    insert.setProjectId(PROJECT_ID);

    println("Starting load job.");
    Job job = insert.execute();
    if (isJobRunning(job)) {
      Job doneJob = waitForJob(bigquery, PROJECT_ID, job.getJobReference());
      println("Done: " + doneJob.toString());
    } else {
      println("Error: " + job.toString());
    }
  }
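
isJobRunning and waitForJob are helper methods that aren't shown here. A minimal sketch of what they might look like, assuming the standard jobs().get() call and the PENDING/RUNNING/DONE job states of the BigQuery v2 API (the method bodies and polling interval are assumptions, not the question's actual code):

  private static boolean isJobRunning(Job job) {
    // A freshly inserted job is either PENDING or RUNNING until it completes.
    String state = job.getStatus().getState();
    return "PENDING".equals(state) || "RUNNING".equals(state);
  }

  private static Job waitForJob(Bigquery bigquery, String projectId,
                                JobReference jobRef) throws IOException {
    while (true) {
      // Re-fetch the job until BigQuery reports it as DONE.
      Job polled = bigquery.jobs().get(projectId, jobRef.getJobId()).execute();
      if ("DONE".equals(polled.getStatus().getState())) {
        return polled;
      }
      try {
        Thread.sleep(1000);  // poll roughly once per second
      } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
        throw new IOException("Interrupted while waiting for job", e);
      }
    }
  }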

I'm able to query a table using the same GoogleCredential, which uses a Service Account approach.
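
For reference, a Bigquery client authorized with a service-account GoogleCredential in this client library typically looks something like the sketch below; the key file, service-account email, and application name are placeholders, not values from the question:

  import java.io.File;
  import java.util.Collections;

  import com.google.api.client.googleapis.auth.oauth2.GoogleCredential;
  import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport;
  import com.google.api.client.http.HttpTransport;
  import com.google.api.client.json.JsonFactory;
  import com.google.api.client.json.jackson2.JacksonFactory;
  import com.google.api.services.bigquery.Bigquery;
  import com.google.api.services.bigquery.BigqueryScopes;

  // Builds a Bigquery client from a service-account P12 key (placeholder values).
  private static Bigquery createBigqueryClient() throws Exception {
    HttpTransport transport = GoogleNetHttpTransport.newTrustedTransport();
    JsonFactory jsonFactory = JacksonFactory.getDefaultInstance();

    GoogleCredential credential = new GoogleCredential.Builder()
        .setTransport(transport)
        .setJsonFactory(jsonFactory)
        .setServiceAccountId("my-service-account@developer.gserviceaccount.com")
        .setServiceAccountScopes(Collections.singleton(BigqueryScopes.BIGQUERY))
        .setServiceAccountPrivateKeyFromP12File(new File("key.p12"))
        .build();

    return new Bigquery.Builder(transport, jsonFactory, credential)
        .setApplicationName("bigquery-load-example")
        .build();
  }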

Any assistance would be greatly appreciated.


2 Answers


The field types specified in the schema portion of the request must be lowercase: string instead of String.
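
For example, a tableSchema() along the lines of the sketch below uses lowercase type names as this answer describes (the method name simply mirrors the one referenced in the question):

  private static TableSchema tableSchema() {
    // Lowercase type names ("string", "integer", "float") rather than the
    // Java-style "String"/"Integer"/"Float" shown in the question's output.
    List<TableFieldSchema> fields = new ArrayList<TableFieldSchema>();
    fields.add(new TableFieldSchema().setName("sample_id").setType("string"));
    fields.add(new TableFieldSchema().setName("chromosome").setType("string"));
    fields.add(new TableFieldSchema().setName("start_pos").setType("integer"));
    fields.add(new TableFieldSchema().setName("end_pos").setType("integer"));
    fields.add(new TableFieldSchema().setName("reference").setType("string"));
    fields.add(new TableFieldSchema().setName("observed").setType("string"));
    fields.add(new TableFieldSchema().setName("quality").setType("float"));
    fields.add(new TableFieldSchema().setName("filter").setType("string"));
    fields.add(new TableFieldSchema().setName("zygosity").setType("string"));
    fields.add(new TableFieldSchema().setName("refGene_function").setType("string"));
    return new TableSchema().setFields(fields);
  }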

answered 2014-10-03T17:45:12.350

Exception code 400 can also indicate billingTierLimitExceeded.

This error is returned when you try to run a high-compute query that exceeds the project's maximum billing tier.

Troubleshooting: reduce the amount of computation done per input byte, or enable high-compute queries to allow more computation per byte.

You can refer to the troubleshooting errors reference here.
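
If that is the cause, the billing tier can be raised on a query job's configuration, roughly like the sketch below (maximumBillingTier applies to query jobs, not load jobs; the query text and tier value are examples only):

  // Allow a higher-compute query by raising maximumBillingTier (the default is 1).
  JobConfigurationQuery queryConfig = new JobConfigurationQuery()
      .setQuery("SELECT sample_id, COUNT(*) FROM [vcf1.NewTable] GROUP BY sample_id")
      .setMaximumBillingTier(2);

  Job queryJob = new Job()
      .setConfiguration(new JobConfiguration().setQuery(queryConfig));

  bigquery.jobs().insert(PROJECT_ID, queryJob).execute();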

answered 2017-02-13T19:22:33.257