【问题标题】:PERMISSION_DENIED: Permission 'TABLES_UPDATE_DATA'(权限被拒绝:TABLES_UPDATE_DATA)
【发布时间】:2021-07-30 16:14:53
【问题描述】:

我正在尝试使用用 Java 编写的云函数将 JSON 对象插入 Google BigQuery。但是,代码给出了一个奇怪的权限错误,我想确认为 Cloud 函数提供了所有 Bigquery 权限,以便写入表。 ProjectID、Dataset 名称和表名称也经过验证且正确无误。

我在从 Google Cloud Function 运行以下代码时遇到运行时异常。请帮忙。 错误:

2021-05-08 22:52:45.674 ISTTopicReaderGCPJFunctionesaj66v5ty43 OnError called: com.google.api.gax.rpc.PermissionDeniedException: io.grpc.StatusRuntimeException: PERMISSION_DENIED: Permission 'TABLES_UPDATE_DATA' denied on resource '<removed>' (or it may not exist). OnError called: com.google.api.gax.rpc.PermissionDeniedException: io.grpc.StatusRuntimeException: PERMISSION_DENIED: Permission 'TABLES_UPDATE_DATA' denied on resource '<removed>' (or it may not exist).

package functions;

import com.google.api.core.ApiFuture;
import com.google.cloud.bigquery.BigQuery;
import com.google.cloud.bigquery.BigQueryOptions;
import com.google.cloud.bigquery.Schema;
import com.google.cloud.bigquery.Table;
import com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse;
import com.google.cloud.bigquery.storage.v1beta2.BigQueryWriteClient;
import com.google.cloud.bigquery.storage.v1beta2.CreateWriteStreamRequest;
import com.google.cloud.bigquery.storage.v1beta2.JsonStreamWriter;
import com.google.cloud.bigquery.storage.v1beta2.TableName;
import com.google.cloud.bigquery.storage.v1beta2.WriteStream;
import com.google.protobuf.Descriptors.DescriptorValidationException;
import java.io.IOException;
import java.util.concurrent.ExecutionException;
import org.json.JSONArray;
import org.json.JSONObject;

//import org.json.simple.JSONArray;
//import org.json.simple.JSONObject;
//import org.json.simple.parser.JSONParser;

import java.util.*;

public class WriteCommittedStream {

  /**
   * Appends every {@link JSONArray} in {@code streamHashSet} to the given BigQuery table using the
   * Storage Write API default (committed) stream.
   *
   * <p>Each element of {@code streamHashSet} must be a {@code JSONArray} of {@code JSONObject}s
   * whose keys match the destination table's schema.
   *
   * @param projectId GCP project that owns the destination table
   * @param datasetName BigQuery dataset name
   * @param tableName BigQuery table name
   * @param streamHashSet batches of rows to append
   * @return {@code true} if all batches were appended, {@code false} if the table was not found or
   *     an append failed
   * @throws DescriptorValidationException if the schema cannot be converted to a descriptor
   * @throws InterruptedException if interrupted while waiting for an append to complete
   * @throws IOException if the stream writer cannot be created
   */
  public static boolean writeCommittedStreamToBQ(String projectId, String datasetName, String tableName,
      HashSet<JSONArray> streamHashSet) throws DescriptorValidationException, InterruptedException, IOException {

    try {
      System.out.println("projectId:" + projectId);
      System.out.println("datasetName:" + datasetName);
      System.out.println("tableName:" + tableName);

      // Look up the destination table so we can derive the write schema from it.
      BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
      Table table = bigquery.getTable(datasetName, tableName);
      if (table == null) {
        // getTable returns null (rather than throwing) when the table does not exist.
        System.out.println("Table not found: " + datasetName + "." + tableName);
        return false;
      }
      TableName parentTable = TableName.of(projectId, datasetName, tableName);
      Schema schema = table.getDefinition().getSchema();

      System.out.println("Schema:" + schema.toString());
      System.out.println("Table:" + parentTable.toString());

      // The default stream commits rows as they are appended; no explicit stream
      // creation or finalization is needed.
      try (JsonStreamWriter writer = JsonStreamWriter.newBuilder(parentTable.toString(), schema).createDefaultStream()
          .build()) {

        // Append each batch and block until the server acknowledges it, so that a
        // permission or schema error surfaces here as an ExecutionException.
        for (JSONArray jsonArr : streamHashSet) {
          System.out.println("Appending batch: " + jsonArr);
          ApiFuture<AppendRowsResponse> future = writer.append(jsonArr);
          AppendRowsResponse response = future.get();
          System.out.println("Appended records successfully." + response.toString());
        }
      }

      return true;
    } catch (ExecutionException e) {
      // If the wrapped exception is a StatusRuntimeException, check the state of the
      // operation. If the state is INTERNAL, CANCELLED, or ABORTED, you can retry.
      // See: https://grpc.github.io/grpc-java/javadoc/io/grpc/StatusRuntimeException.html
      System.out.println("Failed to append records. \n" );
      e.printStackTrace();
      return false;
    }
  }
}

【问题讨论】:

  • 确保您的 CF 和 BQ 在同一个项目上。另外,您在项目中拥有哪些角色级别?
  • 是的,两者都在同一个项目中

标签: java json google-bigquery google-cloud-functions


【解决方案1】:

嗯,这应该适合你。

首先,如果您需要两个 Google Cloud API/服务(例如 CF 和 BQ)相互交互,您需要创建一个服务帐户(Service Account),然后为该帐户授予一个预定义角色——在您的案例中,即与 BigQuery 交互所需的角色(BigQuery Admin,roles/bigquery.admin)。

解决方案:

  1. 为 CF 创建一个服务帐户。

  2. 您将获得一个包含凭据信息和电子邮件地址的 json 文件。

  3. 提供此电子邮件地址 BigQuery Admin-roles/bigquery.admin。

  4. 在您的 CF 代码中包含要从您的 CF 中使用的所有凭据信息 Check this Samples Code from Google cloud

  5. 部署您的 CF。

这个过程适用于我的 Python。

更多信息:

Understanding service accounts

【讨论】:

  • 下载服务帐户密钥并将其添加到代码中不是安全方式,我们的客户不会允许这种方式。
猜你喜欢
  • 2021-10-03
  • 2020-11-05
  • 2020-09-25
  • 1970-01-01
  • 1970-01-01
  • 2019-01-06
  • 1970-01-01
  • 1970-01-01
  • 1970-01-01
相关资源
最近更新 更多