From 7e8a9317bf91f29775ed7d0e90e43c2cb2cc623a Mon Sep 17 00:00:00 2001 From: Chris Smith Date: Thu, 18 Oct 2018 14:49:12 -0600 Subject: [PATCH 1/5] Add QueryJob#num_dml_affected_rows [closes #2141] --- ...et_ddl_test.rb => dataset_ddl_dml_test.rb} | 26 ++++++++++++++++--- .../lib/google/cloud/bigquery/query_job.rb | 12 +++++++++ .../cloud/bigquery/project_query_job_test.rb | 1 + .../google/cloud/bigquery/query_job_test.rb | 2 ++ 4 files changed, 38 insertions(+), 3 deletions(-) rename google-cloud-bigquery/acceptance/bigquery/{dataset_ddl_test.rb => dataset_ddl_dml_test.rb} (67%) diff --git a/google-cloud-bigquery/acceptance/bigquery/dataset_ddl_test.rb b/google-cloud-bigquery/acceptance/bigquery/dataset_ddl_dml_test.rb similarity index 67% rename from google-cloud-bigquery/acceptance/bigquery/dataset_ddl_test.rb rename to google-cloud-bigquery/acceptance/bigquery/dataset_ddl_dml_test.rb index 0a125ccedb2b..9d1b3c97f2c8 100644 --- a/google-cloud-bigquery/acceptance/bigquery/dataset_ddl_test.rb +++ b/google-cloud-bigquery/acceptance/bigquery/dataset_ddl_dml_test.rb @@ -14,7 +14,7 @@ require "bigquery_helper" -describe Google::Cloud::Bigquery::Dataset, :ddl, :bigquery do +describe Google::Cloud::Bigquery::Dataset, :ddl_dml, :bigquery do let(:dataset_id) { "#{prefix}_dataset" } let(:dataset) do d = bigquery.dataset dataset_id @@ -25,10 +25,10 @@ end let(:table_id) { "dataset_ddl_table_#{SecureRandom.hex(16)}" } - it "creates and drops a table with ddl stats" do create_job = dataset.query_job "CREATE TABLE #{table_id} (x INT64)" create_job.wait_until_done! + create_job.wont_be :failed? create_job.statement_type.must_equal "CREATE_TABLE" create_job.ddl_operation_performed.must_equal "CREATE" @@ -39,10 +39,29 @@ table_ref.table_id.must_equal table_id table_ref.reference?.must_equal true table_ref.exists?.must_equal true + create_job.num_dml_affected_rows.must_be :nil? 
+ + insert_job = dataset.query_job "INSERT #{table_id} (x) VALUES(101),(102)" + insert_job.wait_until_done! + insert_job.wont_be :failed? + insert_job.statement_type.must_equal "INSERT" + insert_job.num_dml_affected_rows.must_equal 2 + + update_job = dataset.query_job "UPDATE #{table_id} SET x = x + 1 WHERE x IS NOT NULL" + update_job.wait_until_done! + update_job.wont_be :failed? + update_job.statement_type.must_equal "UPDATE" + update_job.num_dml_affected_rows.must_equal 2 + + delete_job = dataset.query_job "DELETE #{table_id} WHERE x = 103" + delete_job.wait_until_done! + delete_job.wont_be :failed? + delete_job.statement_type.must_equal "DELETE" + delete_job.num_dml_affected_rows.must_equal 1 drop_job = dataset.query_job "DROP TABLE #{table_id}" drop_job.wait_until_done! - + drop_job.wont_be :failed? drop_job.statement_type.must_equal "DROP_TABLE" drop_job.ddl_operation_performed.must_equal "DROP" table_ref_2 = create_job.ddl_target_table @@ -52,5 +71,6 @@ table_ref_2.table_id.must_equal table_id table_ref_2.reference?.must_equal true table_ref_2.exists?.must_equal false + drop_job.num_dml_affected_rows.must_be :nil? end end diff --git a/google-cloud-bigquery/lib/google/cloud/bigquery/query_job.rb b/google-cloud-bigquery/lib/google/cloud/bigquery/query_job.rb index bb6101e8abcf..82d0748a4c78 100644 --- a/google-cloud-bigquery/lib/google/cloud/bigquery/query_job.rb +++ b/google-cloud-bigquery/lib/google/cloud/bigquery/query_job.rb @@ -253,6 +253,18 @@ def ddl_target_table Google::Cloud::Bigquery::Table.new_reference_from_gapi table, service end + ## + # The number of rows affected by a DML statement. Present only for DML + # statements `INSERT`, `UPDATE` or `DELETE`. (See {#statement_type}.) + # + # @return [Integer, nil] The number of rows affected by a DML statement, + # or `nil` if the query is not a DML statement. 
+ # + def num_dml_affected_rows + return nil unless @gapi.statistics.query + @gapi.statistics.query.num_dml_affected_rows + end + ## # The table in which the query results are stored. # diff --git a/google-cloud-bigquery/test/google/cloud/bigquery/project_query_job_test.rb b/google-cloud-bigquery/test/google/cloud/bigquery/project_query_job_test.rb index 3e56aefc78f5..c095fd0575d4 100644 --- a/google-cloud-bigquery/test/google/cloud/bigquery/project_query_job_test.rb +++ b/google-cloud-bigquery/test/google/cloud/bigquery/project_query_job_test.rb @@ -47,6 +47,7 @@ job.statement_type.must_be :nil? job.ddl_operation_performed.must_be :nil? job.ddl_target_table.must_be :nil? + job.num_dml_affected_rows.must_be :nil? end it "queries the data with options set" do diff --git a/google-cloud-bigquery/test/google/cloud/bigquery/query_job_test.rb b/google-cloud-bigquery/test/google/cloud/bigquery/query_job_test.rb index 8132fd456af6..07c008144e02 100644 --- a/google-cloud-bigquery/test/google/cloud/bigquery/query_job_test.rb +++ b/google-cloud-bigquery/test/google/cloud/bigquery/query_job_test.rb @@ -59,6 +59,7 @@ job.ddl_target_table.project_id.must_equal "target_project_id" job.ddl_target_table.dataset_id.must_equal "target_dataset_id" job.ddl_target_table.table_id.must_equal "target_table_id" + job.num_dml_affected_rows.must_equal 50 job.statement_type.must_equal "CREATE_TABLE" end @@ -165,6 +166,7 @@ def statistics_query_gapi dataset_id: "target_dataset_id", table_id: "target_table_id" ), + num_dml_affected_rows: 50, # Present only for DML statements INSERT, UPDATE or DELETE. 
query_plan: [ Google::Apis::BigqueryV2::ExplainQueryStage.new( compute_ratio_avg: 1.0, From 6ecf32bf9635dc71edcf96fc686031d48c324e0f Mon Sep 17 00:00:00 2001 From: Chris Smith Date: Thu, 18 Oct 2018 15:08:37 -0600 Subject: [PATCH 2/5] Add ddl/dml docs and examples to #query_job --- .../lib/google/cloud/bigquery/dataset.rb | 32 ++++++++++++++++- .../lib/google/cloud/bigquery/project.rb | 35 ++++++++++++++++++- 2 files changed, 65 insertions(+), 2 deletions(-) diff --git a/google-cloud-bigquery/lib/google/cloud/bigquery/dataset.rb b/google-cloud-bigquery/lib/google/cloud/bigquery/dataset.rb index 0175e7b42f08..fd15438fa893 100644 --- a/google-cloud-bigquery/lib/google/cloud/bigquery/dataset.rb +++ b/google-cloud-bigquery/lib/google/cloud/bigquery/dataset.rb @@ -674,6 +674,8 @@ def tables token: nil, max: nil ## # Queries data by creating a [query # job](https://cloud.google.com/bigquery/docs/query-overview#query_jobs). + # Use this method rather than {#query} for executing DDL/DML statements, + # since this method does not automatically return table data. # # Sets the current dataset as the default dataset in the query. Useful # for using unqualified table names. @@ -874,6 +876,32 @@ def tables token: nil, max: nil # end # end # + # @example Execute a DDL statement: + # require "google/cloud/bigquery" + # + # bigquery = Google::Cloud::Bigquery.new + # + # job = bigquery.query_job "CREATE TABLE my_table (x INT64)" + # + # job.wait_until_done! + # if !job.failed? + # table_ref = job.ddl_target_table + # end + # + # @example Execute a DML statement: + # require "google/cloud/bigquery" + # + # bigquery = Google::Cloud::Bigquery.new + # + # job = bigquery.query_job "UPDATE my_table " \ + # "SET x = x + 1 " \ + # "WHERE x IS NOT NULL" + # + # job.wait_until_done! + # if !job.failed? 
+ # puts job.num_dml_affected_rows + # end + # # @example Query using external data source, set destination: # require "google/cloud/bigquery" # @@ -930,7 +958,9 @@ def query_job query, params: nil, external: nil, # Queries data and waits for the results. In this method, a {QueryJob} # is created and its results are saved to a temporary table, then read # from the table. Timeouts and transient errors are generally handled - # as needed to complete the query. + # as needed to complete the query. Use {#query_job} rather than this + # method for executing DDL/DML statements, since this method + # automatically returns table data. # # Sets the current dataset as the default dataset in the query. Useful # for using unqualified table names. diff --git a/google-cloud-bigquery/lib/google/cloud/bigquery/project.rb b/google-cloud-bigquery/lib/google/cloud/bigquery/project.rb index 4c47a2c5f065..f133c2883c8c 100644 --- a/google-cloud-bigquery/lib/google/cloud/bigquery/project.rb +++ b/google-cloud-bigquery/lib/google/cloud/bigquery/project.rb @@ -98,6 +98,8 @@ def service_account_email ## # Queries data by creating a [query # job](https://cloud.google.com/bigquery/docs/query-overview#query_jobs). + # Use this method rather than {#query} for executing DDL/DML statements, + # since this method does not automatically return table data. # # When using standard SQL and passing arguments using `params`, Ruby # types are mapped to BigQuery types as follows: @@ -302,6 +304,35 @@ def service_account_email # end # end # + # @example Execute a DDL statement: + # require "google/cloud/bigquery" + # + # bigquery = Google::Cloud::Bigquery.new + # + # job = bigquery.query_job "CREATE TABLE " \ + # "`my_dataset.my_table` " \ + # "(x INT64)" + # + # job.wait_until_done! + # if !job.failed? 
+ # table_ref = job.ddl_target_table + # end + # + # @example Execute a DML statement: + # require "google/cloud/bigquery" + # + # bigquery = Google::Cloud::Bigquery.new + # + # job = bigquery.query_job "UPDATE " \ + # "`my_dataset.my_table` " \ + # "SET x = x + 1 " \ + # "WHERE x IS NOT NULL" + # + # job.wait_until_done! + # if !job.failed? + # puts job.num_dml_affected_rows + # end + # # @example Query using external data source, set destination: # require "google/cloud/bigquery" # @@ -356,7 +387,9 @@ def query_job query, params: nil, external: nil, # Queries data and waits for the results. In this method, a {QueryJob} # is created and its results are saved to a temporary table, then read # from the table. Timeouts and transient errors are generally handled - # as needed to complete the query. + # as needed to complete the query. Use {#query_job} rather than this + # method for executing DDL/DML statements, since this method + # automatically returns table data. # # When using standard SQL and passing arguments using `params`, Ruby # types are mapped to BigQuery types as follows: From 564728031ac9d86ce4e244ddaec9f04ea6c6c2db Mon Sep 17 00:00:00 2001 From: Chris Smith Date: Mon, 22 Oct 2018 16:45:31 -0600 Subject: [PATCH 3/5] Add DDL/DML attrs to Data Add job_gapi to Data. Update QueryJob#data to conditionally return empty Data. 
--- .../bigquery/dataset_ddl_dml_test.rb | 72 +++++++++++--- .../lib/google/cloud/bigquery/data.rb | 95 ++++++++++++++++++- .../lib/google/cloud/bigquery/query_job.rb | 16 +++- .../lib/google/cloud/bigquery/table.rb | 2 +- 4 files changed, 162 insertions(+), 23 deletions(-) diff --git a/google-cloud-bigquery/acceptance/bigquery/dataset_ddl_dml_test.rb b/google-cloud-bigquery/acceptance/bigquery/dataset_ddl_dml_test.rb index 9d1b3c97f2c8..e9eb749a5cfb 100644 --- a/google-cloud-bigquery/acceptance/bigquery/dataset_ddl_dml_test.rb +++ b/google-cloud-bigquery/acceptance/bigquery/dataset_ddl_dml_test.rb @@ -24,21 +24,16 @@ d end let(:table_id) { "dataset_ddl_table_#{SecureRandom.hex(16)}" } + let(:table_id_2) { "dataset_ddl_table_#{SecureRandom.hex(16)}" } - it "creates and drops a table with ddl stats" do + it "creates and populates and drops a table with ddl/dml query jobs" do create_job = dataset.query_job "CREATE TABLE #{table_id} (x INT64)" create_job.wait_until_done! create_job.wont_be :failed? create_job.statement_type.must_equal "CREATE_TABLE" create_job.ddl_operation_performed.must_equal "CREATE" - table_ref = create_job.ddl_target_table - table_ref.must_be_kind_of Google::Cloud::Bigquery::Table - table_ref.project_id.must_equal bigquery.project - table_ref.dataset_id.must_equal dataset_id - table_ref.table_id.must_equal table_id - table_ref.reference?.must_equal true - table_ref.exists?.must_equal true + assert_table_ref create_job.ddl_target_table, table_id create_job.num_dml_affected_rows.must_be :nil? insert_job = dataset.query_job "INSERT #{table_id} (x) VALUES(101),(102)" @@ -64,13 +59,60 @@ drop_job.wont_be :failed? 
drop_job.statement_type.must_equal "DROP_TABLE" drop_job.ddl_operation_performed.must_equal "DROP" - table_ref_2 = create_job.ddl_target_table - table_ref_2.must_be_kind_of Google::Cloud::Bigquery::Table - table_ref_2.project_id.must_equal bigquery.project - table_ref_2.dataset_id.must_equal dataset_id - table_ref_2.table_id.must_equal table_id - table_ref_2.reference?.must_equal true - table_ref_2.exists?.must_equal false + assert_table_ref drop_job.ddl_target_table, table_id, exists: false drop_job.num_dml_affected_rows.must_be :nil? end + + it "creates and populates and drops a table with ddl/dml queries" do + create_data = dataset.query "CREATE TABLE #{table_id_2} (x INT64)" + assert_table_ref create_data.ddl_target_table, table_id_2 + create_data.statement_type.must_equal "CREATE_TABLE" + create_data.ddl?.must_equal true + create_data.dml?.must_equal false + create_data.ddl_operation_performed.must_equal "CREATE" + create_data.num_dml_affected_rows.must_be :nil? + create_data.total.must_equal 0 + create_data.next?.must_equal false + create_data.next.must_be :nil? + create_data.all.must_be_kind_of Enumerator + create_data.count.must_equal 0 + create_data.to_a.must_equal [] + + insert_data = dataset.query "INSERT #{table_id_2} (x) VALUES(101),(102)" + insert_data.ddl_target_table.must_be :nil? + insert_data.statement_type.must_equal "INSERT" + insert_data.ddl?.must_equal false + insert_data.dml?.must_equal true + insert_data.ddl_operation_performed.must_be :nil? + insert_data.num_dml_affected_rows.must_equal 2 + insert_data.total.must_equal 0 + insert_data.next?.must_equal false + insert_data.next.must_be :nil? 
+ insert_data.all.must_be_kind_of Enumerator + insert_data.count.must_equal 0 + insert_data.to_a.must_equal [] + + update_data = dataset.query "UPDATE #{table_id_2} SET x = x + 1 WHERE x IS NOT NULL" + update_data.statement_type.must_equal "UPDATE" + update_data.num_dml_affected_rows.must_equal 2 + + delete_data = dataset.query "DELETE #{table_id_2} WHERE x = 103" + delete_data.statement_type.must_equal "DELETE" + delete_data.num_dml_affected_rows.must_equal 1 + + drop_data = dataset.query "DROP TABLE #{table_id_2}" + drop_data.statement_type.must_equal "DROP_TABLE" + drop_data.ddl_operation_performed.must_equal "DROP" + drop_data.num_dml_affected_rows.must_be :nil? + assert_table_ref drop_data.ddl_target_table, table_id_2, exists: false + end + + def assert_table_ref table_ref, table_id, exists: true + table_ref.must_be_kind_of Google::Cloud::Bigquery::Table + table_ref.project_id.must_equal bigquery.project + table_ref.dataset_id.must_equal dataset_id + table_ref.table_id.must_equal table_id + table_ref.reference?.must_equal true + table_ref.exists?.must_equal exists + end end diff --git a/google-cloud-bigquery/lib/google/cloud/bigquery/data.rb b/google-cloud-bigquery/lib/google/cloud/bigquery/data.rb index c2f6f32bbc94..ca4d658504bd 100644 --- a/google-cloud-bigquery/lib/google/cloud/bigquery/data.rb +++ b/google-cloud-bigquery/lib/google/cloud/bigquery/data.rb @@ -59,6 +59,10 @@ class Data < DelegateClass(::Array) # @private The Google API Client object in JSON Hash. attr_accessor :gapi_json + ## + # @private The query Job gapi object, or nil if from Table#data. + attr_accessor :job_gapi + # @private def initialize arr = [] @service = nil @@ -195,6 +199,84 @@ def headers schema.headers end + ## + # The type of query statement, if valid. Possible values (new values + # might be added in the future): + # + # * "SELECT": `SELECT` query. 
+ # * "INSERT": `INSERT` query; see https://cloud.google.com/bigquery/docs/reference/standard-sql/data-manipulation-language + # * "UPDATE": `UPDATE` query; see https://cloud.google.com/bigquery/docs/reference/standard-sql/data-manipulation-language + # * "DELETE": `DELETE` query; see https://cloud.google.com/bigquery/docs/reference/standard-sql/data-manipulation-language + # * "CREATE_TABLE": `CREATE [OR REPLACE] TABLE` without `AS SELECT`. + # * "CREATE_TABLE_AS_SELECT": `CREATE [OR REPLACE] TABLE ... AS SELECT`. + # * "DROP_TABLE": `DROP TABLE` query. + # * "CREATE_VIEW": `CREATE [OR REPLACE] VIEW ... AS SELECT ...`. + # * "DROP_VIEW": `DROP VIEW` query. + # + # @return [String, nil] The type of query statement. + # + def statement_type + return nil unless job_gapi && job_gapi.statistics.query + job_gapi.statistics.query.statement_type + end + + def ddl? + %w[CREATE_TABLE CREATE_TABLE_AS_SELECT DROP_TABLE CREATE_VIEW \ + DROP_VIEW].include? statement_type + end + + def dml? + %w[INSERT UPDATE DELETE].include? statement_type + end + + ## + # The DDL operation performed, possibly dependent on the pre-existence + # of the DDL target. (See {#ddl_target_table}.) Possible values (new + # values might be added in the future): + # + # * "CREATE": The query created the DDL target. + # * "SKIP": No-op. Example cases: the query is + # `CREATE TABLE IF NOT EXISTS` while the table already exists, or the + # query is `DROP TABLE IF EXISTS` while the table does not exist. + # * "REPLACE": The query replaced the DDL target. Example case: the + # query is `CREATE OR REPLACE TABLE`, and the table already exists. + # * "DROP": The query deleted the DDL target. + # + # @return [String, nil] The DDL operation performed. + # + def ddl_operation_performed + return nil unless job_gapi && job_gapi.statistics.query + job_gapi.statistics.query.ddl_operation_performed + end + + ## + # The DDL target table, in reference state. (See {Table#reference?}.) 
+ # Present only for `CREATE/DROP TABLE/VIEW` queries. (See + # {#statement_type}.) + # + # @return [Google::Cloud::Bigquery::Table, nil] The DDL target table, in + # reference state. + # + def ddl_target_table + return nil unless job_gapi && job_gapi.statistics.query + ensure_service! + table = job_gapi.statistics.query.ddl_target_table + return nil unless table + Google::Cloud::Bigquery::Table.new_reference_from_gapi table, service + end + + ## + # The number of rows affected by a DML statement. Present only for DML + # statements `INSERT`, `UPDATE` or `DELETE`. (See {#statement_type}.) + # + # @return [Integer, nil] The number of rows affected by a DML statement, + # or `nil` if the query is not a DML statement. + # + def num_dml_affected_rows + return nil unless job_gapi && job_gapi.statistics.query + job_gapi.statistics.query.num_dml_affected_rows + end + ## # Whether there is a next page of data. # @@ -252,7 +334,7 @@ def next @table_gapi.table_reference.dataset_id, @table_gapi.table_reference.table_id, token: token - self.class.from_gapi_json data_json, @table_gapi, @service + self.class.from_gapi_json data_json, @table_gapi, job_gapi, @service end ## @@ -327,13 +409,16 @@ def all request_limit: nil ## # @private New Data from a response object. - def self.from_gapi_json gapi_json, table_gapi, service - formatted_rows = Convert.format_rows(gapi_json[:rows], - table_gapi.schema.fields) + def self.from_gapi_json gapi_json, table_gapi, job_gapi, service + rows = gapi_json[:rows] || [] + unless rows.empty? 
+ rows = Convert.format_rows rows, table_gapi.schema.fields + end - data = new formatted_rows + data = new rows data.table_gapi = table_gapi data.gapi_json = gapi_json + data.job_gapi = job_gapi data.service = service data end diff --git a/google-cloud-bigquery/lib/google/cloud/bigquery/query_job.rb b/google-cloud-bigquery/lib/google/cloud/bigquery/query_job.rb index 82d0748a4c78..ced8f095c0fb 100644 --- a/google-cloud-bigquery/lib/google/cloud/bigquery/query_job.rb +++ b/google-cloud-bigquery/lib/google/cloud/bigquery/query_job.rb @@ -217,6 +217,15 @@ def statement_type @gapi.statistics.query.statement_type end + def ddl? + %w[CREATE_TABLE CREATE_TABLE_AS_SELECT DROP_TABLE CREATE_VIEW \ + DROP_VIEW].include? statement_type + end + + def dml? + %w[INSERT UPDATE DELETE].include? statement_type + end + ## # The DDL operation performed, possibly dependent on the pre-existence # of the DDL target. (See {#ddl_target_table}.) Possible values (new @@ -502,7 +511,10 @@ def wait_until_done! # def data token: nil, max: nil, start: nil return nil unless done? - + if ddl? || dml? + data_hash = { totalRows: "0", rows: [] } + return Data.from_gapi_json data_hash, nil, @gapi, service + end ensure_schema! 
options = { token: token, max: max, start: start } @@ -510,7 +522,7 @@ def data token: nil, max: nil, start: nil destination_table_dataset_id, destination_table_table_id, options - Data.from_gapi_json data_hash, destination_table_gapi, service + Data.from_gapi_json data_hash, destination_table_gapi, @gapi, service end alias query_results data diff --git a/google-cloud-bigquery/lib/google/cloud/bigquery/table.rb b/google-cloud-bigquery/lib/google/cloud/bigquery/table.rb index fc80ec17b02d..007252bb0376 100644 --- a/google-cloud-bigquery/lib/google/cloud/bigquery/table.rb +++ b/google-cloud-bigquery/lib/google/cloud/bigquery/table.rb @@ -1173,7 +1173,7 @@ def data token: nil, max: nil, start: nil options = { token: token, max: max, start: start } data_json = service.list_tabledata \ dataset_id, table_id, options - Data.from_gapi_json data_json, gapi, service + Data.from_gapi_json data_json, gapi, nil, service end ## From fd98e3d27063228adec04f754bc7cda70cc08ee5 Mon Sep 17 00:00:00 2001 From: Chris Smith Date: Tue, 23 Oct 2018 16:39:40 -0600 Subject: [PATCH 4/5] Update DDL/DML docs and tests --- .../lib/google/cloud/bigquery/data.rb | 70 +++++++++++++++---- .../lib/google/cloud/bigquery/dataset.rb | 27 +++++-- .../lib/google/cloud/bigquery/project.rb | 27 +++++-- .../lib/google/cloud/bigquery/query_job.rb | 70 +++++++++++++++---- .../support/doctest_helper.rb | 22 ++++-- .../test/google/cloud/bigquery/data_test.rb | 7 ++ .../cloud/bigquery/project_query_job_test.rb | 10 +-- .../cloud/bigquery/project_query_test.rb | 47 +++++++++++++ .../google/cloud/bigquery/query_job_test.rb | 54 +++----------- google-cloud-bigquery/test/helper.rb | 51 +++++++++++++- 10 files changed, 296 insertions(+), 89 deletions(-) diff --git a/google-cloud-bigquery/lib/google/cloud/bigquery/data.rb b/google-cloud-bigquery/lib/google/cloud/bigquery/data.rb index ca4d658504bd..cef636d5bab7 100644 --- a/google-cloud-bigquery/lib/google/cloud/bigquery/data.rb +++ 
b/google-cloud-bigquery/lib/google/cloud/bigquery/data.rb @@ -203,15 +203,25 @@ def headers # The type of query statement, if valid. Possible values (new values # might be added in the future): # - # * "SELECT": `SELECT` query. - # * "INSERT": `INSERT` query; see https://cloud.google.com/bigquery/docs/reference/standard-sql/data-manipulation-language - # * "UPDATE": `UPDATE` query; see https://cloud.google.com/bigquery/docs/reference/standard-sql/data-manipulation-language - # * "DELETE": `DELETE` query; see https://cloud.google.com/bigquery/docs/reference/standard-sql/data-manipulation-language - # * "CREATE_TABLE": `CREATE [OR REPLACE] TABLE` without `AS SELECT`. - # * "CREATE_TABLE_AS_SELECT": `CREATE [OR REPLACE] TABLE ... AS SELECT`. - # * "DROP_TABLE": `DROP TABLE` query. - # * "CREATE_VIEW": `CREATE [OR REPLACE] VIEW ... AS SELECT ...`. - # * "DROP_VIEW": `DROP VIEW` query. + # * "CREATE_MODEL": DDL statement, see [Using Data Definition Language + # Statements](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language) + # * "CREATE_TABLE": DDL statement, see [Using Data Definition Language + # Statements](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language) + # * "CREATE_TABLE_AS_SELECT": DDL statement, see [Using Data Definition + # Language Statements](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language) + # * "CREATE_VIEW": DDL statement, see [Using Data Definition Language + # Statements](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language) + # * "DELETE": DML statement, see [Data Manipulation Language Syntax](https://cloud.google.com/bigquery/docs/reference/standard-sql/dml-syntax) + # * "DROP_MODEL": DDL statement, see [Using Data Definition Language + # Statements](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language) + # * "DROP_TABLE": DDL statement, see [Using Data Definition 
Language + # Statements](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language) + # * "DROP_VIEW": DDL statement, see [Using Data Definition Language + # Statements](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language) + # * "INSERT": DML statement, see [Data Manipulation Language Syntax](https://cloud.google.com/bigquery/docs/reference/standard-sql/dml-syntax) + # * "MERGE": DML statement, see [Data Manipulation Language Syntax](https://cloud.google.com/bigquery/docs/reference/standard-sql/dml-syntax) + # * "SELECT": SQL query, see [Standard SQL Query Syntax](https://cloud.google.com/bigquery/docs/reference/standard-sql/query-syntax) + # * "UPDATE": DML statement, see [Data Manipulation Language Syntax](https://cloud.google.com/bigquery/docs/reference/standard-sql/dml-syntax) # # @return [String, nil] The type of query statement. # @@ -220,13 +230,49 @@ def statement_type job_gapi.statistics.query.statement_type end + ## + # Whether the query that created this data was a DDL statement. + # + # @see https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language + # Using Data Definition Language Statements + # + # @return [Boolean] + # + # @example + # require "google/cloud/bigquery" + # + # bigquery = Google::Cloud::Bigquery.new + # data = bigquery.query "CREATE TABLE my_table (x INT64)" + # + # data.statement_type #=> "CREATE_TABLE" + # data.ddl? #=> true + # def ddl? - %w[CREATE_TABLE CREATE_TABLE_AS_SELECT DROP_TABLE CREATE_VIEW \ - DROP_VIEW].include? statement_type + %w[CREATE_MODEL CREATE_TABLE CREATE_TABLE_AS_SELECT CREATE_VIEW \ + DROP_MODEL DROP_TABLE DROP_VIEW].include? statement_type end + ## + # Whether the query that created this data was a DML statement. 
+ # + # @see https://cloud.google.com/bigquery/docs/reference/standard-sql/dml-syntax + # Data Manipulation Language Syntax + # + # @return [Boolean] + # + # @example + # require "google/cloud/bigquery" + # + # bigquery = Google::Cloud::Bigquery.new + # data = bigquery.query "UPDATE my_table " \ + # "SET x = x + 1 " \ + # "WHERE x IS NOT NULL" + # + # data.statement_type #=> "UPDATE" + # data.dml? #=> true + # def dml? - %w[INSERT UPDATE DELETE].include? statement_type + %w[INSERT UPDATE MERGE DELETE].include? statement_type end ## diff --git a/google-cloud-bigquery/lib/google/cloud/bigquery/dataset.rb b/google-cloud-bigquery/lib/google/cloud/bigquery/dataset.rb index fd15438fa893..6e4bd84cce99 100644 --- a/google-cloud-bigquery/lib/google/cloud/bigquery/dataset.rb +++ b/google-cloud-bigquery/lib/google/cloud/bigquery/dataset.rb @@ -674,8 +674,6 @@ def tables token: nil, max: nil ## # Queries data by creating a [query # job](https://cloud.google.com/bigquery/docs/query-overview#query_jobs). - # Use this method rather than {#query} for executing DDL/DML statements, - # since this method does not automatically return table data. # # Sets the current dataset as the default dataset in the query. Useful # for using unqualified table names. @@ -958,9 +956,8 @@ def query_job query, params: nil, external: nil, # Queries data and waits for the results. In this method, a {QueryJob} # is created and its results are saved to a temporary table, then read # from the table. Timeouts and transient errors are generally handled - # as needed to complete the query. Use {#query_job} rather than this - # method for executing DDL/DML statements, since this method - # automatically returns table data. + # as needed to complete the query. When used for executing DDL/DML + # statements, this method does not return row data. # # Sets the current dataset as the default dataset in the query. Useful # for using unqualified table names. 
@@ -1096,6 +1093,26 @@ def query_job query, params: nil, external: nil, # puts row[:name] # end # + # @example Execute a DDL statement: + # require "google/cloud/bigquery" + # + # bigquery = Google::Cloud::Bigquery.new + # + # data = bigquery.query "CREATE TABLE my_table (x INT64)" + # + # table_ref = data.ddl_target_table + # + # @example Execute a DML statement: + # require "google/cloud/bigquery" + # + # bigquery = Google::Cloud::Bigquery.new + # + # data = bigquery.query "UPDATE my_table " \ + # "SET x = x + 1 " \ + # "WHERE x IS NOT NULL" + # + # puts data.num_dml_affected_rows + # # @example Query using external data source, set destination: # require "google/cloud/bigquery" # diff --git a/google-cloud-bigquery/lib/google/cloud/bigquery/project.rb b/google-cloud-bigquery/lib/google/cloud/bigquery/project.rb index f133c2883c8c..0d4868817b1c 100644 --- a/google-cloud-bigquery/lib/google/cloud/bigquery/project.rb +++ b/google-cloud-bigquery/lib/google/cloud/bigquery/project.rb @@ -98,8 +98,6 @@ def service_account_email ## # Queries data by creating a [query # job](https://cloud.google.com/bigquery/docs/query-overview#query_jobs). - # Use this method rather than {#query} for executing DDL/DML statements, - # since this method does not automatically return table data. # # When using standard SQL and passing arguments using `params`, Ruby # types are mapped to BigQuery types as follows: @@ -387,9 +385,8 @@ def query_job query, params: nil, external: nil, # Queries data and waits for the results. In this method, a {QueryJob} # is created and its results are saved to a temporary table, then read # from the table. Timeouts and transient errors are generally handled - # as needed to complete the query. Use {#query_job} rather than this - # method for executing DDL/DML statements, since this method - # automatically returns table data. + # as needed to complete the query. When used for executing DDL/DML + # statements, this method does not return row data. 
# # When using standard SQL and passing arguments using `params`, Ruby # types are mapped to BigQuery types as follows: @@ -538,6 +535,26 @@ def query_job query, params: nil, external: nil, # puts row[:name] # end # + # @example Execute a DDL statement: + # require "google/cloud/bigquery" + # + # bigquery = Google::Cloud::Bigquery.new + # + # data = bigquery.query "CREATE TABLE `my_dataset.my_table` (x INT64)" + # + # table_ref = data.ddl_target_table + # + # @example Execute a DML statement: + # require "google/cloud/bigquery" + # + # bigquery = Google::Cloud::Bigquery.new + # + # data = bigquery.query "UPDATE `my_dataset.my_table` " \ + # "SET x = x + 1 " \ + # "WHERE x IS NOT NULL" + # + # puts data.num_dml_affected_rows + # # @example Query using external data source, set destination: # require "google/cloud/bigquery" # diff --git a/google-cloud-bigquery/lib/google/cloud/bigquery/query_job.rb b/google-cloud-bigquery/lib/google/cloud/bigquery/query_job.rb index ced8f095c0fb..8a644be1ce36 100644 --- a/google-cloud-bigquery/lib/google/cloud/bigquery/query_job.rb +++ b/google-cloud-bigquery/lib/google/cloud/bigquery/query_job.rb @@ -200,15 +200,25 @@ def query_plan # The type of query statement, if valid. Possible values (new values # might be added in the future): # - # * "SELECT": `SELECT` query. - # * "INSERT": `INSERT` query; see https://cloud.google.com/bigquery/docs/reference/standard-sql/data-manipulation-language - # * "UPDATE": `UPDATE` query; see https://cloud.google.com/bigquery/docs/reference/standard-sql/data-manipulation-language - # * "DELETE": `DELETE` query; see https://cloud.google.com/bigquery/docs/reference/standard-sql/data-manipulation-language - # * "CREATE_TABLE": `CREATE [OR REPLACE] TABLE` without `AS SELECT`. - # * "CREATE_TABLE_AS_SELECT": `CREATE [OR REPLACE] TABLE ... AS SELECT`. - # * "DROP_TABLE": `DROP TABLE` query. - # * "CREATE_VIEW": `CREATE [OR REPLACE] VIEW ... AS SELECT ...`. - # * "DROP_VIEW": `DROP VIEW` query. 
+ # * "CREATE_MODEL": DDL statement, see [Using Data Definition Language + # Statements](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language) + # * "CREATE_TABLE": DDL statement, see [Using Data Definition Language + # Statements](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language) + # * "CREATE_TABLE_AS_SELECT": DDL statement, see [Using Data Definition + # Language Statements](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language) + # * "CREATE_VIEW": DDL statement, see [Using Data Definition Language + # Statements](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language) + # * "DELETE": DML statement, see [Data Manipulation Language Syntax](https://cloud.google.com/bigquery/docs/reference/standard-sql/dml-syntax) + # * "DROP_MODEL": DDL statement, see [Using Data Definition Language + # Statements](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language) + # * "DROP_TABLE": DDL statement, see [Using Data Definition Language + # Statements](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language) + # * "DROP_VIEW": DDL statement, see [Using Data Definition Language + # Statements](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language) + # * "INSERT": DML statement, see [Data Manipulation Language Syntax](https://cloud.google.com/bigquery/docs/reference/standard-sql/dml-syntax) + # * "MERGE": DML statement, see [Data Manipulation Language Syntax](https://cloud.google.com/bigquery/docs/reference/standard-sql/dml-syntax) + # * "SELECT": SQL query, see [Standard SQL Query Syntax](https://cloud.google.com/bigquery/docs/reference/standard-sql/query-syntax) + # * "UPDATE": DML statement, see [Data Manipulation Language Syntax](https://cloud.google.com/bigquery/docs/reference/standard-sql/dml-syntax) # # @return [String, nil] The type of query 
statement. # @@ -217,13 +227,49 @@ def statement_type @gapi.statistics.query.statement_type end + ## + # Whether the query is a DDL statement. + # + # @see https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language + # Using Data Definition Language Statements + # + # @return [Boolean] + # + # @example + # require "google/cloud/bigquery" + # + # bigquery = Google::Cloud::Bigquery.new + # query_job = bigquery.query_job "CREATE TABLE my_table (x INT64)" + # + # query_job.statement_type #=> "CREATE_TABLE" + # query_job.ddl? #=> true + # def ddl? - %w[CREATE_TABLE CREATE_TABLE_AS_SELECT DROP_TABLE CREATE_VIEW \ - DROP_VIEW].include? statement_type + %w[CREATE_MODEL CREATE_TABLE CREATE_TABLE_AS_SELECT CREATE_VIEW \ + DROP_MODEL DROP_TABLE DROP_VIEW].include? statement_type end + ## + # Whether the query is a DML statement. + # + # @see https://cloud.google.com/bigquery/docs/reference/standard-sql/dml-syntax + # Data Manipulation Language Syntax + # + # @return [Boolean] + # + # @example + # require "google/cloud/bigquery" + # + # bigquery = Google::Cloud::Bigquery.new + # query_job = bigquery.query_job "UPDATE my_table " \ + # "SET x = x + 1 " \ + # "WHERE x IS NOT NULL" + # + # query_job.statement_type #=> "UPDATE" + # query_job.dml? #=> true + # def dml? - %w[INSERT UPDATE DELETE].include? statement_type + %w[INSERT UPDATE MERGE DELETE].include? statement_type end ## diff --git a/google-cloud-bigquery/support/doctest_helper.rb b/google-cloud-bigquery/support/doctest_helper.rb index 3bbe362b8814..7db65b06361f 100644 --- a/google-cloud-bigquery/support/doctest_helper.rb +++ b/google-cloud-bigquery/support/doctest_helper.rb @@ -130,6 +130,12 @@ def mock_storage end end + doctest.before "Google::Cloud::Bigquery::Data#dml?" 
do + mock_bigquery do |mock| + mock.expect :insert_job, query_job_gapi(statement_type: "UPDATE"), ["my-project", Google::Apis::BigqueryV2::Job] + end + end + doctest.before "Google::Cloud::Bigquery::Data#fields" do mock_bigquery do |mock| mock.expect :get_dataset, dataset_full_gapi, ["my-project", "my_dataset"] @@ -586,6 +592,13 @@ def mock_storage end end + doctest.before "Google::Cloud::Bigquery::QueryJob#dml?" do + mock_bigquery do |mock| + mock.expect :get_dataset, dataset_full_gapi, ["my-project", "my_dataset"] + mock.expect :insert_job, query_job_gapi(statement_type: "UPDATE"), ["my-project", Google::Apis::BigqueryV2::Job] + end + end + # Google::Cloud::Bigquery::Schema#record doctest.before "Google::Cloud::Bigquery::Schema" do mock_bigquery do |mock| @@ -1166,11 +1179,11 @@ def query_data_hash token: "token1234567890" } end -def query_job_gapi - Google::Apis::BigqueryV2::Job.from_json query_job_hash.to_json +def query_job_gapi statement_type: "CREATE_TABLE" + Google::Apis::BigqueryV2::Job.from_json query_job_hash(statement_type: statement_type).to_json end -def query_job_hash +def query_job_hash statement_type: "CREATE_TABLE" hash = random_job_hash hash["configuration"]["query"] = { "query" => "SELECT name, age, score, active FROM `users`", @@ -1205,7 +1218,8 @@ def query_job_hash } ] } - ] + ], + "statementType" => statement_type } hash end diff --git a/google-cloud-bigquery/test/google/cloud/bigquery/data_test.rb b/google-cloud-bigquery/test/google/cloud/bigquery/data_test.rb index d1c107ff2397..b40b43f971bc 100644 --- a/google-cloud-bigquery/test/google/cloud/bigquery/data_test.rb +++ b/google-cloud-bigquery/test/google/cloud/bigquery/data_test.rb @@ -88,6 +88,13 @@ data.etag.must_equal "etag1234567890" data.token.must_equal "token1234567890" data.total.must_equal 3 + + data.statement_type.must_be :nil? + data.ddl?.must_equal false + data.dml?.must_equal false + data.ddl_operation_performed.must_be :nil? + data.ddl_target_table.must_be :nil? 
+ data.num_dml_affected_rows.must_be :nil? end it "knows schema, fields, and headers" do diff --git a/google-cloud-bigquery/test/google/cloud/bigquery/project_query_job_test.rb b/google-cloud-bigquery/test/google/cloud/bigquery/project_query_job_test.rb index c095fd0575d4..509b5e1781d9 100644 --- a/google-cloud-bigquery/test/google/cloud/bigquery/project_query_job_test.rb +++ b/google-cloud-bigquery/test/google/cloud/bigquery/project_query_job_test.rb @@ -41,10 +41,12 @@ job.must_be_kind_of Google::Cloud::Bigquery::QueryJob # Sometimes statistics.query is nil in the returned job, test for that behavior here. - job.cache_hit?.must_equal false - job.bytes_processed.must_be :nil? - job.query_plan.must_be :nil? - job.statement_type.must_be :nil? + job.cache_hit?.must_equal true + job.bytes_processed.must_equal 123456 + job.query_plan.wont_be :nil? + job.statement_type.must_equal "SELECT" + job.ddl?.must_equal false + job.dml?.must_equal false job.ddl_operation_performed.must_be :nil? job.ddl_target_table.must_be :nil? job.num_dml_affected_rows.must_be :nil? diff --git a/google-cloud-bigquery/test/google/cloud/bigquery/project_query_test.rb b/google-cloud-bigquery/test/google/cloud/bigquery/project_query_test.rb index b9d04fe65b34..9c3549856712 100644 --- a/google-cloud-bigquery/test/google/cloud/bigquery/project_query_test.rb +++ b/google-cloud-bigquery/test/google/cloud/bigquery/project_query_test.rb @@ -16,6 +16,8 @@ describe Google::Cloud::Bigquery::Project, :query, :mock_bigquery do let(:query) { "SELECT name, age, score, active FROM `some_project.some_dataset.users`" } + let(:ddl_query) { "CREATE TABLE `my_dataset.my_table` (x INT64)" } + let(:dml_query) { "UPDATE `my_dataset.my_table` SET x = x + 1 WHERE x IS NOT NULL" } let(:job_id) { "job_9876543210" } let(:dataset_id) { "my_dataset" } @@ -62,6 +64,51 @@ data[2][:active].must_be :nil? 
   end
 
+  it "executes a DDL statement" do
+    mock = Minitest::Mock.new
+    bigquery.service.mocked_service = mock
+
+    job_gapi = query_job_gapi ddl_query, location: nil
+    resp_gapi = query_job_resp_gapi ddl_query, job_id: job_id, target_table: true, statement_type: "CREATE_TABLE", ddl_operation_performed: "CREATE"
+    mock.expect :insert_job, resp_gapi, [project, job_gapi]
+
+    data = bigquery.query ddl_query
+    mock.verify
+    # data.must_be_kind_of Google::Cloud::Bigquery::Data
+    data.class.must_equal Google::Cloud::Bigquery::Data
+    data.count.must_equal 0
+    data.total.must_equal 0
+
+    data.statement_type.must_equal "CREATE_TABLE"
+    data.ddl?.must_equal true
+    data.dml?.must_equal false
+    data.ddl_operation_performed.must_equal "CREATE"
+    data.ddl_target_table.wont_be :nil?
+    data.num_dml_affected_rows.must_be :nil?
+  end
+
+  it "executes a DML statement" do
+    mock = Minitest::Mock.new
+    bigquery.service.mocked_service = mock
+
+    job_gapi = query_job_gapi dml_query, location: nil
+    resp_gapi = query_job_resp_gapi dml_query, job_id: job_id, statement_type: "UPDATE", num_dml_affected_rows: 50
+    mock.expect :insert_job, resp_gapi, [project, job_gapi]
+
+    data = bigquery.query dml_query
+    mock.verify
+    # data.must_be_kind_of Google::Cloud::Bigquery::Data
+    data.class.must_equal Google::Cloud::Bigquery::Data
+    data.count.must_equal 0
+    data.total.must_equal 0
+
+    data.statement_type.must_equal "UPDATE"
+    data.ddl?.must_equal false
+    data.dml?.must_equal true
+    data.ddl_operation_performed.must_be :nil?
+ data.num_dml_affected_rows.must_equal 50 + end + it "paginates the data" do mock = Minitest::Mock.new bigquery.service.mocked_service = mock diff --git a/google-cloud-bigquery/test/google/cloud/bigquery/query_job_test.rb b/google-cloud-bigquery/test/google/cloud/bigquery/query_job_test.rb index 07c008144e02..a0f9e4c153ec 100644 --- a/google-cloud-bigquery/test/google/cloud/bigquery/query_job_test.rb +++ b/google-cloud-bigquery/test/google/cloud/bigquery/query_job_test.rb @@ -17,7 +17,8 @@ require "uri" describe Google::Cloud::Bigquery::QueryJob, :mock_bigquery do - let(:job) { Google::Cloud::Bigquery::Job.from_gapi query_job_gapi, + let(:job_gapi) { query_job_gapi target_table: true, statement_type: "CREATE_TABLE", num_dml_affected_rows: 50, ddl_operation_performed: "CREATE" } + let(:job) { Google::Cloud::Bigquery::Job.from_gapi job_gapi, bigquery.service } let(:job_id) { job.job_id } @@ -61,6 +62,8 @@ job.ddl_target_table.table_id.must_equal "target_table_id" job.num_dml_affected_rows.must_equal 50 job.statement_type.must_equal "CREATE_TABLE" + job.ddl?.must_equal true + job.dml?.must_equal false end it "knows its query config" do @@ -109,9 +112,12 @@ job.udfs.last.must_equal "gs://my-bucket/my-lib.js" end - def query_job_gapi + def query_job_gapi target_table: false, statement_type: nil, num_dml_affected_rows: nil, ddl_operation_performed: nil gapi = Google::Apis::BigqueryV2::Job.from_json query_job_hash.to_json - gapi.statistics.query = statistics_query_gapi + gapi.statistics.query = statistics_query_gapi target_table: target_table, + statement_type: statement_type, + num_dml_affected_rows: num_dml_affected_rows, + ddl_operation_performed: ddl_operation_performed gapi end @@ -155,46 +161,4 @@ def destination_table_json } hash.to_json end - - def statistics_query_gapi - Google::Apis::BigqueryV2::JobStatistics2.new( - billing_tier: 1, - cache_hit: true, - ddl_operation_performed: "CREATE", - ddl_target_table: Google::Apis::BigqueryV2::TableReference.new( - 
project_id: "target_project_id", - dataset_id: "target_dataset_id", - table_id: "target_table_id" - ), - num_dml_affected_rows: 50, # Present only for DML statements INSERT, UPDATE or DELETE. - query_plan: [ - Google::Apis::BigqueryV2::ExplainQueryStage.new( - compute_ratio_avg: 1.0, - compute_ratio_max: 1.0, - id: 1, - name: "Stage 1", - read_ratio_avg: 0.2710832227382326, - read_ratio_max: 0.2710832227382326, - records_read: 164656, - records_written: 1, - status: "COMPLETE", - steps: [ - Google::Apis::BigqueryV2::ExplainQueryStep.new( - kind: "READ", - substeps: [ - "word", - "FROM bigquery-public-data:samples.shakespeare" - ] - ) - ], - wait_ratio_avg: 0.007876711656047392, - wait_ratio_max: 0.007876711656047392, - write_ratio_avg: 0.05389444608201358, - write_ratio_max: 0.05389444608201358 - ) - ], - statement_type: "CREATE_TABLE", - total_bytes_processed: 123456 - ) - end end diff --git a/google-cloud-bigquery/test/helper.rb b/google-cloud-bigquery/test/helper.rb index cebde57ebdc9..7d5c2b6c4b79 100644 --- a/google-cloud-bigquery/test/helper.rb +++ b/google-cloud-bigquery/test/helper.rb @@ -455,8 +455,10 @@ def job_reference_gapi project, job_id, location: "US" job_ref end - def query_job_resp_gapi query, job_id: nil - Google::Apis::BigqueryV2::Job.from_json query_job_resp_json(query, job_id: job_id) + def query_job_resp_gapi query, job_id: nil, target_table: false, statement_type: "SELECT", num_dml_affected_rows: nil, ddl_operation_performed: nil + gapi = Google::Apis::BigqueryV2::Job.from_json query_job_resp_json query, job_id: job_id + gapi.statistics.query = statistics_query_gapi target_table: target_table, statement_type: statement_type, num_dml_affected_rows: num_dml_affected_rows, ddl_operation_performed: ddl_operation_performed + gapi end def query_job_resp_json query, job_id: "job_9876543210", location: "US" @@ -486,6 +488,51 @@ def query_job_resp_json query, job_id: "job_9876543210", location: "US" hash.to_json end + def statistics_query_gapi 
target_table: false, statement_type: nil, num_dml_affected_rows: nil, ddl_operation_performed: nil + ddl_target_table = if target_table + Google::Apis::BigqueryV2::TableReference.new( + project_id: "target_project_id", + dataset_id: "target_dataset_id", + table_id: "target_table_id" + ) + end + Google::Apis::BigqueryV2::JobStatistics2.new( + billing_tier: 1, + cache_hit: true, + ddl_operation_performed: ddl_operation_performed, + ddl_target_table: ddl_target_table, + num_dml_affected_rows: num_dml_affected_rows, + query_plan: [ + Google::Apis::BigqueryV2::ExplainQueryStage.new( + compute_ratio_avg: 1.0, + compute_ratio_max: 1.0, + id: 1, + name: "Stage 1", + read_ratio_avg: 0.2710832227382326, + read_ratio_max: 0.2710832227382326, + records_read: 164656, + records_written: 1, + status: "COMPLETE", + steps: [ + Google::Apis::BigqueryV2::ExplainQueryStep.new( + kind: "READ", + substeps: [ + "word", + "FROM bigquery-public-data:samples.shakespeare" + ] + ) + ], + wait_ratio_avg: 0.007876711656047392, + wait_ratio_max: 0.007876711656047392, + write_ratio_avg: 0.05389444608201358, + write_ratio_max: 0.05389444608201358 + ) + ], + statement_type: statement_type, + total_bytes_processed: 123456 + ) + end + def failed_query_job_resp_gapi query, job_id: nil, reason: "accessDenied", location: "US" Google::Apis::BigqueryV2::Job.from_json failed_query_job_resp_json(query, job_id: job_id, reason: reason, location: location) end From f4644be3d13eaf9fb678b2732066ea0e1cff4fec Mon Sep 17 00:00:00 2001 From: Chris Smith Date: Tue, 23 Oct 2018 17:05:34 -0600 Subject: [PATCH 5/5] Update QueryJob#data totalRows value for DDL/DML to nil --- .../acceptance/bigquery/dataset_ddl_dml_test.rb | 4 ++-- google-cloud-bigquery/lib/google/cloud/bigquery/query_job.rb | 2 +- .../test/google/cloud/bigquery/project_query_test.rb | 4 ++-- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/google-cloud-bigquery/acceptance/bigquery/dataset_ddl_dml_test.rb 
b/google-cloud-bigquery/acceptance/bigquery/dataset_ddl_dml_test.rb index e9eb749a5cfb..d1bac9b71e80 100644 --- a/google-cloud-bigquery/acceptance/bigquery/dataset_ddl_dml_test.rb +++ b/google-cloud-bigquery/acceptance/bigquery/dataset_ddl_dml_test.rb @@ -71,7 +71,7 @@ create_data.dml?.must_equal false create_data.ddl_operation_performed.must_equal "CREATE" create_data.num_dml_affected_rows.must_be :nil? - create_data.total.must_equal 0 + create_data.total.must_be :nil? create_data.next?.must_equal false create_data.next.must_be :nil? create_data.all.must_be_kind_of Enumerator @@ -85,7 +85,7 @@ insert_data.dml?.must_equal true insert_data.ddl_operation_performed.must_be :nil? insert_data.num_dml_affected_rows.must_equal 2 - insert_data.total.must_equal 0 + insert_data.total.must_be :nil? insert_data.next?.must_equal false insert_data.next.must_be :nil? insert_data.all.must_be_kind_of Enumerator diff --git a/google-cloud-bigquery/lib/google/cloud/bigquery/query_job.rb b/google-cloud-bigquery/lib/google/cloud/bigquery/query_job.rb index 8a644be1ce36..83dc2ebdf8a4 100644 --- a/google-cloud-bigquery/lib/google/cloud/bigquery/query_job.rb +++ b/google-cloud-bigquery/lib/google/cloud/bigquery/query_job.rb @@ -558,7 +558,7 @@ def wait_until_done! def data token: nil, max: nil, start: nil return nil unless done? if ddl? || dml? - data_hash = { totalRows: "0", rows: [] } + data_hash = { totalRows: nil, rows: [] } return Data.from_gapi_json data_hash, nil, @gapi, service end ensure_schema! 
diff --git a/google-cloud-bigquery/test/google/cloud/bigquery/project_query_test.rb b/google-cloud-bigquery/test/google/cloud/bigquery/project_query_test.rb index 9c3549856712..dcf91449e0f0 100644 --- a/google-cloud-bigquery/test/google/cloud/bigquery/project_query_test.rb +++ b/google-cloud-bigquery/test/google/cloud/bigquery/project_query_test.rb @@ -77,7 +77,7 @@ # data.must_be_kind_of Google::Cloud::Bigquery::Data data.class.must_equal Google::Cloud::Bigquery::Data data.count.must_equal 0 - data.total.must_equal 0 + data.total.must_be :nil? data.statement_type.must_equal "CREATE_TABLE" data.ddl?.must_equal true @@ -100,7 +100,7 @@ # data.must_be_kind_of Google::Cloud::Bigquery::Data data.class.must_equal Google::Cloud::Bigquery::Data data.count.must_equal 0 - data.total.must_equal 0 + data.total.must_be :nil? data.statement_type.must_equal "UPDATE" data.ddl?.must_equal false