Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
118 changes: 118 additions & 0 deletions google-cloud-bigquery/acceptance/bigquery/dataset_ddl_dml_test.rb
Original file line number Diff line number Diff line change
@@ -0,0 +1,118 @@
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

require "bigquery_helper"

# Acceptance spec: exercises BigQuery DDL (CREATE/DROP TABLE) and DML
# (INSERT/UPDATE/DELETE) statements through Dataset#query_job and
# Dataset#query, and verifies the statement metadata surfaced on the
# job/data objects. Runs against the live service (see bigquery_helper).
describe Google::Cloud::Bigquery::Dataset, :ddl_dml, :bigquery do
  let(:dataset_id) { "#{prefix}_dataset" }
  # Finds the shared acceptance dataset, creating it on first use.
  let(:dataset) do
    d = bigquery.dataset dataset_id
    if d.nil?
      d = bigquery.create_dataset dataset_id
    end
    d
  end
  # Randomized table names so parallel test runs do not collide.
  let(:table_id) { "dataset_ddl_table_#{SecureRandom.hex(16)}" }
  let(:table_id_2) { "dataset_ddl_table_#{SecureRandom.hex(16)}" }

  it "creates and populates and drops a table with ddl/dml query jobs" do
    # DDL: create the table. A DDL job reports its operation and target
    # table, and has no DML affected-row count.
    create_job = dataset.query_job "CREATE TABLE #{table_id} (x INT64)"
    create_job.wait_until_done!
    create_job.wont_be :failed?

    create_job.statement_type.must_equal "CREATE_TABLE"
    create_job.ddl_operation_performed.must_equal "CREATE"
    assert_table_ref create_job.ddl_target_table, table_id
    create_job.num_dml_affected_rows.must_be :nil?

    # DML: insert two rows (x = 101, 102).
    insert_job = dataset.query_job "INSERT #{table_id} (x) VALUES(101),(102)"
    insert_job.wait_until_done!
    insert_job.wont_be :failed?
    insert_job.statement_type.must_equal "INSERT"
    insert_job.num_dml_affected_rows.must_equal 2

    # DML: increment both rows (now x = 102, 103).
    update_job = dataset.query_job "UPDATE #{table_id} SET x = x + 1 WHERE x IS NOT NULL"
    update_job.wait_until_done!
    update_job.wont_be :failed?
    update_job.statement_type.must_equal "UPDATE"
    update_job.num_dml_affected_rows.must_equal 2

    # DML: only the single x = 103 row matches.
    delete_job = dataset.query_job "DELETE #{table_id} WHERE x = 103"
    delete_job.wait_until_done!
    delete_job.wont_be :failed?
    delete_job.statement_type.must_equal "DELETE"
    delete_job.num_dml_affected_rows.must_equal 1

    # DDL: drop the table; the target reference should no longer exist.
    drop_job = dataset.query_job "DROP TABLE #{table_id}"
    drop_job.wait_until_done!
    drop_job.wont_be :failed?
    drop_job.statement_type.must_equal "DROP_TABLE"
    drop_job.ddl_operation_performed.must_equal "DROP"
    assert_table_ref drop_job.ddl_target_table, table_id, exists: false
    drop_job.num_dml_affected_rows.must_be :nil?
  end

  it "creates and populates and drops a table with ddl/dml queries" do
    # Same scenario via the synchronous Dataset#query, which returns Data.
    # DDL results carry statement metadata but contain no rows.
    create_data = dataset.query "CREATE TABLE #{table_id_2} (x INT64)"
    assert_table_ref create_data.ddl_target_table, table_id_2
    create_data.statement_type.must_equal "CREATE_TABLE"
    create_data.ddl?.must_equal true
    create_data.dml?.must_equal false
    create_data.ddl_operation_performed.must_equal "CREATE"
    create_data.num_dml_affected_rows.must_be :nil?
    create_data.total.must_be :nil?
    create_data.next?.must_equal false
    create_data.next.must_be :nil?
    create_data.all.must_be_kind_of Enumerator
    create_data.count.must_equal 0
    create_data.to_a.must_equal []

    # DML results report affected-row counts and also contain no rows.
    insert_data = dataset.query "INSERT #{table_id_2} (x) VALUES(101),(102)"
    insert_data.ddl_target_table.must_be :nil?
    insert_data.statement_type.must_equal "INSERT"
    insert_data.ddl?.must_equal false
    insert_data.dml?.must_equal true
    insert_data.ddl_operation_performed.must_be :nil?
    insert_data.num_dml_affected_rows.must_equal 2
    insert_data.total.must_be :nil?
    insert_data.next?.must_equal false
    insert_data.next.must_be :nil?
    insert_data.all.must_be_kind_of Enumerator
    insert_data.count.must_equal 0
    insert_data.to_a.must_equal []

    # x = 101, 102 becomes 102, 103.
    update_data = dataset.query "UPDATE #{table_id_2} SET x = x + 1 WHERE x IS NOT NULL"
    update_data.statement_type.must_equal "UPDATE"
    update_data.num_dml_affected_rows.must_equal 2

    # Only the x = 103 row matches.
    delete_data = dataset.query "DELETE #{table_id_2} WHERE x = 103"
    delete_data.statement_type.must_equal "DELETE"
    delete_data.num_dml_affected_rows.must_equal 1

    drop_data = dataset.query "DROP TABLE #{table_id_2}"
    drop_data.statement_type.must_equal "DROP_TABLE"
    drop_data.ddl_operation_performed.must_equal "DROP"
    drop_data.num_dml_affected_rows.must_be :nil?
    assert_table_ref drop_data.ddl_target_table, table_id_2, exists: false
  end

  # Asserts that table_ref is a Table in reference state (not materialized)
  # pointing at table_id in this dataset, and that its existence on the
  # service matches `exists`.
  def assert_table_ref table_ref, table_id, exists: true
    table_ref.must_be_kind_of Google::Cloud::Bigquery::Table
    table_ref.project_id.must_equal bigquery.project
    table_ref.dataset_id.must_equal dataset_id
    table_ref.table_id.must_equal table_id
    table_ref.reference?.must_equal true
    table_ref.exists?.must_equal exists
  end
end
56 changes: 0 additions & 56 deletions google-cloud-bigquery/acceptance/bigquery/dataset_ddl_test.rb

This file was deleted.

141 changes: 136 additions & 5 deletions google-cloud-bigquery/lib/google/cloud/bigquery/data.rb
Original file line number Diff line number Diff line change
Expand Up @@ -59,6 +59,10 @@ class Data < DelegateClass(::Array)
# @private The Google API Client object in JSON Hash.
attr_accessor :gapi_json

##
# @private The query Job gapi object, or nil if from Table#data.
attr_accessor :job_gapi

# @private
def initialize arr = []
@service = nil
Expand Down Expand Up @@ -195,6 +199,130 @@ def headers
schema.headers
end

##
# The type of query statement, if valid. Possible values (new values
# might be added in the future) include the DDL statements
# "CREATE_MODEL", "CREATE_TABLE", "CREATE_TABLE_AS_SELECT",
# "CREATE_VIEW", "DROP_MODEL", "DROP_TABLE" and "DROP_VIEW" (see
# [Using Data Definition Language
# Statements](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language)),
# the DML statements "DELETE", "INSERT", "MERGE" and "UPDATE" (see
# [Data Manipulation Language Syntax](https://cloud.google.com/bigquery/docs/reference/standard-sql/dml-syntax)),
# and "SELECT" for SQL queries (see [Standard SQL Query
# Syntax](https://cloud.google.com/bigquery/docs/reference/standard-sql/query-syntax)).
#
# @return [String, nil] The type of query statement, or `nil` if the
#   data did not come from a query job.
#
def statement_type
  query_stats = job_gapi && job_gapi.statistics.query
  query_stats ? query_stats.statement_type : nil
end

##
# Whether the query that created this data was a DDL statement.
#
# @see https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language
#   Using Data Definition Language Statements
#
# @return [Boolean]
#
# @example
#   require "google/cloud/bigquery"
#
#   bigquery = Google::Cloud::Bigquery.new
#   data = bigquery.query "CREATE TABLE my_table (x INT64)"
#
#   data.statement_type #=> "CREATE_TABLE"
#   data.ddl? #=> true
#
def ddl?
  # NOTE: no trailing "\" inside the %w literal -- a backslash there
  # escapes the newline and injects a spurious "\n" element into the
  # array; %w already spans multiple lines without continuation.
  %w[CREATE_MODEL CREATE_TABLE CREATE_TABLE_AS_SELECT CREATE_VIEW
     DROP_MODEL DROP_TABLE DROP_VIEW].include? statement_type
end

##
# Whether the query that created this data was a DML statement.
#
# @see https://cloud.google.com/bigquery/docs/reference/standard-sql/dml-syntax
#   Data Manipulation Language Syntax
#
# @return [Boolean]
#
# @example
#   require "google/cloud/bigquery"
#
#   bigquery = Google::Cloud::Bigquery.new
#   data = bigquery.query "UPDATE my_table " \
#                         "SET x = x + 1 " \
#                         "WHERE x IS NOT NULL"
#
#   data.statement_type #=> "UPDATE"
#   data.dml? #=> true
#
def dml?
  case statement_type
  when "INSERT", "UPDATE", "MERGE", "DELETE" then true
  else false
  end
end

##
# The DDL operation performed, possibly dependent on the pre-existence
# of the DDL target. (See {#ddl_target_table}.) Possible values (new
# values might be added in the future):
#
# * "CREATE": The query created the DDL target.
# * "SKIP": No-op. Example cases: the query is
#   `CREATE TABLE IF NOT EXISTS` while the table already exists, or the
#   query is `DROP TABLE IF EXISTS` while the table does not exist.
# * "REPLACE": The query replaced the DDL target. Example case: the
#   query is `CREATE OR REPLACE TABLE`, and the table already exists.
# * "DROP": The query deleted the DDL target.
#
# @return [String, nil] The DDL operation performed.
#
def ddl_operation_performed
  query_stats = job_gapi && job_gapi.statistics.query
  query_stats ? query_stats.ddl_operation_performed : nil
end

##
# The DDL target table, in reference state. (See {Table#reference?}.)
# Present only for `CREATE/DROP TABLE/VIEW` queries. (See
# {#statement_type}.)
#
# @return [Google::Cloud::Bigquery::Table, nil] The DDL target table, in
#   reference state.
#
def ddl_target_table
  query_stats = job_gapi && job_gapi.statistics.query
  return nil unless query_stats
  ensure_service!
  ref_gapi = query_stats.ddl_target_table
  return nil unless ref_gapi
  Google::Cloud::Bigquery::Table.new_reference_from_gapi ref_gapi, service
end

##
# The number of rows affected by a DML statement. Present only for DML
# statements `INSERT`, `UPDATE` or `DELETE`. (See {#statement_type}.)
#
# @return [Integer, nil] The number of rows affected by a DML statement,
#   or `nil` if the query is not a DML statement.
#
def num_dml_affected_rows
  query_stats = job_gapi && job_gapi.statistics.query
  return nil if query_stats.nil?
  query_stats.num_dml_affected_rows
end

##
# Whether there is a next page of data.
#
Expand Down Expand Up @@ -252,7 +380,7 @@ def next
@table_gapi.table_reference.dataset_id,
@table_gapi.table_reference.table_id,
token: token
self.class.from_gapi_json data_json, @table_gapi, @service
self.class.from_gapi_json data_json, @table_gapi, job_gapi, @service
end

##
Expand Down Expand Up @@ -327,13 +455,16 @@ def all request_limit: nil

##
# @private New Data from a response object.
def self.from_gapi_json gapi_json, table_gapi, service
formatted_rows = Convert.format_rows(gapi_json[:rows],
table_gapi.schema.fields)
def self.from_gapi_json gapi_json, table_gapi, job_gapi, service
rows = gapi_json[:rows] || []
unless rows.empty?
rows = Convert.format_rows rows, table_gapi.schema.fields
end

data = new formatted_rows
data = new rows
data.table_gapi = table_gapi
data.gapi_json = gapi_json
data.job_gapi = job_gapi
data.service = service
data
end
Expand Down
Loading