feat(bigquery): Add session support #15699

Merged: 6 commits, merged on Nov 16, 2021
2 changes: 1 addition & 1 deletion google-cloud-bigquery/.rubocop.yml
@@ -14,7 +14,7 @@ Style/Documentation:
Lint/MixedRegexpCaptureTypes:
Enabled: false
Metrics/AbcSize:
Max: 50
Max: 55
Metrics/BlockLength:
Exclude:
- "google-cloud-bigquery.gemspec"
19 changes: 19 additions & 0 deletions google-cloud-bigquery/acceptance/bigquery/advanced_test.rb
@@ -69,6 +69,25 @@
_(rows[2]).must_equal({ name: "Gandalf", spells_name: "Skydragon", spells_properties_name: "Explodey", spells_properties_power: 11.0 })
end

it "queries in session mode" do
job = bigquery.query_job "CREATE TEMPORARY TABLE temptable AS SELECT 17 as foo", dataset: dataset, create_session: true
job.wait_until_done!
_(job).wont_be :failed?
_(job.session_id).wont_be :nil?

job_2 = bigquery.query_job "SELECT * FROM temptable", dataset: dataset, session_id: job.session_id
job_2.wait_until_done!
_(job_2).wont_be :failed?
_(job_2.session_id).wont_be :nil?
_(job_2.session_id).must_equal job.session_id
_(job_2.data.first).wont_be :nil?
_(job_2.data.first[:foo]).must_equal 17

data = bigquery.query "SELECT * FROM temptable", dataset: dataset, session_id: job.session_id
_(data.first).wont_be :nil?
_(data.first[:foo]).must_equal 17
end

it "modifies a nested schema via field" do
empty_table_id = "#{table_id}_empty"
empty_table = dataset.table empty_table_id
@@ -218,6 +218,7 @@
query_job = dataset.query_job query, job_id: job_id
_(query_job).must_be_kind_of Google::Cloud::Bigquery::QueryJob
_(query_job.job_id).must_equal job_id
_(query_job.session_id).must_be :nil?
query_job.wait_until_done!
_(query_job.done?).must_equal true
_(query_job.data.total).wont_be_nil
19 changes: 19 additions & 0 deletions google-cloud-bigquery/acceptance/bigquery/dataset_test.rb
@@ -541,4 +541,23 @@

assert_data table.data(max: 1)
end

it "queries in session mode" do
job = dataset.query_job "CREATE TEMPORARY TABLE temptable AS SELECT 17 as foo", create_session: true
job.wait_until_done!
_(job).wont_be :failed?
_(job.session_id).wont_be :nil?

job_2 = dataset.query_job "SELECT * FROM temptable", session_id: job.session_id
job_2.wait_until_done!
_(job_2).wont_be :failed?
_(job_2.session_id).wont_be :nil?
_(job_2.session_id).must_equal job.session_id
_(job_2.data.first).wont_be :nil?
_(job_2.data.first[:foo]).must_equal 17

data = dataset.query "SELECT * FROM temptable", session_id: job.session_id
_(data.first).wont_be :nil?
_(data.first[:foo]).must_equal 17
end
end
141 changes: 114 additions & 27 deletions google-cloud-bigquery/lib/google/cloud/bigquery/dataset.rb
@@ -1244,6 +1244,12 @@ def routines token: nil, max: nil, filter: nil
# Flattens all nested and repeated fields in the query results. The
# default value is `true`. `large_results` parameter must be `true` if
# this is set to `false`.
# @param [Integer] maximum_billing_tier Limits the billing tier for this
# job. Queries that have resource usage beyond this tier will fail
# (without incurring a charge). WARNING: The billed byte amount can be
# multiplied by an amount up to this number! Most users should not need
# to alter this setting, and we recommend that you avoid introducing new
# uses of it. Deprecated.
# @param [Integer] maximum_bytes_billed Limits the bytes billed for this
# job. Queries that will have bytes billed beyond this limit will fail
# (without incurring a charge). Optional. If unspecified, this will be
@@ -1294,8 +1300,12 @@ def routines token: nil, max: nil, filter: nil
# For additional information on migrating, see: [Migrating to
# standard SQL - Differences in user-defined JavaScript
# functions](https://cloud.google.com/bigquery/docs/reference/standard-sql/migrating-from-legacy-sql#differences_in_user-defined_javascript_functions)
# @param [Integer] maximum_billing_tier Deprecated: Change the billing
# tier to allow high-compute queries.
# @param [Boolean] create_session If true, creates a new session whose
# session ID is a server-generated random ID. If false, runs the query
# with an existing session ID when one is provided in the `session_id`
# param; otherwise runs the query in non-session mode. See {Job#session_id}.
# @param [String] session_id The ID of an existing session. See also the
# `create_session` param and {Job#session_id}.
# @yield [job] a job configuration object
# @yieldparam [Google::Cloud::Bigquery::QueryJob::Updater] job a job
# configuration object for setting additional options for the query.
@@ -1371,8 +1381,7 @@ def routines token: nil, max: nil, filter: nil
# bigquery = Google::Cloud::Bigquery.new
# dataset = bigquery.dataset "my_dataset"
#
# job = dataset.query_job "SELECT name FROM my_table " \
# "WHERE id IN UNNEST(@ids)",
# job = dataset.query_job "SELECT name FROM my_table WHERE id IN UNNEST(@ids)",
# params: { ids: [] },
# types: { ids: [:INT64] }
#
@@ -1387,8 +1396,9 @@ def routines token: nil, max: nil, filter: nil
# require "google/cloud/bigquery"
#
# bigquery = Google::Cloud::Bigquery.new
# dataset = bigquery.dataset "my_dataset"
#
# job = bigquery.query_job "CREATE TABLE my_table (x INT64)"
# job = dataset.query_job "CREATE TABLE my_table (x INT64)"
#
# job.wait_until_done!
# if !job.failed?
@@ -1399,16 +1409,28 @@ def routines token: nil, max: nil, filter: nil
# require "google/cloud/bigquery"
#
# bigquery = Google::Cloud::Bigquery.new
# dataset = bigquery.dataset "my_dataset"
#
# job = bigquery.query_job "UPDATE my_table " \
# "SET x = x + 1 " \
# "WHERE x IS NOT NULL"
# job = dataset.query_job "UPDATE my_table SET x = x + 1 WHERE x IS NOT NULL"
#
# job.wait_until_done!
# if !job.failed?
# puts job.num_dml_affected_rows
# end
#
# @example Run query in a session:
# require "google/cloud/bigquery"
#
# bigquery = Google::Cloud::Bigquery.new
# dataset = bigquery.dataset "my_dataset"
#
# job = dataset.query_job "CREATE TEMPORARY TABLE temptable AS SELECT 17 as foo", create_session: true
#
# job.wait_until_done!
#
# session_id = job.session_id
# data = dataset.query "SELECT * FROM temptable", session_id: session_id
#
# @example Query using external data source, set destination:
# require "google/cloud/bigquery"
#
@@ -1435,16 +1457,52 @@ def routines token: nil, max: nil, filter: nil
#
# @!group Data
#
def query_job query, params: nil, types: nil, external: nil, priority: "INTERACTIVE", cache: true, table: nil,
create: nil, write: nil, dryrun: nil, standard_sql: nil, legacy_sql: nil, large_results: nil,
flatten: nil, maximum_billing_tier: nil, maximum_bytes_billed: nil, job_id: nil, prefix: nil,
labels: nil, udfs: nil
def query_job query,
params: nil,
types: nil,
external: nil,
priority: "INTERACTIVE",
cache: true,
table: nil,
create: nil,
write: nil,
dryrun: nil,
standard_sql: nil,
legacy_sql: nil,
large_results: nil,
flatten: nil,
maximum_billing_tier: nil,
maximum_bytes_billed: nil,
job_id: nil,
prefix: nil,
labels: nil,
udfs: nil,
create_session: nil,
session_id: nil
ensure_service!
options = { params: params, types: types, external: external, priority: priority, cache: cache, table: table,
create: create, write: write, dryrun: dryrun, standard_sql: standard_sql, legacy_sql: legacy_sql,
large_results: large_results, flatten: flatten, maximum_billing_tier: maximum_billing_tier,
maximum_bytes_billed: maximum_bytes_billed, job_id: job_id, prefix: prefix, labels: labels,
udfs: udfs }
options = {
params: params,
types: types,
external: external,
priority: priority,
cache: cache,
table: table,
create: create,
write: write,
dryrun: dryrun,
standard_sql: standard_sql,
legacy_sql: legacy_sql,
large_results: large_results,
flatten: flatten,
maximum_billing_tier: maximum_billing_tier,
maximum_bytes_billed: maximum_bytes_billed,
job_id: job_id,
prefix: prefix,
labels: labels,
udfs: udfs,
create_session: create_session,
session_id: session_id
}

updater = QueryJob::Updater.from_options service, query, options
updater.dataset = self
@@ -1566,6 +1624,8 @@ def query_job query, params: nil, types: nil, external: nil, priority: "INTERACT
# When set to false, the values of `large_results` and `flatten` are
# ignored; the query will be run as if `large_results` is true and
# `flatten` is false. Optional. The default value is false.
# @param [String] session_id The ID of an existing session. See the
# `create_session` param in {#query_job} and {Job#session_id}.
# @yield [job] a job configuration object
# @yieldparam [Google::Cloud::Bigquery::QueryJob::Updater] job a job
# configuration object for setting additional options for the query.
@@ -1641,8 +1701,7 @@ def query_job query, params: nil, types: nil, external: nil, priority: "INTERACT
# bigquery = Google::Cloud::Bigquery.new
# dataset = bigquery.dataset "my_dataset"
#
# data = dataset.query "SELECT name FROM my_table " \
# "WHERE id IN UNNEST(@ids)",
# data = dataset.query "SELECT name FROM my_table WHERE id IN UNNEST(@ids)",
# params: { ids: [] },
# types: { ids: [:INT64] }
#
@@ -1657,22 +1716,35 @@ def query_job query, params: nil, types: nil, external: nil, priority: "INTERACT
# require "google/cloud/bigquery"
#
# bigquery = Google::Cloud::Bigquery.new
# dataset = bigquery.dataset "my_dataset"
#
# data = bigquery.query "CREATE TABLE my_table (x INT64)"
# data = dataset.query "CREATE TABLE my_table (x INT64)"
#
# table_ref = data.ddl_target_table # Or ddl_target_routine for CREATE/DROP FUNCTION/PROCEDURE
#
# @example Execute a DML statement:
# require "google/cloud/bigquery"
#
# bigquery = Google::Cloud::Bigquery.new
# dataset = bigquery.dataset "my_dataset"
#
# data = bigquery.query "UPDATE my_table " \
# "SET x = x + 1 " \
# "WHERE x IS NOT NULL"
# data = dataset.query "UPDATE my_table SET x = x + 1 WHERE x IS NOT NULL"
#
# puts data.num_dml_affected_rows
#
# @example Run query in a session:
# require "google/cloud/bigquery"
#
# bigquery = Google::Cloud::Bigquery.new
# dataset = bigquery.dataset "my_dataset"
#
# job = dataset.query_job "CREATE TEMPORARY TABLE temptable AS SELECT 17 as foo", create_session: true
#
# job.wait_until_done!
#
# session_id = job.session_id
# data = dataset.query "SELECT * FROM temptable", session_id: session_id
#
# @example Query using external data source, set destination:
# require "google/cloud/bigquery"
#
Expand All @@ -1699,10 +1771,25 @@ def query_job query, params: nil, types: nil, external: nil, priority: "INTERACT
#
# @!group Data
#
def query query, params: nil, types: nil, external: nil, max: nil, cache: true,
standard_sql: nil, legacy_sql: nil, &block
job = query_job query, params: params, types: types, external: external, cache: cache,
standard_sql: standard_sql, legacy_sql: legacy_sql, &block
def query query,
params: nil,
types: nil,
external: nil,
max: nil,
cache: true,
standard_sql: nil,
legacy_sql: nil,
session_id: nil,
&block
job = query_job query,
params: params,
types: types,
external: external,
cache: cache,
standard_sql: standard_sql,
legacy_sql: legacy_sql,
session_id: session_id,
&block
job.wait_until_done!
ensure_job_succeeded! job

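
Editor's note: for reference, here is a minimal end-to-end sketch of the two options this change adds to Dataset#query_job and Dataset#query, based on the @example blocks and acceptance tests in this diff; the dataset name and temporary table name are illustrative.

require "google/cloud/bigquery"

bigquery = Google::Cloud::Bigquery.new
dataset = bigquery.dataset "my_dataset"

# Start a new session; the server generates the session ID and the job carries it.
job = dataset.query_job "CREATE TEMPORARY TABLE temptable AS SELECT 17 AS foo",
                        create_session: true
job.wait_until_done!
raise "session job failed" if job.failed?

# Reuse the session so the temporary table created above is still visible.
data = dataset.query "SELECT * FROM temptable", session_id: job.session_id
data.each { |row| puts row[:foo] } # => 17

The temporary table is scoped to the session, which is why the follow-up query must pass the session_id returned by the first job.
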
10 changes: 10 additions & 0 deletions google-cloud-bigquery/lib/google/cloud/bigquery/job.rb
@@ -226,6 +226,16 @@ def reservation_usage
Array(@gapi.statistics.reservation_usage).map { |g| ReservationUsage.from_gapi g }
end

##
# The ID of the session if this job is part of one. See the `create_session` param in {Project#query_job} and
# {Dataset#query_job}.
#
# @return [String, nil] The session ID, or `nil` if not associated with a session.
#
def session_id
@gapi.statistics.session_info&.session_id
end

##
# The ID of a multi-statement transaction.
#
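
Editor's note: a usage sketch for the new Job#session_id reader, mirroring the project-level acceptance test above; it assumes the first job was created with create_session: true, and session_id returns nil for jobs that are not part of a session.

require "google/cloud/bigquery"

bigquery = Google::Cloud::Bigquery.new
dataset = bigquery.dataset "my_dataset"

# Project-level query job, pinned to a dataset and started in a new session.
job = bigquery.query_job "CREATE TEMPORARY TABLE temptable AS SELECT 17 AS foo",
                         dataset: dataset,
                         create_session: true
job.wait_until_done!

if job.session_id.nil?
  puts "job is not part of a session"
else
  # A follow-up job in the same session can read the temporary table.
  follow_up = bigquery.query_job "SELECT * FROM temptable",
                                 dataset: dataset,
                                 session_id: job.session_id
  follow_up.wait_until_done!
  puts follow_up.data.first[:foo] # => 17
end
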