@@ -341,7 +341,9 @@ def generate_cloud_function_code(self, def_, dir, package_requirements=None):
341341 entry_point = self .generate_cloud_function_main_code (def_ , dir )
342342 return entry_point
343343
344- def create_cloud_function (self , def_ , cf_name , package_requirements = None ):
344+ def create_cloud_function (
345+ self , def_ , cf_name , package_requirements = None , cloud_function_timeout = 600
346+ ):
345347 """Create a cloud function from the given user defined function."""
346348
347349 # Build and deploy folder structure containing cloud function
@@ -409,7 +411,14 @@ def create_cloud_function(self, def_, cf_name, package_requirements=None):
409411 )
410412 function .service_config = functions_v2 .ServiceConfig ()
411413 function .service_config .available_memory = "1024M"
412- function .service_config .timeout_seconds = 600
414+ if cloud_function_timeout is not None :
415+ if cloud_function_timeout > 1200 :
416+ raise ValueError (
417+ "BigQuery remote function can wait only up to 20 minutes"
418+ ", see for more details "
419+ "https://cloud.google.com/bigquery/quotas#remote_function_limits."
420+ )
421+ function .service_config .timeout_seconds = cloud_function_timeout
413422 function .service_config .service_account_email = (
414423 self ._cloud_function_service_account
415424 )
@@ -456,6 +465,7 @@ def provision_bq_remote_function(
456465 name ,
457466 package_requirements ,
458467 max_batching_rows ,
468+ cloud_function_timeout ,
459469 ):
460470 """Provision a BigQuery remote function."""
461471 # If reuse of any existing function with the same name (indicated by the
@@ -477,7 +487,7 @@ def provision_bq_remote_function(
477487 # Create the cloud function if it does not exist
478488 if not cf_endpoint :
479489 cf_endpoint = self .create_cloud_function (
480- def_ , cloud_function_name , package_requirements
490+ def_ , cloud_function_name , package_requirements , cloud_function_timeout
481491 )
482492 else :
483493 logger .info (f"Cloud function { cloud_function_name } already exists." )
@@ -631,6 +641,7 @@ def remote_function(
631641 cloud_function_kms_key_name : Optional [str ] = None ,
632642 cloud_function_docker_repository : Optional [str ] = None ,
633643 max_batching_rows : Optional [int ] = 1000 ,
644+ cloud_function_timeout : Optional [int ] = 600 ,
634645):
635646 """Decorator to turn a user defined function into a BigQuery remote function.
636647
@@ -756,6 +767,16 @@ def remote_function(
756767 `None` can be passed to let BQ remote functions service apply
757768 default batching. See for more details
758769 https://cloud.google.com/bigquery/docs/remote-functions#limiting_number_of_rows_in_a_batch_request.
770+ cloud_function_timeout (int, Optional):
771+ The maximum amount of time (in seconds) BigQuery should wait for
772+ the cloud function to return a response. See for more details
773+ https://cloud.google.com/functions/docs/configuring/timeout.
774+ Please note that even though the cloud function (2nd gen) itself
775+ allows setting up to 60 minutes of timeout, BigQuery remote
776+ function can wait only up to 20 minutes, see for more details
777+ https://cloud.google.com/bigquery/quotas#remote_function_limits.
778+ By default BigQuery DataFrames uses a 10 minute timeout. `None`
779+ can be passed to let the cloud functions default timeout take effect.
759780 """
760781 import bigframes .pandas as bpd
761782
@@ -880,6 +901,7 @@ def wrapper(f):
880901 name ,
881902 packages ,
882903 max_batching_rows ,
904+ cloud_function_timeout ,
883905 )
884906
885907 # TODO: Move ibis logic to compiler step
0 commit comments