@google-cloud/bigquery
Comparing version 5.9.1 to 5.9.2
@@ -161,3 +161,3 @@ /*! | ||
* `Table`, etc.), we are going to be using a dataset from | ||
* [data.gov](http://goo.gl/f2SXcb) of higher education institutions. | ||
* {@link http://goo.gl/f2SXcb| data.gov} of higher education institutions. | ||
* | ||
@@ -169,16 +169,23 @@ * We will create a table with the correct schema, import the public CSV file | ||
* | ||
* @see [What is BigQuery?]{@link https://cloud.google.com/bigquery/what-is-bigquery} | ||
* See {@link https://cloud.google.com/bigquery/what-is-bigquery| What is BigQuery?} | ||
* | ||
* @param {BigQueryOptions} options Constructor options. | ||
* | ||
* @example <caption>Install the client library with <a href="https://www.npmjs.com/">npm</a>:</caption> | ||
* @example Install the client library with <a href="https://www.npmjs.com/">npm</a>: | ||
* ``` | ||
* npm install @google-cloud/bigquery | ||
* | ||
* @example <caption>Import the client library</caption> | ||
* ``` | ||
* @example Import the client library | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
* | ||
* @example <caption>Create a client that uses <a href="https://cloud.google.com/docs/authentication/production#providing_credentials_to_your_application">Application Default Credentials (ADC)</a>:</caption> | ||
* ``` | ||
* @example Create a client that uses <a href="https://cloud.google.com/docs/authentication/production#providing_credentials_to_your_application">Application Default Credentials (ADC)</a>: | ||
* ``` | ||
* const bigquery = new BigQuery(); | ||
* | ||
* @example <caption>Create a client with <a href="https://cloud.google.com/docs/authentication/production#obtaining_and_providing_service_account_credentials_manually">explicit credentials</a>:</caption> | ||
* ``` | ||
* @example Create a client with <a href="https://cloud.google.com/docs/authentication/production#obtaining_and_providing_service_account_credentials_manually">explicit credentials</a>: | ||
* ``` | ||
* const bigquery = new BigQuery({ | ||
@@ -189,2 +196,3 @@ * projectId: 'your-project-id', | ||
* | ||
* ``` | ||
* @example <caption>include:samples/quickstart.js</caption> | ||
@@ -234,2 +242,3 @@ * region_tag:bigquery_quickstart | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
@@ -247,2 +256,3 @@ * const bigquery = new BigQuery(); | ||
* }); | ||
* ``` | ||
*/ | ||
@@ -260,2 +270,3 @@ static date(value: BigQueryDateOptions | string): BigQueryDate; | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
@@ -272,2 +283,3 @@ * const date = BigQuery.date('2017-01-01'); | ||
* }); | ||
* ``` | ||
*/ | ||
@@ -295,2 +307,3 @@ date(value: BigQueryDateOptions | string): BigQueryDate; | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
@@ -310,2 +323,3 @@ * const datetime = BigQuery.datetime('2017-01-01 13:00:00'); | ||
* }); | ||
* ``` | ||
*/ | ||
@@ -331,2 +345,3 @@ /** | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
@@ -347,2 +362,3 @@ * const bigquery = new BigQuery(); | ||
* }); | ||
* ``` | ||
*/ | ||
@@ -366,2 +382,3 @@ static datetime(value: BigQueryDatetimeOptions | string): BigQueryDatetime; | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
@@ -378,2 +395,3 @@ * const time = BigQuery.time('14:00:00'); // 2:00 PM | ||
* }); | ||
* ``` | ||
*/ | ||
@@ -394,2 +412,3 @@ /** | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
@@ -407,2 +426,3 @@ * const bigquery = new BigQuery(); | ||
* }); | ||
* ``` | ||
*/ | ||
@@ -419,4 +439,6 @@ static time(value: BigQueryTimeOptions | string): BigQueryTime; | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
* const timestamp = BigQuery.timestamp(new Date()); | ||
* ``` | ||
*/ | ||
@@ -432,5 +454,7 @@ /** | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
* const bigquery = new BigQuery(); | ||
* const timestamp = bigquery.timestamp(new Date()); | ||
* ``` | ||
*/ | ||
@@ -449,2 +473,3 @@ static timestamp(value: Date | string): BigQueryTimestamp; | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
@@ -463,2 +488,3 @@ * const bigquery = new BigQuery(); | ||
* // customValue is the value returned from your `integerTypeCastFunction`. | ||
* ``` | ||
*/ | ||
@@ -475,4 +501,6 @@ static int(value: string | number | IntegerTypeCastValue, typeCastOptions?: IntegerTypeCastOptions): BigQueryInt; | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
* const geography = BigQuery.geography('POINT(1 2)'); | ||
* ``` | ||
*/ | ||
@@ -488,5 +516,7 @@ /** | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
* const bigquery = new BigQuery(); | ||
* const geography = bigquery.geography('POINT(1 2)'); | ||
* ``` | ||
*/ | ||
@@ -509,3 +539,3 @@ static geography(value: string): Geography; | ||
* | ||
* @see [Data Type]{@link https://cloud.google.com/bigquery/data-types} | ||
* See {@link https://cloud.google.com/bigquery/data-types| Data Type} | ||
* | ||
@@ -523,3 +553,3 @@ * @param {*} providedType The type. | ||
* | ||
* @see [Data Type]{@link https://cloud.google.com/bigquery/data-types} | ||
* See {@link https://cloud.google.com/bigquery/data-types| Data Type} | ||
* | ||
@@ -535,3 +565,3 @@ * @param {*} value The value. | ||
* | ||
* @see [Jobs.query API Reference Docs (see `queryParameters`)]{@link https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#request-body} | ||
* See {@link https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#request-body| Jobs.query API Reference Docs (see `queryParameters`)} | ||
* | ||
@@ -546,7 +576,198 @@ * @param {*} value The value. | ||
private static _isCustomType; | ||
/** | ||
* @callback DatasetCallback | ||
* @param {?Error} err Request error, if any. | ||
* @param {object} dataset The [dataset resource]{@link https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets#resource}. | ||
* @param {object} apiResponse The full API response. | ||
*/ | ||
/** | ||
* Create a dataset. | ||
* | ||
* See {@link https://cloud.google.com/bigquery/docs/reference/v2/datasets/insert| Datasets: insert API Documentation} | ||
* | ||
* @param {string} id ID of the dataset to create. | ||
* @param {object} [options] See a | ||
* {@link https://cloud.google.com/bigquery/docs/reference/v2/datasets#resource| Dataset resource}. | ||
* @param {DatasetCallback} [callback] The callback function. | ||
* @param {?error} callback.err An error returned while making this request | ||
* @param {Dataset} callback.dataset The newly created dataset | ||
* @param {object} callback.apiResponse The full API response. | ||
* @returns {Promise<Dataset>} | ||
* | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
* const bigquery = new BigQuery(); | ||
* | ||
* bigquery.createDataset('my-dataset', function(err, dataset, apiResponse) {}); | ||
* | ||
* //- | ||
* // If the callback is omitted, we'll return a Promise. | ||
* //- | ||
* bigquery.createDataset('my-dataset').then(function(data) { | ||
* const dataset = data[0]; | ||
* const apiResponse = data[1]; | ||
* }); | ||
* ``` | ||
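* @example Passing a Dataset resource as options (a minimal sketch; the location value shown is illustrative) | ||
* ``` | ||
* // Options follow the Dataset resource format, e.g. to pin the dataset | ||
* // to a specific location: | ||
* bigquery.createDataset('my-dataset', {location: 'US'}, function(err, dataset, apiResponse) {}); | ||
* ``` | ||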
*/ | ||
createDataset(id: string, options?: DatasetResource): Promise<DatasetResponse>; | ||
createDataset(id: string, options: DatasetResource, callback: DatasetCallback): void; | ||
createDataset(id: string, callback: DatasetCallback): void; | ||
/** | ||
* @callback JobCallback | ||
* @param {?Error} err Request error, if any. | ||
* @param {object} job The newly created job for your query. | ||
* @param {object} apiResponse The full API response. | ||
*/ | ||
/** | ||
* Run a query as a job. No results are immediately returned. Instead, your | ||
* callback will be executed with a {@link Job} object that you must | ||
* ping for the results. See the Job documentation for explanations of how to | ||
* check on the status of the job. | ||
* | ||
* See {@link https://cloud.google.com/bigquery/docs/reference/v2/jobs/insert| Jobs: insert API Documentation} | ||
* | ||
* @param {object|string} options The configuration object. This must be in | ||
* the format of the {@link https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationQuery| `configuration.query`} | ||
* property of a Jobs resource. If a string is provided, this is used as the | ||
* query string, and all other options are defaulted. | ||
* @param {Table} [options.destination] The table to save the | ||
* query's results to. If omitted, a new table will be created. | ||
* @param {boolean} [options.dryRun] If set, don't actually run this job. A | ||
* valid query will update the job with processing statistics. These can | ||
* be accessed via `job.metadata`. | ||
* @param {object} [options.labels] String key/value pairs to be attached as | ||
* labels to the newly created Job. | ||
* @param {string} [options.location] The geographic location of the job. | ||
* Required except for US and EU. | ||
* @param {number} [options.jobTimeoutMs] Job timeout in milliseconds. | ||
* If this time limit is exceeded, BigQuery might attempt to stop the job. | ||
* @param {string} [options.jobId] Custom job id. | ||
* @param {string} [options.jobPrefix] Prefix to apply to the job id. | ||
* @param {string} options.query A query string, following the BigQuery query | ||
* syntax, of the query to execute. | ||
* @param {boolean} [options.useLegacySql=false] Option to use legacy sql syntax. | ||
* @param {object} [options.defaultDataset] The dataset. This must be in | ||
* the format of the {@link https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets#DatasetReference| `DatasetReference`} | ||
* @param {boolean} [options.wrapIntegers] Optionally wrap INT64 in BigQueryInt | ||
* or custom INT64 value type. | ||
* @param {object|array} [options.params] Option to provide query parameters. | ||
* @param {JobCallback} [callback] The callback function. | ||
* @param {?error} callback.err An error returned while making this request. | ||
* @param {Job} callback.job The newly created job for your query. | ||
* @param {object} callback.apiResponse The full API response. | ||
* @returns {Promise<JobResponse>} | ||
* | ||
* @throws {Error} If a query is not specified. | ||
* @throws {Error} If a Table is not provided as a destination. | ||
* | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
* const bigquery = new BigQuery(); | ||
* | ||
* const query = 'SELECT url FROM `publicdata.samples.github_nested` LIMIT 100'; | ||
* | ||
* //- | ||
* // You may pass only a query string; a new table will be created to store | ||
* // the results of the query. | ||
* //- | ||
* bigquery.createQueryJob(query, function(err, job) {}); | ||
* | ||
* //- | ||
* // You can also control the destination table by providing a | ||
* // {@link Table} object. | ||
* //- | ||
* bigquery.createQueryJob({ | ||
* destination: bigquery.dataset('higher_education').table('institutions'), | ||
* query: query | ||
* }, function(err, job) {}); | ||
* | ||
* //- | ||
* // After you have run `createQueryJob`, your query will execute in a job. | ||
* // Your callback is executed with a {@link Job} object so that you may | ||
* // check for the results. | ||
* //- | ||
* bigquery.createQueryJob(query, function(err, job) { | ||
* if (!err) { | ||
* job.getQueryResults(function(err, rows, apiResponse) {}); | ||
* } | ||
* }); | ||
* | ||
* //- | ||
* // If the callback is omitted, we'll return a Promise. | ||
* //- | ||
* bigquery.createQueryJob(query).then(function(data) { | ||
* const job = data[0]; | ||
* const apiResponse = data[1]; | ||
* | ||
* return job.getQueryResults(); | ||
* }); | ||
* ``` | ||
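* @example Passing query parameters (a minimal sketch; the `owner` parameter name and value are illustrative) | ||
* ``` | ||
* bigquery.createQueryJob({ | ||
* query: 'SELECT url FROM `publicdata.samples.github_nested` WHERE repository.owner = @owner LIMIT 100', | ||
* params: {owner: 'google'} | ||
* }, function(err, job) {}); | ||
* ``` | ||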
*/ | ||
createQueryJob(options: Query | string): Promise<JobResponse>; | ||
createQueryJob(options: Query | string, callback: JobCallback): void; | ||
/** | ||
* Creates a job. Typically when creating a job you'll have a very specific | ||
* task in mind. For this we recommend one of the following methods: | ||
* | ||
* - {@link BigQuery#createQueryJob} | ||
* - {@link Table#createCopyJob} | ||
* - {@link Table#createCopyFromJob} | ||
* - {@link Table#createExtractJob} | ||
* - {@link Table#createLoadJob} | ||
* | ||
* However in the event you need a finer level of control over the job | ||
* creation, you can use this method to pass in a raw {@link https://cloud.google.com/bigquery/docs/reference/rest/v2/Job| Job resource} | ||
* object. | ||
* | ||
* See {@link https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs| Jobs Overview} | ||
* See {@link https://cloud.google.com/bigquery/docs/reference/v2/jobs/insert| Jobs: insert API Documentation} | ||
* | ||
* @param {object} options Object in the form of a {@link https://cloud.google.com/bigquery/docs/reference/rest/v2/Job| Job resource}. | ||
* @param {string} [options.jobId] Custom job id. | ||
* @param {string} [options.jobPrefix] Prefix to apply to the job id. | ||
* @param {string} [options.location] The geographic location of the job. | ||
* Required except for US and EU. | ||
* @param {JobCallback} [callback] The callback function. | ||
* @param {?error} callback.err An error returned while making this request. | ||
* @param {Job} callback.job The newly created job. | ||
* @param {object} callback.apiResponse The full API response. | ||
* @returns {Promise<JobResponse>} | ||
* | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
* const bigquery = new BigQuery(); | ||
* | ||
* const options = { | ||
* configuration: { | ||
* query: { | ||
* query: 'SELECT url FROM `publicdata.samples.github_nested` LIMIT 100' | ||
* } | ||
* } | ||
* }; | ||
* | ||
* bigquery.createJob(options, function(err, job) { | ||
* if (err) { | ||
* // Error handling omitted. | ||
* } | ||
* | ||
* job.getQueryResults(function(err, rows) {}); | ||
* }); | ||
* | ||
* //- | ||
* // If the callback is omitted, we'll return a Promise. | ||
* //- | ||
* bigquery.createJob(options).then(function(data) { | ||
* const job = data[0]; | ||
* | ||
* return job.getQueryResults(); | ||
* }); | ||
* ``` | ||
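* @example Using async/await and a job prefix (a sketch; the prefix value is illustrative) | ||
* ``` | ||
* // `jobPrefix` is prepended to the generated job id. | ||
* const [job] = await bigquery.createJob({...options, jobPrefix: 'my-app-'}); | ||
* const [rows] = await job.getQueryResults(); | ||
* ``` | ||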
*/ | ||
createJob(options: JobOptions): Promise<JobResponse>; | ||
@@ -564,10 +785,131 @@ createJob(options: JobOptions, callback: JobCallback): void; | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
* const bigquery = new BigQuery(); | ||
* const dataset = bigquery.dataset('higher_education'); | ||
* ``` | ||
*/ | ||
dataset(id: string, options?: DatasetOptions): Dataset; | ||
/** | ||
* List all or some of the datasets in your project. | ||
* | ||
* See {@link https://cloud.google.com/bigquery/docs/reference/v2/datasets/list| Datasets: list API Documentation} | ||
* | ||
* @param {object} [options] Configuration object. | ||
* @param {boolean} [options.all] List all datasets, including hidden ones. | ||
* @param {boolean} [options.autoPaginate] Have pagination handled automatically. | ||
* Default: true. | ||
* @param {number} [options.maxApiCalls] Maximum number of API calls to make. | ||
* @param {number} [options.maxResults] Maximum number of results to return. | ||
* @param {string} [options.pageToken] Token returned from a previous call, to | ||
* request the next page of results. | ||
* @param {DatasetsCallback} [callback] The callback function. | ||
* @param {?error} callback.err An error returned while making this request | ||
* @param {Dataset[]} callback.datasets The list of datasets in your project. | ||
* @returns {Promise<DatasetsResponse>} | ||
* | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
* const bigquery = new BigQuery(); | ||
* | ||
* bigquery.getDatasets(function(err, datasets) { | ||
* if (!err) { | ||
* // datasets is an array of Dataset objects. | ||
* } | ||
* }); | ||
* | ||
* //- | ||
* // To control how many API requests are made and page through the results | ||
* // manually, set `autoPaginate` to `false`. | ||
* //- | ||
* function manualPaginationCallback(err, datasets, nextQuery, apiResponse) { | ||
* if (nextQuery) { | ||
* // More results exist. | ||
* bigquery.getDatasets(nextQuery, manualPaginationCallback); | ||
* } | ||
* } | ||
* | ||
* bigquery.getDatasets({ | ||
* autoPaginate: false | ||
* }, manualPaginationCallback); | ||
* | ||
* //- | ||
* // If the callback is omitted, we'll return a Promise. | ||
* //- | ||
* bigquery.getDatasets().then(function(datasets) {}); | ||
* ``` | ||
*/ | ||
getDatasets(options?: GetDatasetsOptions): Promise<DatasetsResponse>; | ||
getDatasets(options: GetDatasetsOptions, callback: DatasetsCallback): void; | ||
getDatasets(callback: DatasetsCallback): void; | ||
/** | ||
* @callback GetJobsCallback | ||
* @param {?Error} err Request error, if any. | ||
* @param {object} jobs An array of [Job]{@link https://cloud.google.com/bigquery/docs/reference/v2/Job} objects. | ||
*/ | ||
/** | ||
* @typedef {array} GetJobsResponse | ||
* @property {object} 0 An array of Job objects. | ||
*/ | ||
/** | ||
* Get all of the jobs from your project. | ||
* | ||
* See {@link https://cloud.google.com/bigquery/docs/reference/v2/jobs/list| Jobs: list API Documentation} | ||
* | ||
* @param {object} [options] Configuration object. | ||
* @param {boolean} [options.allUsers] Display jobs owned by all users in the | ||
* project. | ||
* @param {boolean} [options.autoPaginate] Have pagination handled | ||
* automatically. Default: true. | ||
* @param {number} [options.maxApiCalls] Maximum number of API calls to make. | ||
* @param {number} [options.maxResults] Maximum number of results to return. | ||
* @param {string} [options.pageToken] Token returned from a previous call, to | ||
* request the next page of results. | ||
* @param {string} [options.projection] Restrict information returned to a set | ||
* of selected fields. Acceptable values are "full", for all job data, and | ||
* "minimal", to not include the job configuration. | ||
* @param {string} [options.stateFilter] Filter for job state. Acceptable | ||
* values are "done", "pending", and "running". Sending an array to this | ||
* option performs a disjunction. | ||
* @param {GetJobsCallback} [callback] The callback function. | ||
* @param {?error} callback.err An error returned while making this request | ||
* @param {Job[]} callback.jobs The list of jobs in your | ||
* project. | ||
* @returns {Promise<GetJobsResponse>} | ||
* | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
* const bigquery = new BigQuery(); | ||
* | ||
* bigquery.getJobs(function(err, jobs) { | ||
* if (!err) { | ||
* // jobs is an array of Job objects. | ||
* } | ||
* }); | ||
* | ||
* //- | ||
* // To control how many API requests are made and page through the results | ||
* // manually, set `autoPaginate` to `false`. | ||
* //- | ||
* function manualPaginationCallback(err, jobs, nextQuery, apiResponse) { | ||
* if (nextQuery) { | ||
* // More results exist. | ||
* bigquery.getJobs(nextQuery, manualPaginationCallback); | ||
* } | ||
* } | ||
* | ||
* bigquery.getJobs({ | ||
* autoPaginate: false | ||
* }, manualPaginationCallback); | ||
* | ||
* //- | ||
* // If the callback is omitted, we'll return a Promise. | ||
* //- | ||
* bigquery.getJobs().then(function(data) { | ||
* const jobs = data[0]; | ||
* }); | ||
* ``` | ||
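* @example Filtering by job state (a minimal sketch using the documented `stateFilter` option) | ||
* ``` | ||
* bigquery.getJobs({ | ||
* stateFilter: 'running' | ||
* }, function(err, jobs) { | ||
* // jobs contains only the currently running jobs. | ||
* }); | ||
* ``` | ||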
*/ | ||
getJobs(options?: GetJobsOptions): Promise<GetJobsResponse>; | ||
@@ -586,2 +928,3 @@ getJobs(options: GetJobsOptions, callback: GetJobsCallback): void; | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
@@ -591,4 +934,122 @@ * const bigquery = new BigQuery(); | ||
* const myExistingJob = bigquery.job('job-id'); | ||
* ``` | ||
*/ | ||
job(id: string, options?: JobOptions): Job; | ||
/** | ||
* Run a query scoped to your project. For manual pagination please refer to | ||
* {@link BigQuery#createQueryJob}. | ||
* | ||
* See {@link https://cloud.google.com/bigquery/docs/reference/v2/jobs/query| Jobs: query API Documentation} | ||
* | ||
* @param {string|object} query A string SQL query or configuration object. | ||
* For all available options, see | ||
* {@link https://cloud.google.com/bigquery/docs/reference/v2/jobs/query#request-body| Jobs: query request body}. | ||
* @param {string} [query.location] The geographic location of the job. | ||
* Required except for US and EU. | ||
* @param {string} [query.jobId] Custom id for the underlying job. | ||
* @param {string} [query.jobPrefix] Prefix to apply to the underlying job id. | ||
* @param {object|Array<*>} query.params For positional SQL parameters, provide | ||
* an array of values. For named SQL parameters, provide an object which | ||
* maps each named parameter to its value. The supported types are | ||
* integers, floats, {@link BigQuery#date} objects, {@link BigQuery#datetime} | ||
* objects, {@link BigQuery#time} objects, {@link BigQuery#timestamp} | ||
* objects, Strings, Booleans, and Objects. | ||
* @param {string} query.query A query string, following the BigQuery query | ||
* syntax, of the query to execute. | ||
* @param {object|Array<*>} query.types Provided types for query parameters. | ||
* For positional SQL parameters, provide an array of types. For named | ||
* SQL parameters, provide an object which maps each named parameter to | ||
* its type. | ||
* @param {boolean} [query.useLegacySql=false] Option to use legacy sql syntax. | ||
* @param {object} [options] Configuration object for query results. | ||
* @param {number} [options.maxResults] Maximum number of results to read. | ||
* @param {number} [options.timeoutMs] How long to wait for the query to | ||
* complete, in milliseconds, before returning. Default is 10 seconds. | ||
* If the timeout passes before the job completes, an error will be returned | ||
* and the 'jobComplete' field in the response will be false. | ||
* @param {boolean|IntegerTypeCastOptions} [options.wrapIntegers=false] Wrap values | ||
* of 'INT64' type in {@link BigQueryInt} objects. | ||
* If a `boolean`, this will wrap values in {@link BigQueryInt} objects. | ||
* If an `object`, this will return a value returned by | ||
* `wrapIntegers.integerTypeCastFunction`. | ||
* Please see {@link IntegerTypeCastOptions} for options descriptions. | ||
* @param {function} [callback] The callback function. | ||
* @param {?error} callback.err An error returned while making this request | ||
* @param {array} callback.rows The list of results from your query. | ||
* @returns {Promise} | ||
* | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
* const bigquery = new BigQuery(); | ||
* | ||
* const query = 'SELECT url FROM `publicdata.samples.github_nested` LIMIT 100'; | ||
* | ||
* bigquery.query(query, function(err, rows) { | ||
* if (!err) { | ||
* // rows is an array of results. | ||
* } | ||
* }); | ||
* | ||
* //- | ||
* // Positional SQL parameters are supported. | ||
* //- | ||
* bigquery.query({ | ||
* query: [ | ||
* 'SELECT url', | ||
* 'FROM `publicdata.samples.github_nested`', | ||
* 'WHERE repository.owner = ?' | ||
* ].join(' '), | ||
* | ||
* params: [ | ||
* 'google' | ||
* ] | ||
* }, function(err, rows) {}); | ||
* | ||
* //- | ||
* // Or if you prefer to name them, that's also supported. | ||
* //- | ||
* bigquery.query({ | ||
* query: [ | ||
* 'SELECT url', | ||
* 'FROM `publicdata.samples.github_nested`', | ||
* 'WHERE repository.owner = @owner' | ||
* ].join(' '), | ||
* params: { | ||
* owner: 'google' | ||
* } | ||
* }, function(err, rows) {}); | ||
* | ||
* //- | ||
* // Providing types for SQL parameters is supported. | ||
* //- | ||
* bigquery.query({ | ||
* query: [ | ||
* 'SELECT url', | ||
* 'FROM `publicdata.samples.github_nested`', | ||
* 'WHERE repository.owner = ?' | ||
* ].join(' '), | ||
* | ||
* params: [ | ||
* null | ||
* ], | ||
* | ||
* types: ['string'] | ||
* }, function(err, rows) {}); | ||
* | ||
* //- | ||
* // If you need to use a `DATE`, `DATETIME`, `TIME`, or `TIMESTAMP` type in | ||
* // your query, see {@link BigQuery#date}, {@link BigQuery#datetime}, | ||
* // {@link BigQuery#time}, and {@link BigQuery#timestamp}. | ||
* //- | ||
* | ||
* //- | ||
* // If the callback is omitted, we'll return a Promise. | ||
* //- | ||
* bigquery.query(query).then(function(data) { | ||
* const rows = data[0]; | ||
* }); | ||
* ``` | ||
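* @example Wrapping INT64 values (a sketch; using `BigInt` as the cast function is illustrative) | ||
* ``` | ||
* // Pass `true` to wrap INT64 values in BigQueryInt objects. | ||
* const [rows] = await bigquery.query(query, {wrapIntegers: true}); | ||
* | ||
* // Or supply a custom cast function. | ||
* const [castRows] = await bigquery.query(query, { | ||
* wrapIntegers: {integerTypeCastFunction: BigInt} | ||
* }); | ||
* ``` | ||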
*/ | ||
query(query: string, options?: QueryOptions): Promise<QueryRowsResponse>; | ||
@@ -657,5 +1118,7 @@ query(query: Query, options?: QueryOptions): Promise<SimpleQueryRowsResponse>; | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
* const bigquery = new BigQuery(); | ||
* const anInt = bigquery.int(7); | ||
* ``` | ||
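* @example Supplying typeCastOptions (a sketch; using `BigInt` as the cast function is illustrative) | ||
* ``` | ||
* const bigInt = bigquery.int('9007199254740993', { | ||
* integerTypeCastFunction: BigInt | ||
* }); | ||
* // bigInt.valueOf() returns the value produced by your `integerTypeCastFunction`. | ||
* ``` | ||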
*/ | ||
@@ -662,0 +1125,0 @@ export declare class BigQueryInt extends Number { |
@@ -58,5 +58,7 @@ /*! | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
* const bigquery = new BigQuery(); | ||
* const dataset = bigquery.dataset('institutions'); | ||
* ``` | ||
*/ | ||
@@ -70,2 +72,14 @@ declare class Dataset extends ServiceObject { | ||
constructor(bigQuery: BigQuery, id: string, options?: DatasetOptions); | ||
/** | ||
* Run a query as a job. No results are immediately returned. Instead, your | ||
* callback will be executed with a {@link Job} object that you must | ||
* ping for the results. See the Job documentation for explanations of how to | ||
* check on the status of the job. | ||
* | ||
* See {@link BigQuery#createQueryJob} for full documentation of this method. | ||
* | ||
* @param {object} options See {@link BigQuery#createQueryJob} for full documentation of this method. | ||
* @param {JobCallback} [callback] See {@link BigQuery#createQueryJob} for full documentation of this method. | ||
* @returns {Promise<JobResponse>} See {@link BigQuery#createQueryJob} for full documentation of this method. | ||
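* | ||
* @example A minimal sketch (the dataset is used as the query's default dataset) | ||
* ``` | ||
* const [job] = await dataset.createQueryJob('SELECT 1 AS x'); | ||
* const [rows] = await job.getQueryResults(); | ||
* ``` | ||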
*/ | ||
createQueryJob(options: string | Query): Promise<JobResponse>; | ||
@@ -84,16 +98,399 @@ createQueryJob(options: string | Query, callback: JobCallback): void; | ||
createQueryStream(options: Query | string): Duplex; | ||
/** | ||
* @callback CreateRoutineCallback | ||
* @param {?Error} err Request error, if any. | ||
* @param {Routine} routine The newly created routine. | ||
* @param {object} response The full API response body. | ||
*/ | ||
/** | ||
* @typedef {array} CreateRoutineResponse | ||
* @property {Routine} 0 The newly created routine. | ||
* @property {object} 1 The full API response body. | ||
*/ | ||
/** | ||
* Create a {@link Routine}. | ||
* | ||
* See {@link https://cloud.google.com/bigquery/docs/reference/rest/v2/routines/insert| Routines: insert API Documentation} | ||
* | ||
* @param {string} id The routine ID. | ||
* @param {object} config A [routine resource]{@link https://cloud.google.com/bigquery/docs/reference/rest/v2/routines#Routine}. | ||
* @param {CreateRoutineCallback} [callback] The callback function. | ||
* @returns {Promise<CreateRoutineResponse>} | ||
* | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
* const bigquery = new BigQuery(); | ||
* const dataset = bigquery.dataset('my-dataset'); | ||
* | ||
* const id = 'my-routine'; | ||
* const config = { | ||
* arguments: [{ | ||
* name: 'x', | ||
* dataType: { | ||
* typeKind: 'INT64' | ||
* } | ||
* }], | ||
* definitionBody: 'x * 3', | ||
* routineType: 'SCALAR_FUNCTION', | ||
* returnType: { | ||
* typeKind: 'INT64' | ||
* } | ||
* }; | ||
* | ||
* dataset.createRoutine(id, config, (err, routine, apiResponse) => { | ||
* if (!err) { | ||
* // The routine was created successfully. | ||
* } | ||
* }); | ||
* | ||
* ``` | ||
* @example If the callback is omitted, a Promise will be returned | ||
* ``` | ||
* const [routine, apiResponse] = await dataset.createRoutine(id, config); | ||
* ``` | ||
*/ | ||
createRoutine(id: string, config: RoutineMetadata): Promise<RoutineResponse>; | ||
createRoutine(id: string, config: RoutineMetadata, callback: RoutineCallback): void; | ||
/** | ||
* @callback TableCallback | ||
* @param {?Error} err Request error, if any. | ||
* @param {Table} table The table. | ||
* @param {object} apiResponse The full API response body. | ||
*/ | ||
/** | ||
* @typedef {array} TableResponse | ||
* @property {Table} 0 The table. | ||
* @property {object} 1 The full API response body. | ||
*/ | ||
/** | ||
* Create a {@link Table} given a tableId or configuration object. | ||
* | ||
* See {@link https://cloud.google.com/bigquery/docs/reference/v2/tables/insert| Tables: insert API Documentation} | ||
* | ||
* @param {string} id Table id. | ||
* @param {object} [options] See a | ||
* {@link https://cloud.google.com/bigquery/docs/reference/v2/tables#resource| Table resource}. | ||
* @param {string|object} [options.schema] A comma-separated list of name:type | ||
* pairs. Valid types are "string", "integer", "float", "boolean", and | ||
* "timestamp". If the type is omitted, it is assumed to be "string". | ||
* Example: "name:string, age:integer". Schemas can also be specified as a | ||
* JSON array of fields, which allows for nested and repeated fields. See | ||
* a {@link http://goo.gl/sl8Dmg| Table resource} for more detailed information. | ||
* @param {TableCallback} [callback] The callback function. | ||
* @param {?error} callback.err An error returned while making this request | ||
* @param {Table} callback.table The newly created table. | ||
* @param {object} callback.apiResponse The full API response. | ||
* @returns {Promise<TableResponse>} | ||
* | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
* const bigquery = new BigQuery(); | ||
* const dataset = bigquery.dataset('institutions'); | ||
* | ||
* const tableId = 'institution_data'; | ||
* | ||
* const options = { | ||
* // From the data.gov CSV dataset (http://goo.gl/kSE7z6): | ||
* schema: 'UNITID,INSTNM,ADDR,CITY,STABBR,ZIP,FIPS,OBEREG,CHFNM,...' | ||
* }; | ||
* | ||
* dataset.createTable(tableId, options, (err, table, apiResponse) => {}); | ||
* | ||
* //- | ||
* // If the callback is omitted, we'll return a Promise. | ||
* //- | ||
* dataset.createTable(tableId, options).then((data) => { | ||
* const table = data[0]; | ||
* const apiResponse = data[1]; | ||
* }); | ||
* ``` | ||
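* @example Specifying the schema as an array of fields (a sketch; the field names are illustrative) | ||
* ``` | ||
* // Follows the Table resource's `schema.fields` format, which allows | ||
* // nested and repeated fields. | ||
* const jsonSchemaOptions = { | ||
* schema: { | ||
* fields: [ | ||
* {name: 'name', type: 'STRING', mode: 'REQUIRED'}, | ||
* {name: 'campuses', type: 'STRING', mode: 'REPEATED'} | ||
* ] | ||
* } | ||
* }; | ||
* | ||
* const [table] = await dataset.createTable(tableId, jsonSchemaOptions); | ||
* ``` | ||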
*/ | ||
createTable(id: string, options: TableMetadata): Promise<TableResponse>; | ||
createTable(id: string, options: TableMetadata, callback: TableCallback): void; | ||
createTable(id: string, callback: TableCallback): void; | ||
/** | ||
* @callback DeleteCallback | ||
* @param {?Error} err Request error, if any. | ||
* @param {object} apiResponse The full API response body. | ||
*/ | ||
/** | ||
* @typedef {array} Metadata | ||
* @property {object} 0 The full API response body. | ||
*/ | ||
/** | ||
* Delete the dataset. | ||
* | ||
* See {@link https://cloud.google.com/bigquery/docs/reference/v2/datasets/delete| Datasets: delete API Documentation} | ||
* | ||
* @param {object} [options] The configuration object. | ||
* @param {boolean} [options.force=false] Force delete dataset and all tables. | ||
* @param {DeleteCallback} [callback] The callback function. | ||
* @param {?error} callback.err An error returned while making this request | ||
* @param {object} callback.apiResponse The full API response. | ||
* @returns {Promise<Metadata>} | ||
* | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
* const bigquery = new BigQuery(); | ||
* const dataset = bigquery.dataset('institutions'); | ||
* | ||
* //- | ||
* // Delete the dataset, only if it does not have any tables. | ||
* //- | ||
* dataset.delete((err, apiResponse) => {}); | ||
* | ||
* //- | ||
* // Delete the dataset and any tables it contains. | ||
* //- | ||
* dataset.delete({ force: true }, (err, apiResponse) => {}); | ||
* | ||
* //- | ||
* // If the callback is omitted, we'll return a Promise. | ||
* //- | ||
* dataset.delete().then((data) => { | ||
* const apiResponse = data[0]; | ||
* }); | ||
* ``` | ||
*/ | ||
delete(options?: DatasetDeleteOptions): Promise<[Metadata]>; | ||
delete(options: DatasetDeleteOptions, callback: DeleteCallback): void; | ||
delete(callback: DeleteCallback): void; | ||
/** | ||
* @typedef {object} GetModelsOptions | ||
* @property {boolean} [autoPaginate=true] Have pagination handled | ||
* automatically. | ||
* @property {number} [maxApiCalls] Maximum number of API calls to make. | ||
* @property {number} [maxResults] Maximum number of results to return. | ||
* @property {string} [pageToken] Token returned from a previous call, to | ||
* request the next page of results. | ||
*/ | ||
/** | ||
* @callback GetModelsCallback | ||
* @param {?Error} err Request error, if any. | ||
* @param {Model[]} models List of model objects. | ||
* @param {GetModelsOptions} nextQuery If `autoPaginate` is set to true, | ||
* this will be a prepared query for the next page of results. | ||
* @param {object} response The full API response. | ||
*/ | ||
/** | ||
* @typedef {array} GetModelsResponse | ||
* @property {Model[]} 0 A list of the dataset's {@link Model} objects. | ||
* @property {GetModelsOptions} 1 If `autoPaginate` is set to true, this | ||
* will be a prepared query for the next page of results. | ||
* @property {object} 2 The full API response. | ||
*/ | ||
/** | ||
* Get a list of {@link Model} resources. | ||
* | ||
* See {@link https://cloud.google.com/bigquery/docs/reference/rest/v2/models/list| Models: list API Documentation} | ||
* | ||
* @param {GetModelsOptions} [options] Configuration object. | ||
* @param {boolean} [options.autoPaginate=true] Have pagination handled | ||
* automatically. | ||
* @param {number} [options.maxApiCalls] Maximum number of API calls to make. | ||
* @param {number} [options.maxResults] Maximum number of results to return. | ||
* @param {string} [options.pageToken] Token returned from a previous call, to | ||
* request the next page of results. | ||
* @param {GetModelsCallback} [callback] The callback function. | ||
* @param {?error} callback.err An error returned while making this request | ||
* @param {Model[]} callback.models The list of models from | ||
* your Dataset. | ||
* @param {GetModelsOptions} callback.nextQuery If `autoPaginate` is set to true, this | ||
* will be a prepared query for the next page of results. | ||
* @param {object} callback.apiResponse The full API response. | ||
* @returns {Promise<GetModelsResponse>} | ||
* | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
* const bigquery = new BigQuery(); | ||
* const dataset = bigquery.dataset('institutions'); | ||
* | ||
* dataset.getModels((err, models) => { | ||
* // models is an array of `Model` objects. | ||
* }); | ||
* | ||
* ``` | ||
* @example To control how many API requests are made and page through the results manually, set `autoPaginate` to `false`. | ||
* ``` | ||
* function manualPaginationCallback(err, models, nextQuery, apiResponse) { | ||
* if (nextQuery) { | ||
* // More results exist. | ||
* dataset.getModels(nextQuery, manualPaginationCallback); | ||
* } | ||
* } | ||
* | ||
* dataset.getModels({ | ||
* autoPaginate: false | ||
* }, manualPaginationCallback); | ||
* | ||
* ``` | ||
* @example If the callback is omitted, we'll return a Promise. | ||
* ``` | ||
* dataset.getModels().then((data) => { | ||
* const models = data[0]; | ||
* }); | ||
* ``` | ||
*/ | ||
getModels(options?: GetModelsOptions): Promise<GetModelsResponse>; | ||
getModels(options: GetModelsOptions, callback: GetModelsCallback): void; | ||
getModels(callback: GetModelsCallback): void; | ||
/** | ||
* @typedef {object} GetRoutinesOptions | ||
* @property {boolean} [autoPaginate=true] Have pagination handled | ||
* automatically. | ||
* @property {number} [maxApiCalls] Maximum number of API calls to make. | ||
* @property {number} [maxResults] Maximum number of results to return. | ||
* @property {string} [pageToken] Token returned from a previous call, to | ||
* request the next page of results. | ||
*/ | ||
/** | ||
* @callback GetRoutinesCallback | ||
* @param {?Error} err Request error, if any. | ||
* @param {Routine[]} routines List of routine objects. | ||
* @param {GetRoutinesOptions} nextQuery If `autoPaginate` is set to true, | ||
* this will be a prepared query for the next page of results. | ||
* @param {object} response The full API response. | ||
*/ | ||
/** | ||
* @typedef {array} GetRoutinesResponse | ||
* @property {Routine[]} 0 List of {@link Routine} objects. | ||
* @property {GetRoutinesOptions} 1 If `autoPaginate` is set to true, this | ||
* will be a prepared query for the next page of results. | ||
* @property {object} 2 The full API response. | ||
*/ | ||
/** | ||
* Get a list of routines. | ||
* | ||
* See {@link https://cloud.google.com/bigquery/docs/reference/rest/v2/routines/list| Routines: list API Documentation} | ||
* | ||
* @param {GetRoutinesOptions} [options] Request options. | ||
* @param {boolean} [options.autoPaginate=true] Have pagination handled | ||
* automatically. | ||
* @param {number} [options.maxApiCalls] Maximum number of API calls to make. | ||
* @param {number} [options.maxResults] Maximum number of results to return. | ||
* @param {string} [options.pageToken] Token returned from a previous call, to | ||
* request the next page of results. | ||
* @param {GetRoutinesCallback} [callback] The callback function. | ||
* @param {?error} callback.err An error returned while making this request | ||
* @param {Routine[]} callback.routines The list of routines from | ||
* your Dataset. | ||
* @param {GetRoutinesOptions} callback.nextQuery If `autoPaginate` is set to true, this | ||
* will be a prepared query for the next page of results. | ||
* @param {object} callback.apiResponse The full API response. | ||
* @returns {Promise<GetRoutinesResponse>} | ||
* | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
* const bigquery = new BigQuery(); | ||
* const dataset = bigquery.dataset('institutions'); | ||
* | ||
* dataset.getRoutines((err, routines) => { | ||
* // routines is an array of `Routine` objects. | ||
* }); | ||
* | ||
* ``` | ||
* @example To control how many API requests are made and page through the results manually, set `autoPaginate` to `false`. | ||
* ``` | ||
* function manualPaginationCallback(err, routines, nextQuery, apiResponse) { | ||
* if (nextQuery) { | ||
* // More results exist. | ||
* dataset.getRoutines(nextQuery, manualPaginationCallback); | ||
* } | ||
* } | ||
* | ||
* dataset.getRoutines({ | ||
* autoPaginate: false | ||
* }, manualPaginationCallback); | ||
* | ||
* ``` | ||
* @example If the callback is omitted, a Promise will be returned | ||
* ``` | ||
* const [routines] = await dataset.getRoutines(); | ||
* ``` | ||
*/ | ||
getRoutines(options?: GetRoutinesOptions): Promise<GetRoutinesResponse>; | ||
getRoutines(options: GetRoutinesOptions, callback: GetRoutinesCallback): void; | ||
getRoutines(callback: GetRoutinesCallback): void; | ||
/** | ||
* @typedef {object} GetTablesOptions | ||
* @property {boolean} [autoPaginate=true] Have pagination handled | ||
* automatically. | ||
* @property {number} [maxApiCalls] Maximum number of API calls to make. | ||
* @property {number} [maxResults] Maximum number of results to return. | ||
* @property {string} [pageToken] Token returned from a previous call, to | ||
* request the next page of results. | ||
*/ | ||
/** | ||
* @callback GetTablesCallback | ||
* @param {?Error} err Request error, if any. | ||
* @param {Table[]} tables List of {@link Table} objects. | ||
* @param {GetTablesOptions} nextQuery If `autoPaginate` is set to true, | ||
* this will be a prepared query for the next page of results. | ||
* @param {object} response The full API response. | ||
*/ | ||
/** | ||
* @typedef {array} GetTablesResponse | ||
* @property {Table[]} 0 List of {@link Table} objects. | ||
* @property {GetTablesOptions} 1 If `autoPaginate` is set to true, this | ||
* will be a prepared query for the next page of results. | ||
* @property {object} 2 The full API response. | ||
*/ | ||
/** | ||
* Get a list of {@link Table} resources. | ||
* | ||
* See {@link https://cloud.google.com/bigquery/docs/reference/v2/tables/list| Tables: list API Documentation} | ||
* | ||
* @param {GetTablesOptions} options Configuration object. | ||
* @param {boolean} [options.autoPaginate=true] Have pagination handled automatically. | ||
* @param {number} [options.maxApiCalls] Maximum number of API calls to make. | ||
* @param {number} [options.maxResults] Maximum number of results to return. | ||
* @param {string} [options.pageToken] Token returned from a previous call, to | ||
* request the next page of results. | ||
* @param {GetTablesCallback} [callback] The callback function. | ||
* @param {?error} callback.err An error returned while making this request | ||
* @param {Table[]} callback.tables The list of tables from | ||
* your Dataset. | ||
* @param {GetTablesOptions} callback.nextQuery If `autoPaginate` is set to true, this | ||
* will be a prepared query for the next page of results. | ||
* @param {object} callback.apiResponse The full API response. | ||
* @returns {Promise<GetTablesResponse>} | ||
* | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
* const bigquery = new BigQuery(); | ||
* const dataset = bigquery.dataset('institutions'); | ||
* | ||
* dataset.getTables((err, tables) => { | ||
* // tables is an array of `Table` objects. | ||
* }); | ||
* | ||
* //- | ||
* // To control how many API requests are made and page through the results | ||
* // manually, set `autoPaginate` to `false`. | ||
* //- | ||
* function manualPaginationCallback(err, tables, nextQuery, apiResponse) { | ||
* if (nextQuery) { | ||
* // More results exist. | ||
* dataset.getTables(nextQuery, manualPaginationCallback); | ||
* } | ||
* } | ||
* | ||
* dataset.getTables({ | ||
* autoPaginate: false | ||
* }, manualPaginationCallback); | ||
* | ||
* //- | ||
* // If the callback is omitted, we'll return a Promise. | ||
* //- | ||
* dataset.getTables().then((data) => { | ||
* const tables = data[0]; | ||
* }); | ||
* ``` | ||
*/ | ||
getTables(options?: GetTablesOptions): Promise<GetTablesResponse>; | ||
@@ -111,2 +508,3 @@ getTables(options: GetTablesOptions, callback: GetTablesCallback): void; | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
@@ -117,4 +515,15 @@ * const bigquery = new BigQuery(); | ||
* const model = dataset.model('my-model'); | ||
* ``` | ||
*/ | ||
model(id: string): Model; | ||
/** | ||
* Run a query scoped to your dataset. | ||
* | ||
* See {@link BigQuery#query} for full documentation of this method. | ||
* | ||
* @param {object} options See {@link BigQuery#query} for full documentation of this method. | ||
* @param {function} [callback] See {@link BigQuery#query} for full documentation of this method. | ||
* @returns {Promise<SimpleQueryRowsResponse>} | ||
* @returns {Promise<QueryRowsResponse>} See {@link BigQuery#query} for full documentation of this method. | ||
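* | ||
* @example A minimal sketch (the query runs with this dataset as its default dataset) | ||
* ``` | ||
* const [rows] = await dataset.query('SELECT 1 AS x'); | ||
* ``` | ||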
*/ | ||
query(options: Query): Promise<QueryRowsResponse>; | ||
@@ -133,2 +542,3 @@ query(options: string): Promise<QueryRowsResponse>; | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
@@ -139,2 +549,3 @@ * const bigquery = new BigQuery(); | ||
* const routine = dataset.routine('my_routine'); | ||
* ``` | ||
*/ | ||
@@ -158,2 +569,3 @@ routine(id: string): Routine; | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
@@ -164,2 +576,3 @@ * const bigquery = new BigQuery(); | ||
* const institutions = dataset.table('institution_data'); | ||
* ``` | ||
*/ | ||
@@ -166,0 +579,0 @@ table(id: string, options?: TableOptions): Table; |
@@ -38,5 +38,7 @@ "use strict"; | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
* const bigquery = new BigQuery(); | ||
* const dataset = bigquery.dataset('institutions'); | ||
* ``` | ||
*/ | ||
@@ -69,2 +71,3 @@ class Dataset extends common_1.ServiceObject { | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
@@ -86,2 +89,3 @@ * const bigquery = new BigQuery(); | ||
* }); | ||
* ``` | ||
*/ | ||
@@ -109,2 +113,3 @@ create: true, | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
@@ -121,2 +126,3 @@ * const bigquery = new BigQuery(); | ||
* }); | ||
* ``` | ||
*/ | ||
@@ -155,2 +161,3 @@ exists: true, | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
@@ -172,2 +179,3 @@ * const bigquery = new BigQuery(); | ||
* }); | ||
* ``` | ||
*/ | ||
@@ -189,3 +197,3 @@ get: true, | ||
* | ||
* @see [Datasets: get API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/v2/datasets/get} | ||
* See {@link https://cloud.google.com/bigquery/docs/reference/v2/datasets/get| Datasets: get API Documentation} | ||
* | ||
@@ -201,2 +209,3 @@ * @method Dataset#getMetadata | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
@@ -214,2 +223,3 @@ * const bigquery = new BigQuery(); | ||
* }); | ||
* ``` | ||
*/ | ||
@@ -229,3 +239,3 @@ getMetadata: true, | ||
* | ||
* @see [Datasets: patch API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/v2/datasets/patch} | ||
* See {@link https://cloud.google.com/bigquery/docs/reference/v2/datasets/patch| Datasets: patch API Documentation} | ||
* | ||
@@ -241,2 +251,3 @@ * @method Dataset#setMetadata | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
@@ -258,2 +269,3 @@ * const bigquery = new BigQuery(); | ||
* }); | ||
* ``` | ||
*/ | ||
@@ -301,2 +313,3 @@ setMetadata: true, | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
@@ -313,3 +326,5 @@ * const bigquery = new BigQuery(); | ||
* | ||
* @example <caption>If you anticipate many results, you can end a stream early to prevent unnecessary processing and API requests.</caption> | ||
* ``` | ||
* @example If you anticipate many results, you can end a stream early to prevent unnecessary processing and API requests. | ||
* ``` | ||
* dataset.getModelsStream() | ||
@@ -319,2 +334,3 @@ * .on('data', function(model) { | ||
* }); | ||
* ``` | ||
*/ | ||
@@ -331,2 +347,3 @@ this.getModelsStream = paginator_1.paginator.streamify('getModels'); | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
@@ -343,3 +360,5 @@ * const bigquery = new BigQuery(); | ||
* | ||
* @example <caption>If you anticipate many results, you can end a stream early to prevent unnecessary processing and API requests.</caption> | ||
* ``` | ||
* @example If you anticipate many results, you can end a stream early to prevent unnecessary processing and API requests. | ||
* ``` | ||
* dataset.getRoutinesStream() | ||
@@ -349,2 +368,3 @@ * .on('data', function(routine) { | ||
* }); | ||
* ``` | ||
*/ | ||
@@ -362,2 +382,3 @@ this.getRoutinesStream = paginator_1.paginator.streamify('getRoutines'); | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
@@ -382,17 +403,6 @@ * const bigquery = new BigQuery(); | ||
* }); | ||
* ``` | ||
*/ | ||
this.getTablesStream = paginator_1.paginator.streamify('getTables'); | ||
} | ||
/** | ||
* Run a query as a job. No results are immediately returned. Instead, your | ||
* callback will be executed with a {@link Job} object that you must | ||
* ping for the results. See the Job documentation for explanations of how to | ||
* check on the status of the job. | ||
* | ||
* See {@link BigQuery#createQueryJob} for full documentation of this method. | ||
* | ||
* @param {object} options See {@link BigQuery#createQueryJob} for full documentation of this method. | ||
* @param {JobCallback} [callback] See {@link BigQuery#createQueryJob} for full documentation of this method. | ||
* @returns {Promise<JobResponse>} See {@link BigQuery#createQueryJob} for full documentation of this method. | ||
*/ | ||
createQueryJob(options, callback) { | ||
@@ -436,52 +446,2 @@ if (typeof options === 'string') { | ||
} | ||
/** | ||
* @callback CreateRoutineCallback | ||
* @param {?Error} err Request error, if any. | ||
* @param {Routine} routine The newly created routine. | ||
* @param {object} response The full API response body. | ||
*/ | ||
/** | ||
* @typedef {array} CreateRoutineResponse | ||
* @property {Routine} 0 The newly created routine. | ||
* @property {object} 1 The full API response body. | ||
*/ | ||
/** | ||
* Create a {@link Routine}. | ||
* | ||
* @see [Routines: insert API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/rest/v2/routines/insert} | ||
* | ||
* @param {string} id The routine ID. | ||
* @param {object} config A [routine resource]{@link https://cloud.google.com/bigquery/docs/reference/rest/v2/routines#Routine}. | ||
* @param {CreateRoutineCallback} [callback] The callback function. | ||
* @returns {Promise<CreateRoutineResponse>} | ||
* | ||
* @example | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
* const bigquery = new BigQuery(); | ||
* const dataset = bigquery.dataset('my-dataset'); | ||
* | ||
* const id = 'my-routine'; | ||
* const config = { | ||
* arguments: [{ | ||
* name: 'x', | ||
* dataType: { | ||
* typeKind: 'INT64' | ||
* } | ||
* }], | ||
* definitionBody: 'x * 3', | ||
* routineType: 'SCALAR_FUNCTION', | ||
* returnType: { | ||
* typeKind: 'INT64' | ||
* } | ||
* }; | ||
* | ||
* dataset.createRoutine(id, config, (err, routine, apiResponse) => { | ||
* if (!err) { | ||
* // The routine was created successfully. | ||
* } | ||
* }); | ||
* | ||
* @example <caption>If the callback is omitted a Promise will be returned</caption> | ||
* const [routine, apiResponse] = await dataset.createRoutine(id, config); | ||
*/ | ||
createRoutine(id, config, callback) { | ||
@@ -509,56 +469,2 @@ const json = Object.assign({}, config, { | ||
} | ||
/** | ||
* @callback TableCallback | ||
* @param {?Error} err Request error, if any. | ||
* @param {Table} table The table. | ||
* @param {object} apiResponse The full API response body. | ||
*/ | ||
/** | ||
* @typedef {array} TableResponse | ||
* @property {Table} 0 The table. | ||
* @property {object} 1 The full API response body. | ||
*/ | ||
/** | ||
* Create a {@link Table} given a tableId or configuration object. | ||
* | ||
* @see [Tables: insert API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/v2/tables/insert} | ||
* | ||
* @param {string} id Table id. | ||
* @param {object} [options] See a | ||
* [Table resource](https://cloud.google.com/bigquery/docs/reference/v2/tables#resource). | ||
* @param {string|object} [options.schema] A comma-separated list of name:type | ||
* pairs. Valid types are "string", "integer", "float", "boolean", and | ||
* "timestamp". If the type is omitted, it is assumed to be "string". | ||
* Example: "name:string, age:integer". Schemas can also be specified as a | ||
* JSON array of fields, which allows for nested and repeated fields. See | ||
* a [Table resource](http://goo.gl/sl8Dmg) for more detailed information. | ||
* @param {TableCallback} [callback] The callback function. | ||
* @param {?error} callback.err An error returned while making this request | ||
* @param {Table} callback.table The newly created table. | ||
* @param {object} callback.apiResponse The full API response. | ||
* @returns {Promise<TableResponse>} | ||
* | ||
* @example | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
* const bigquery = new BigQuery(); | ||
* const dataset = bigquery.dataset('institutions'); | ||
* | ||
* const tableId = 'institution_data'; | ||
* | ||
* const options = { | ||
* // From the data.gov CSV dataset (http://goo.gl/kSE7z6): | ||
* schema: 'UNITID,INSTNM,ADDR,CITY,STABBR,ZIP,FIPS,OBEREG,CHFNM,...' | ||
* }; | ||
* | ||
* dataset.createTable(tableId, options, (err, table, apiResponse) => {}); | ||
* | ||
* //- | ||
* // If the callback is omitted, we'll return a Promise. | ||
* //- | ||
* dataset.createTable(tableId, options).then((data) => { | ||
* const table = data[0]; | ||
* const apiResponse = data[1]; | ||
* }); | ||
*/ | ||
createTable(id, optionsOrCallback, cb) { | ||
@@ -590,45 +496,2 @@ const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; | ||
} | ||
/** | ||
* @callback DeleteCallback | ||
* @param {?Error} err Request error, if any. | ||
* @param {object} apiResponse The full API response body. | ||
*/ | ||
/** | ||
* @typedef {array} Metadata | ||
* @property {object} 0 The full API response body. | ||
*/ | ||
/** | ||
* Delete the dataset. | ||
* | ||
* @see [Datasets: delete API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/v2/datasets/delete} | ||
* | ||
* @param {object} [options] The configuration object. | ||
* @param {boolean} [options.force=false] Force delete dataset and all tables. | ||
* @param {DeleteCallback} [callback] The callback function. | ||
* @param {?error} callback.err An error returned while making this request | ||
* @param {object} callback.apiResponse The full API response. | ||
* @returns {Promise<Metadata>} | ||
* | ||
* @example | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
* const bigquery = new BigQuery(); | ||
* const dataset = bigquery.dataset('institutions'); | ||
* | ||
* //- | ||
* // Delete the dataset, only if it does not have any tables. | ||
* //- | ||
* dataset.delete((err, apiResponse) => {}); | ||
* | ||
* //- | ||
* // Delete the dataset and any tables it contains. | ||
* //- | ||
* dataset.delete({ force: true }, (err, apiResponse) => {}); | ||
* | ||
* //- | ||
* // If the callback is omitted, we'll return a Promise. | ||
* //- | ||
* dataset.delete().then((data) => { | ||
* const apiResponse = data[0]; | ||
* }); | ||
*/ | ||
delete(optionsOrCallback, callback) { | ||
@@ -647,73 +510,2 @@ const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; | ||
} | ||
/** | ||
* @typedef {object} GetModelsOptions | ||
* @property {boolean} [autoPaginate=true] Have pagination handled | ||
* automatically. | ||
* @property {number} [maxApiCalls] Maximum number of API calls to make. | ||
* @property {number} [maxResults] Maximum number of results to return. | ||
* @property {string} [pageToken] Token returned from a previous call, to | ||
* request the next page of results. | ||
*/ | ||
/** | ||
* @callback GetModelsCallback | ||
* @param {?Error} err Request error, if any. | ||
* @param {Model[]} models List of model objects. | ||
* @param {GetModelsOptions} nextQuery If `autoPaginate` is set to true, | ||
* this will be a prepared query for the next page of results. | ||
* @param {object} response The full API response. | ||
*/ | ||
/** | ||
* @typedef {array} GetModelsResponse | ||
* @property {Model[]} 0 A list of the dataset's {@link Model} objects. | ||
* @property {GetModelsOptions} 1 If `autoPaginate` is set to true, this | ||
* will be a prepared query for the next page of results. | ||
* @property {object} 2 The full API response. | ||
*/ | ||
/** | ||
* Get a list of {@link Model} resources. | ||
* | ||
* @see [Models: list API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/rest/v2/models/list} | ||
* | ||
* @param {GetModelsOptions} [options] Configuration object. | ||
* @param {boolean} [options.autoPaginate=true] Have pagination handled | ||
* automatically. | ||
* @param {number} [options.maxApiCalls] Maximum number of API calls to make. | ||
* @param {number} [options.maxResults] Maximum number of results to return. | ||
* @param {string} [options.pageToken] Token returned from a previous call, to | ||
* request the next page of results. | ||
* @param {GetModelsCallback} [callback] The callback function. | ||
* @param {?error} callback.err An error returned while making this request | ||
* @param {Model[]} callback.models The list of models from | ||
* your Dataset. | ||
* @param {GetModelsOptions} callback.nextQuery If `autoPaginate` is set to true, this | ||
* will be a prepared query for the next page of results. | ||
* @param {object} callback.apiResponse The full API response. | ||
* @returns {Promise<GetModelsResponse>} | ||
* | ||
* @example | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
* const bigquery = new BigQuery(); | ||
* const dataset = bigquery.dataset('institutions'); | ||
* | ||
* dataset.getModels((err, models) => { | ||
* // models is an array of `Model` objects. | ||
* }); | ||
* | ||
* @example <caption>To control how many API requests are made and page through the results manually, set `autoPaginate` to `false`.</caption> | ||
* function manualPaginationCallback(err, models, nextQuery, apiResponse) { | ||
* if (nextQuery) { | ||
* // More results exist. | ||
* dataset.getModels(nextQuery, manualPaginationCallback); | ||
* } | ||
* } | ||
* | ||
* dataset.getModels({ | ||
* autoPaginate: false | ||
* }, manualPaginationCallback); | ||
* | ||
* @example <caption>If the callback is omitted, we'll return a Promise.</caption> | ||
* dataset.getModels().then((data) => { | ||
* const models = data[0]; | ||
* }); | ||
*/ | ||
getModels(optsOrCb, cb) { | ||
@@ -744,71 +536,2 @@ const options = typeof optsOrCb === 'object' ? optsOrCb : {}; | ||
} | ||
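The promise form of `getModels` pairs naturally with `async`/`await`; a minimal sketch (the `model.id` property used for logging is an assumption of this example):

```
const {BigQuery} = require('@google-cloud/bigquery');

async function listModels() {
  const bigquery = new BigQuery();
  const dataset = bigquery.dataset('institutions');

  // With `autoPaginate` left at its default of true, every page is fetched.
  const [models] = await dataset.getModels();
  models.forEach(model => console.log(model.id)); // `model.id` assumed
}

listModels().catch(console.error);
```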
/** | ||
* @typedef {object} GetRoutinesOptions | ||
* @property {boolean} [autoPaginate=true] Have pagination handled | ||
* automatically. | ||
* @property {number} [maxApiCalls] Maximum number of API calls to make. | ||
* @property {number} [maxResults] Maximum number of results to return. | ||
* @property {string} [pageToken] Token returned from a previous call, to | ||
* request the next page of results. | ||
*/ | ||
/** | ||
* @callback GetRoutinesCallback | ||
* @param {?Error} err Request error, if any. | ||
* @param {Routine[]} routines List of routine objects. | ||
* @param {GetRoutinesOptions} nextQuery If `autoPaginate` is set to true, | ||
* this will be a prepared query for the next page of results. | ||
* @param {object} response The full API response. | ||
*/ | ||
/** | ||
* @typedef {array} GetRoutinesResponse | ||
* @property {Routine[]} 0 List of {@link Routine} objects. | ||
* @property {GetRoutinesOptions} 1 If `autoPaginate` is set to true, this | ||
* will be a prepared query for the next page of results. | ||
* @property {object} 2 The full API response. | ||
*/ | ||
/** | ||
* Get a list of routines. | ||
* | ||
* @see [Routines: list API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/rest/v2/routines/list} | ||
* | ||
* @param {GetRoutinesOptions} [options] Request options. | ||
* @param {boolean} [options.autoPaginate=true] Have pagination handled | ||
* automatically. | ||
* @param {number} [options.maxApiCalls] Maximum number of API calls to make. | ||
* @param {number} [options.maxResults] Maximum number of results to return. | ||
* @param {string} [options.pageToken] Token returned from a previous call, to | ||
* request the next page of results. | ||
* @param {GetRoutinesCallback} [callback] The callback function. | ||
* @param {?error} callback.err An error returned while making this request | ||
* @param {Routine[]} callback.routines The list of routines from | ||
* your Dataset. | ||
* @param {GetRoutinesOptions} callback.nextQuery If `autoPaginate` is set to true, this | ||
* will be a prepared query for the next page of results. | ||
* @param {object} callback.apiResponse The full API response. | ||
* @returns {Promise<GetRoutinesResponse>} | ||
* | ||
* @example | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
* const bigquery = new BigQuery(); | ||
* const dataset = bigquery.dataset('institutions'); | ||
* | ||
* dataset.getRoutines((err, routines) => { | ||
* // routines is an array of `Routine` objects. | ||
* }); | ||
* | ||
* @example <caption>To control how many API requests are made and page through the results manually, set `autoPaginate` to `false`.</caption> | ||
* function manualPaginationCallback(err, routines, nextQuery, apiResponse) { | ||
* if (nextQuery) { | ||
* // More results exist. | ||
* dataset.getRoutines(nextQuery, manualPaginationCallback); | ||
* } | ||
* } | ||
* | ||
* dataset.getRoutines({ | ||
* autoPaginate: false | ||
* }, manualPaginationCallback); | ||
* | ||
* @example <caption>If the callback is omitted a Promise will be returned</caption> | ||
* const [routines] = await dataset.getRoutines(); | ||
*/ | ||
getRoutines(optsOrCb, cb) { | ||
@@ -839,77 +562,2 @@ const options = typeof optsOrCb === 'object' ? optsOrCb : {}; | ||
} | ||
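Manual pagination also works in the promise form: with `autoPaginate: false`, the resolved array follows `GetRoutinesResponse` above (`[routines, nextQuery, apiResponse]`), so the loop below keeps requesting pages until `nextQuery` is empty. A sketch under that assumption:

```
const {BigQuery} = require('@google-cloud/bigquery');

async function listRoutinesPageByPage() {
  const bigquery = new BigQuery();
  const dataset = bigquery.dataset('institutions');

  let query = {autoPaginate: false, maxResults: 50};
  while (query) {
    // The second element is the prepared query for the next page,
    // or a falsy value once there are no more pages (assumed here).
    const [routines, nextQuery] = await dataset.getRoutines(query);
    console.log(`Fetched ${routines.length} routines`);
    query = nextQuery;
  }
}

listRoutinesPageByPage().catch(console.error);
```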
/** | ||
* @typedef {object} GetTablesOptions | ||
* @property {boolean} [autoPaginate=true] Have pagination handled | ||
* automatically. | ||
* @property {number} [maxApiCalls] Maximum number of API calls to make. | ||
* @property {number} [maxResults] Maximum number of results to return. | ||
* @property {string} [pageToken] Token returned from a previous call, to | ||
* request the next page of results. | ||
*/ | ||
/** | ||
* @callback GetTablesCallback | ||
* @param {?Error} err Request error, if any. | ||
* @param {Table[]} tables List of {@link Table} objects. | ||
* @param {GetTablesOptions} nextQuery If `autoPaginate` is set to true, | ||
* this will be a prepared query for the next page of results. | ||
* @param {object} response The full API response. | ||
*/ | ||
/** | ||
* @typedef {array} GetTablesResponse | ||
* @property {Table[]} 0 List of {@link Table} objects. | ||
* @property {GetTablesOptions} 1 If `autoPaginate` is set to true, this | ||
* will be a prepared query for the next page of results. | ||
* @property {object} 2 The full API response. | ||
*/ | ||
/** | ||
* Get a list of {@link Table} resources. | ||
* | ||
* @see [Tables: list API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/v2/tables/list} | ||
* | ||
* @param {GetTablesOptions} options Configuration object. | ||
* @param {boolean} [options.autoPaginate=true] Have pagination handled automatically. | ||
* @param {number} [options.maxApiCalls] Maximum number of API calls to make. | ||
* @param {number} [options.maxResults] Maximum number of results to return. | ||
* @param {string} [options.pageToken] Token returned from a previous call, to | ||
* request the next page of results. | ||
* @param {GetTablesCallback} [callback] The callback function. | ||
* @param {?error} callback.err An error returned while making this request | ||
* @param {Table[]} callback.tables The list of tables from | ||
* your Dataset. | ||
* @param {GetTablesOptions} callback.nextQuery If `autoPaginate` is set to true, this | ||
* will be a prepared query for the next page of results. | ||
* @param {object} callback.apiResponse The full API response. | ||
* @returns {Promise<GetTablesResponse>} | ||
* | ||
* @example | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
* const bigquery = new BigQuery(); | ||
* const dataset = bigquery.dataset('institutions'); | ||
* | ||
* dataset.getTables((err, tables) => { | ||
* // tables is an array of `Table` objects. | ||
* }); | ||
* | ||
* //- | ||
* // To control how many API requests are made and page through the results | ||
* // manually, set `autoPaginate` to `false`. | ||
* //- | ||
* function manualPaginationCallback(err, tables, nextQuery, apiResponse) { | ||
* if (nextQuery) { | ||
* // More results exist. | ||
* dataset.getTables(nextQuery, manualPaginationCallback); | ||
* } | ||
* } | ||
* | ||
* dataset.getTables({ | ||
* autoPaginate: false | ||
* }, manualPaginationCallback); | ||
* | ||
* //- | ||
* // If the callback is omitted, we'll return a Promise. | ||
* //- | ||
* dataset.getTables().then((data) => { | ||
* const tables = data[0]; | ||
* }); | ||
*/ | ||
getTables(optionsOrCallback, cb) { | ||
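An equivalent promise-based sketch for `getTables`; `maxResults` comes from `GetTablesOptions` above, while the `table.id` property used for logging is an assumption of this example:

```
const {BigQuery} = require('@google-cloud/bigquery');

async function listTables() {
  const bigquery = new BigQuery();
  const dataset = bigquery.dataset('institutions');

  // Cap the number of results instead of paging through everything.
  const [tables] = await dataset.getTables({maxResults: 100});
  tables.forEach(table => console.log(table.id)); // `table.id` assumed
}

listTables().catch(console.error);
```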
@@ -952,2 +600,3 @@ const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
@@ -958,2 +607,3 @@ * const bigquery = new BigQuery(); | ||
* const model = dataset.model('my-model'); | ||
* ``` | ||
*/ | ||
@@ -966,12 +616,2 @@ model(id) { | ||
} | ||
/** | ||
* Run a query scoped to your dataset. | ||
* | ||
* See {@link BigQuery#query} for full documentation of this method. | ||
* | ||
* @param {object} options See {@link BigQuery#query} for full documentation of this method. | ||
* @param {function} [callback] See {@link BigQuery#query} for full documentation of this method. | ||
* @returns {Promise<SimpleQueryRowsResponse>} | ||
* @returns {Promise<QueryRowsResponse>} See {@link BigQuery#query} for full documentation of this method. | ||
*/ | ||
query(options, callback) { | ||
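`Dataset#query` defers to `BigQuery#query` and has no example of its own here; for orientation, a minimal sketch. The SQL and table name are placeholders, and the scoping assumption is that the dataset becomes the query's default dataset, so unqualified table names resolve inside it:

```
const {BigQuery} = require('@google-cloud/bigquery');

async function queryDataset() {
  const bigquery = new BigQuery();
  const dataset = bigquery.dataset('institutions');

  // Runs the query with `institutions` as the default dataset.
  const [rows] = await dataset.query({
    query: 'SELECT name FROM institution_data LIMIT 10', // placeholder SQL
  });
  rows.forEach(row => console.log(row));
}

queryDataset().catch(console.error);
```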
@@ -1000,2 +640,3 @@ if (typeof options === 'string') { | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
@@ -1006,2 +647,3 @@ * const bigquery = new BigQuery(); | ||
* const routine = dataset.routine('my_routine'); | ||
* ``` | ||
*/ | ||
@@ -1030,2 +672,3 @@ routine(id) { | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
@@ -1036,2 +679,3 @@ * const bigquery = new BigQuery(); | ||
* const institutions = dataset.table('institution_data'); | ||
* ``` | ||
*/ | ||
@@ -1038,0 +682,0 @@ table(id, options) { |
@@ -70,2 +70,3 @@ /*! | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
@@ -99,2 +100,3 @@ * const bigquery = new BigQuery(); | ||
* job.removeAllListeners(); | ||
* ``` | ||
*/ | ||
@@ -106,4 +108,121 @@ declare class Job extends Operation { | ||
constructor(bigQuery: BigQuery, id: string, options?: JobOptions); | ||
/** | ||
* @callback CancelCallback | ||
* @param {?Error} err Request error, if any. | ||
* @param {object} metadata The job metadata. | ||
* @param {object} apiResponse The full API response. | ||
*/ | ||
/** | ||
* @typedef {array} CancelResponse | ||
* @property {object} 0 The job metadata. | ||
* @property {object} 1 The full API response. | ||
*/ | ||
/** | ||
* Cancel a job. Use {@link Job#getMetadata} to see if the cancel | ||
* completes successfully. See an example implementation below. | ||
* | ||
* See {@link https://cloud.google.com/bigquery/docs/reference/v2/jobs/cancel| Jobs: cancel API Documentation} | ||
* | ||
* @param {CancelCallback} [callback] The callback function. | ||
* @param {?error} callback.err An error returned while making this request. | ||
* @param {object} callback.apiResponse The full API response. | ||
* @returns {Promise<CancelResponse>} | ||
* | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
* const bigquery = new BigQuery(); | ||
* | ||
* const job = bigquery.job('job-id'); | ||
* | ||
* job.cancel((err, apiResponse) =>{ | ||
* // Check to see if the job completes successfully. | ||
* job.on('error', (err) => {}); | ||
* job.on('complete', (metadata) => {}); | ||
* }); | ||
* | ||
* //- | ||
* // If the callback is omitted, we'll return a Promise. | ||
* //- | ||
* job.cancel().then((data) => { | ||
* const apiResponse = data[0]; | ||
* }); | ||
* ``` | ||
*/ | ||
cancel(): Promise<CancelResponse>; | ||
cancel(callback: CancelCallback): void; | ||
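As the description above notes, cancellation should be confirmed with `Job#getMetadata`. A sketch combining the two calls; the `status.state` field inspected at the end comes from the Jobs REST resource and is an assumption of this example:

```
const {BigQuery} = require('@google-cloud/bigquery');

async function cancelJob(jobId) {
  const bigquery = new BigQuery();
  const job = bigquery.job(jobId);

  // Request cancellation, then check whether it actually took effect.
  await job.cancel();
  const [metadata] = await job.getMetadata();
  console.log(metadata.status && metadata.status.state); // assumed metadata shape
}

cancelJob('job-id').catch(console.error);
```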
/** | ||
* Get the results of a job. | ||
* | ||
* See {@link https://cloud.google.com/bigquery/docs/reference/v2/jobs/getQueryResults| Jobs: getQueryResults API Documentation} | ||
* | ||
* @param {object} [options] Configuration object. | ||
* @param {boolean} [options.autoPaginate=true] Have pagination handled | ||
* automatically. | ||
* @param {number} [options.maxApiCalls] Maximum number of API calls to make. | ||
* @param {number} [options.maxResults] Maximum number of results to read. | ||
* @param {string} [options.pageToken] Page token, returned by a previous call, | ||
* to request the next page of results. Note: This is automatically added | ||
* to the `nextQuery` argument of your callback. | ||
* @param {number} [options.startIndex] Zero-based index of the starting row. | ||
* @param {number} [options.timeoutMs] How long to wait for the query to | ||
* complete, in milliseconds, before returning. Default is 10 seconds. | ||
* If the timeout passes before the job completes, an error will be returned | ||
* and the 'jobComplete' field in the response will be false. | ||
* @param {boolean|IntegerTypeCastOptions} [options.wrapIntegers=false] Wrap values | ||
* of 'INT64' type in {@link BigQueryInt} objects. | ||
* If a `boolean`, this will wrap values in {@link BigQueryInt} objects. | ||
* If an `object`, this will return a value returned by | ||
* `wrapIntegers.integerTypeCastFunction`. | ||
* @param {QueryResultsCallback|ManualQueryResultsCallback} [callback] The | ||
* callback function. If `autoPaginate` is set to false a | ||
* {@link ManualQueryResultsCallback} should be used. | ||
* @returns {Promise<QueryResultsCallback>} | ||
* | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
* const bigquery = new BigQuery(); | ||
* | ||
* const job = bigquery.job('job-id'); | ||
* | ||
* //- | ||
* // Get all of the results of a query. | ||
* //- | ||
* job.getQueryResults((err, rows) => { | ||
* if (!err) { | ||
* // rows is an array of results. | ||
* } | ||
* }); | ||
* | ||
* //- | ||
* // Customize the results you want to fetch. | ||
* //- | ||
* job.getQueryResults({ | ||
* maxResults: 100 | ||
* }, (err, rows) => {}); | ||
* | ||
* //- | ||
* // To control how many API requests are made and page through the results | ||
* // manually, set `autoPaginate` to `false`. | ||
* //- | ||
* function manualPaginationCallback(err, rows, nextQuery, apiResponse) { | ||
* if (nextQuery) { | ||
* // More results exist. | ||
* job.getQueryResults(nextQuery, manualPaginationCallback); | ||
* } | ||
* } | ||
* | ||
* job.getQueryResults({ | ||
* autoPaginate: false | ||
* }, manualPaginationCallback); | ||
* | ||
* //- | ||
* // If the callback is omitted, we'll return a Promise. | ||
* //- | ||
* job.getQueryResults().then((data) => { | ||
* const rows = data[0]; | ||
* }); | ||
* ``` | ||
*/ | ||
getQueryResults(options?: QueryResultsOptions): Promise<QueryRowsResponse>; | ||
@@ -110,0 +229,0 @@ getQueryResults(options: QueryResultsOptions, callback: QueryRowsCallback): void; |
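The `timeoutMs`, `maxResults`, and `wrapIntegers` options described above combine naturally in the promise form; a minimal sketch:

```
const {BigQuery} = require('@google-cloud/bigquery');

async function readResults(jobId) {
  const bigquery = new BigQuery();
  const job = bigquery.job(jobId);

  const [rows] = await job.getQueryResults({
    maxResults: 1000,
    timeoutMs: 30000,   // wait up to 30s for the job to complete
    wrapIntegers: true, // wrap INT64 values in BigQueryInt objects
  });
  return rows;
}

readResults('job-id').catch(console.error);
```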
@@ -65,2 +65,3 @@ "use strict"; | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
@@ -94,2 +95,3 @@ * const bigquery = new BigQuery(); | ||
* job.removeAllListeners(); | ||
* ``` | ||
*/ | ||
@@ -120,2 +122,3 @@ class Job extends common_1.Operation { | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
@@ -134,2 +137,3 @@ * const bigquery = new BigQuery(); | ||
* }); | ||
* ``` | ||
*/ | ||
@@ -152,2 +156,5 @@ exists: true, | ||
* @method Job#get | ||
* @param {object} [options] Configuration object. | ||
* @param {string} [options.location] The geographic location of the job. | ||
* Required except for US and EU. | ||
* @param {GetJobCallback} [callback] The callback function. | ||
@@ -160,2 +167,3 @@ * @param {?error} callback.err An error returned while making this | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
@@ -179,2 +187,3 @@ * const bigquery = new BigQuery(); | ||
* }); | ||
* ``` | ||
*/ | ||
@@ -197,3 +206,3 @@ get: true, | ||
* | ||
* @see [Jobs: get API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/v2/jobs/get} | ||
* See {@link https://cloud.google.com/bigquery/docs/reference/v2/jobs/get| Jobs: get API Documentation} | ||
* | ||
@@ -209,2 +218,3 @@ * @method Job#getMetadata | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
@@ -223,2 +233,3 @@ * const bigquery = new BigQuery(); | ||
* }); | ||
* ``` | ||
*/ | ||
@@ -261,2 +272,3 @@ getMetadata: { | ||
* @example | ||
* ``` | ||
* const through2 = require('through2'); | ||
@@ -275,46 +287,6 @@ * const fs = require('fs'); | ||
* .pipe(fs.createWriteStream('./test/testdata/testfile.json')); | ||
* ``` | ||
*/ | ||
this.getQueryResultsStream = paginator_1.paginator.streamify('getQueryResultsAsStream_'); | ||
} | ||
/** | ||
* @callback CancelCallback | ||
* @param {?Error} err Request error, if any. | ||
* @param {object} metadata The job metadata. | ||
* @param {object} apiResponse The full API response. | ||
*/ | ||
/** | ||
* @typedef {array} CancelResponse | ||
* @property {object} 0 The job metadata. | ||
* @property {object} 1 The full API response. | ||
*/ | ||
/** | ||
* Cancel a job. Use {@link Job#getMetadata} to see if the cancel | ||
* completes successfully. See an example implementation below. | ||
* | ||
* @see [Jobs: cancel API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/v2/jobs/cancel} | ||
* | ||
* @param {CancelCallback} [callback] The callback function. | ||
* @param {?error} callback.err An error returned while making this request. | ||
* @param {object} callback.apiResponse The full API response. | ||
* @returns {Promise<CancelResponse>} | ||
* | ||
* @example | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
* const bigquery = new BigQuery(); | ||
* | ||
* const job = bigquery.job('job-id'); | ||
* | ||
* job.cancel((err, apiResponse) =>{ | ||
* // Check to see if the job completes successfully. | ||
* job.on('error', (err) => {}); | ||
* job.on('complete', (metadata) => {}); | ||
* }); | ||
* | ||
* //- | ||
* // If the callback is omitted, we'll return a Promise. | ||
* //- | ||
* job.cancel().then((data) => { | ||
* const apiResponse = data[0]; | ||
* }); | ||
*/ | ||
cancel(callback) { | ||
@@ -331,74 +303,2 @@ let qs; | ||
} | ||
/** | ||
* Get the results of a job. | ||
* | ||
* @see [Jobs: getQueryResults API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/v2/jobs/getQueryResults} | ||
* | ||
* @param {object} [options] Configuration object. | ||
* @param {boolean} [options.autoPaginate=true] Have pagination handled | ||
* automatically. | ||
* @param {number} [options.maxApiCalls] Maximum number of API calls to make. | ||
* @param {number} [options.maxResults] Maximum number of results to read. | ||
* @param {string} [options.pageToken] Page token, returned by a previous call, | ||
* to request the next page of results. Note: This is automatically added | ||
* to the `nextQuery` argument of your callback. | ||
* @param {number} [options.startIndex] Zero-based index of the starting row. | ||
* @param {number} [options.timeoutMs] How long to wait for the query to | ||
* complete, in milliseconds, before returning. Default is 10 seconds. | ||
* If the timeout passes before the job completes, an error will be returned | ||
* and the 'jobComplete' field in the response will be false. | ||
* @param {boolean|IntegerTypeCastOptions} [options.wrapIntegers=false] Wrap values | ||
* of 'INT64' type in {@link BigQueryInt} objects. | ||
* If a `boolean`, this will wrap values in {@link BigQueryInt} objects. | ||
* If an `object`, this will return a value returned by | ||
* `wrapIntegers.integerTypeCastFunction`. | ||
* @param {QueryResultsCallback|ManualQueryResultsCallback} [callback] The | ||
* callback function. If `autoPaginate` is set to false a | ||
* {@link ManualQueryResultsCallback} should be used. | ||
* @returns {Promise<QueryResultsCallback>} | ||
* | ||
* @example | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
* const bigquery = new BigQuery(); | ||
* | ||
* const job = bigquery.job('job-id'); | ||
* | ||
* //- | ||
* // Get all of the results of a query. | ||
* //- | ||
* job.getQueryResults((err, rows) => { | ||
* if (!err) { | ||
* // rows is an array of results. | ||
* } | ||
* }); | ||
* | ||
* //- | ||
* // Customize the results you want to fetch. | ||
* //- | ||
* job.getQueryResults({ | ||
* maxResults: 100 | ||
* }, (err, rows) => {}); | ||
* | ||
* //- | ||
* // To control how many API requests are made and page through the results | ||
* // manually, set `autoPaginate` to `false`. | ||
* //- | ||
* function manualPaginationCallback(err, rows, nextQuery, apiResponse) { | ||
* if (nextQuery) { | ||
* // More results exist. | ||
* job.getQueryResults(nextQuery, manualPaginationCallback); | ||
* } | ||
* } | ||
* | ||
* job.getQueryResults({ | ||
* autoPaginate: false | ||
* }, manualPaginationCallback); | ||
* | ||
* //- | ||
* // If the callback is omitted, we'll return a Promise. | ||
* //- | ||
* job.getQueryResults().then((data) => { | ||
* const rows = data[0]; | ||
* }); | ||
*/ | ||
getQueryResults(optionsOrCallback, cb) { | ||
@@ -405,0 +305,0 @@ const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; |
@@ -43,2 +43,3 @@ /*! | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
@@ -49,2 +50,3 @@ * const bigquery = new BigQuery(); | ||
* const model = dataset.model('my-model'); | ||
* ``` | ||
*/ | ||
@@ -55,5 +57,156 @@ declare class Model extends common.ServiceObject { | ||
constructor(dataset: Dataset, id: string); | ||
/** | ||
* @callback JobCallback | ||
* @param {?Error} err Request error, if any. | ||
* @param {object} job The [Job]{@link https://cloud.google.com/bigquery/docs/reference/v2/Job} resource. | ||
* @param {object} apiResponse The full API response. | ||
*/ | ||
/** | ||
* @typedef {array} JobResponse | ||
* @property {object} 0 The [Job]{@link https://cloud.google.com/bigquery/docs/reference/v2/Job} resource. | ||
* @property {object} 1 The full API response. | ||
*/ | ||
/** | ||
* Export model to Cloud Storage. | ||
* | ||
* See {@link https://cloud.google.com/bigquery/docs/reference/v2/jobs/insert| Jobs: insert API Documentation} | ||
* | ||
* @param {string|File} destination Where the model should be exported | ||
* to. A string or {@link | ||
* https://googleapis.dev/nodejs/storage/latest/File.html File} | ||
* object. | ||
* @param {object} [options] The configuration object. For all extract job options, see [CreateExtractJobOptions]{@link https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationExtract}. | ||
* @param {string} [options.format] The format to export the data in. | ||
* Allowed options are "ML_TF_SAVED_MODEL" or "ML_XGBOOST_BOOSTER". | ||
* Default: "ML_TF_SAVED_MODEL". | ||
* @param {string} [options.jobId] Custom job id. | ||
* @param {string} [options.jobPrefix] Prefix to apply to the job id. | ||
* @param {JobCallback} [callback] The callback function. | ||
* @param {?error} callback.err An error returned while making this request. | ||
* @param {Job} callback.job The job used to export the model. | ||
* @param {object} callback.apiResponse The full API response. | ||
* @returns {Promise<JobResponse>} | ||
* | ||
* @throws {Error} If a destination isn't a string or File object. | ||
* | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
* const bigquery = new BigQuery(); | ||
* const dataset = bigquery.dataset('my-dataset'); | ||
* const model = dataset.model('my-model'); | ||
* | ||
* const extractedModel = 'gs://my-bucket/extracted-model'; | ||
* | ||
* function callback(err, job, apiResponse) { | ||
* // `job` is a Job object that can be used to check the status of the | ||
* // request. | ||
* } | ||
* | ||
* //- | ||
* // To use the default options, just pass a string or a {@link | ||
* // https://googleapis.dev/nodejs/storage/latest/File.html File} | ||
* // object. | ||
* // | ||
* // Note: The default format is 'ML_TF_SAVED_MODEL'. | ||
* //- | ||
* model.createExtractJob(extractedModel, callback); | ||
* | ||
* //- | ||
* // If you need more customization, pass an `options` object. | ||
* //- | ||
* const options = { | ||
* format: 'ML_TF_SAVED_MODEL', | ||
* jobId: '123abc' | ||
* }; | ||
* | ||
* model.createExtractJob(extractedModel, options, callback); | ||
* | ||
* //- | ||
* // If the callback is omitted, we'll return a Promise. | ||
* //- | ||
* model.createExtractJob(extractedModel, options).then((data) => { | ||
* const job = data[0]; | ||
* const apiResponse = data[1]; | ||
* }); | ||
* ``` | ||
*/ | ||
createExtractJob(destination: string | File, options?: CreateExtractJobOptions): Promise<JobResponse>; | ||
createExtractJob(destination: string | File, options: CreateExtractJobOptions, callback: JobCallback): void; | ||
createExtractJob(destination: string | File, callback: JobCallback): void; | ||
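The alternate documented format, 'ML_XGBOOST_BOOSTER', works the same way; an `await`-based sketch in which the bucket path is a placeholder and `job.id` is assumed only for logging:

```
const {BigQuery} = require('@google-cloud/bigquery');

async function exportBoosterModel() {
  const bigquery = new BigQuery();
  const model = bigquery.dataset('my-dataset').model('my-model');

  const [job] = await model.createExtractJob('gs://my-bucket/extracted-model', {
    format: 'ML_XGBOOST_BOOSTER',
    jobPrefix: 'model-export-', // prefix applied to the generated job id
  });
  console.log(`Started extract job ${job.id}`); // `job.id` assumed
}

exportBoosterModel().catch(console.error);
```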
/** | ||
* @callback JobMetadataCallback | ||
* @param {?Error} err Request error, if any. | ||
* @param {object} metadata The job metadata. | ||
* @param {object} apiResponse The full API response. | ||
*/ | ||
/** | ||
* @typedef {array} JobMetadataResponse | ||
* @property {object} 0 The job metadata. | ||
* @property {object} 1 The full API response. | ||
*/ | ||
/** | ||
* Export model to Cloud Storage. | ||
* | ||
* @param {string|File} destination Where the model should be exported | ||
* to. A string or {@link | ||
* https://googleapis.dev/nodejs/storage/latest/File.html File} | ||
* object. | ||
* @param {object} [options] The configuration object. For all extract job options, see [CreateExtractJobOptions]{@link https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationExtract}. | ||
* @param {string} [options.format] The format to export | ||
* the data in. Allowed options are "ML_TF_SAVED_MODEL" or | ||
* "ML_XGBOOST_BOOSTER". Default: "ML_TF_SAVED_MODEL". | ||
* @param {string} [options.jobId] Custom id for the underlying job. | ||
* @param {string} [options.jobPrefix] Prefix to apply to the underlying job id. | ||
* @param {JobMetadataCallback} [callback] The callback function. | ||
* @param {?error} callback.err An error returned while making this request | ||
* @param {object} callback.apiResponse The full API response. | ||
* @returns {Promise<JobMetadataResponse>} | ||
* | ||
* @throws {Error} If destination isn't a string or File object. | ||
* | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
* const bigquery = new BigQuery(); | ||
* const dataset = bigquery.dataset('my-dataset'); | ||
* const model = dataset.model('my-model'); | ||
* | ||
* const extractedModel = 'gs://my-bucket/extracted-model'; | ||
* | ||
* | ||
* //- | ||
* function callback(err, job, apiResponse) { | ||
* // `job` is a Job object that can be used to check the status of the | ||
* // request. | ||
* } | ||
* | ||
* //- | ||
* // To use the default options, just pass a string or a {@link | ||
* // https://googleapis.dev/nodejs/storage/latest/File.html File} | ||
* // object. | ||
* // | ||
* // Note: The default format is 'ML_TF_SAVED_MODEL'. | ||
* //- | ||
* model.createExtractJob(extractedModel, callback); | ||
* | ||
* //- | ||
* // If you need more customization, pass an `options` object. | ||
* //- | ||
* const options = { | ||
* format: 'ML_TF_SAVED_MODEL', | ||
* jobId: '123abc' | ||
* }; | ||
* | ||
* model.createExtractJob(extractedModel, options, callback); | ||
* | ||
* //- | ||
* // If the callback is omitted, we'll return a Promise. | ||
* //- | ||
* model.createExtractJob(extractedModel, options).then((data) => { | ||
* const job = data[0]; | ||
* const apiResponse = data[1]; | ||
* }); | ||
* ``` | ||
*/ | ||
extract(destination: string | File, options?: CreateExtractJobOptions): Promise<JobMetadataResponse>; | ||
@@ -60,0 +213,0 @@ extract(destination: string | File, options: CreateExtractJobOptions, callback?: JobMetadataCallback): void; |
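`Model#extract` is the convenience form of the job above; per `JobMetadataResponse`, the promise resolves with the job metadata first. A minimal sketch with a placeholder bucket path:

```
const {BigQuery} = require('@google-cloud/bigquery');

async function extractModel() {
  const bigquery = new BigQuery();
  const model = bigquery.dataset('my-dataset').model('my-model');

  // Resolves with the job metadata (see JobMetadataResponse above).
  const [jobMetadata] = await model.extract('gs://my-bucket/extracted-model');
  return jobMetadata;
}

extractModel().catch(console.error);
```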
@@ -39,2 +39,3 @@ "use strict"; | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
@@ -45,2 +46,3 @@ * const bigquery = new BigQuery(); | ||
* const model = dataset.model('my-model'); | ||
* ``` | ||
*/ | ||
@@ -58,3 +60,3 @@ class Model extends common.ServiceObject { | ||
* | ||
* @see [Models: delete API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/rest/v2/models/delete} | ||
* See {@link https://cloud.google.com/bigquery/docs/reference/rest/v2/models/delete| Models: delete API Documentation} | ||
* | ||
@@ -69,2 +71,3 @@ * @method Model#delete | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
@@ -77,5 +80,10 @@ * const bigquery = new BigQuery(); | ||
* | ||
* @example <caption>If the callback is omitted we'll return a Promise.</caption> | ||
* ``` | ||
* @example If the callback is omitted we'll return a Promise. | ||
* ``` | ||
* const [apiResponse] = await model.delete(); | ||
* @example <caption>If successful, the response body is empty.</caption> | ||
* ``` | ||
* @example If successful, the response body is empty. | ||
* ``` | ||
* ``` | ||
*/ | ||
@@ -103,2 +111,3 @@ delete: true, | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
@@ -111,4 +120,7 @@ * const bigquery = new BigQuery(); | ||
* | ||
* @example <caption>If the callback is omitted we'll return a Promise.</caption> | ||
* ``` | ||
* @example If the callback is omitted we'll return a Promise. | ||
* ``` | ||
* const [exists] = await model.exists(); | ||
* ``` | ||
*/ | ||
@@ -130,3 +142,3 @@ exists: true, | ||
* | ||
* @see [Models: get API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/rest/v2/models/get} | ||
* See {@link https://cloud.google.com/bigquery/docs/reference/rest/v2/models/get| Models: get API Documentation} | ||
* | ||
@@ -142,2 +154,3 @@ * @method Model#get: | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
@@ -154,4 +167,7 @@ * const bigquery = new BigQuery(); | ||
* | ||
* @example <caption>If the callback is omitted we'll return a Promise.</caption> | ||
* ``` | ||
* @example If the callback is omitted we'll return a Promise. | ||
* ``` | ||
* await model.get(); | ||
* ``` | ||
*/ | ||
@@ -173,3 +189,3 @@ get: true, | ||
* | ||
* @see [Models: get API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/rest/v2/models/get} | ||
* See {@link https://cloud.google.com/bigquery/docs/reference/rest/v2/models/get| Models: get API Documentation} | ||
* | ||
@@ -185,2 +201,3 @@ * @method Model#getMetadata | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
@@ -193,4 +210,7 @@ * const bigquery = new BigQuery(); | ||
* | ||
* @example <caption>If the callback is omitted we'll return a Promise.</caption> | ||
* ``` | ||
* @example If the callback is omitted we'll return a Promise. | ||
* ``` | ||
* const [metadata, apiResponse] = await model.getMetadata(); | ||
* ``` | ||
*/ | ||
@@ -210,3 +230,3 @@ getMetadata: true, | ||
/** | ||
* @see [Models: patch API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/rest/v2/models/patch} | ||
* See {@link https://cloud.google.com/bigquery/docs/reference/rest/v2/models/patch| Models: patch API Documentation} | ||
* | ||
@@ -223,2 +243,3 @@ * @method Model#setMetadata | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
@@ -235,4 +256,7 @@ * const bigquery = new BigQuery(); | ||
* | ||
* @example <caption>If the callback is omitted we'll return a Promise.</caption> | ||
* ``` | ||
* @example If the callback is omitted we'll return a Promise. | ||
* ``` | ||
* const [metadata, apiResponse] = await model.setMetadata(metadata); | ||
* ``` | ||
*/ | ||
@@ -250,76 +274,2 @@ setMetadata: true, | ||
} | ||
/** | ||
* @callback JobCallback | ||
* @param {?Error} err Request error, if any. | ||
* @param {object} Job The [Job]{@link https://cloud.google.com/bigquery/docs/reference/v2/Job} resource. | ||
* @param {object} apiResponse The full API response. | ||
*/ | ||
/** | ||
* @typedef {array} JobResponse | ||
* @property {object} 0 The [Job]{@link https://cloud.google.com/bigquery/docs/reference/v2/Job} resource. | ||
* @property {object} 1 The full API response. | ||
*/ | ||
/** | ||
* Export model to Cloud Storage. | ||
* | ||
* @see [Jobs: insert API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/v2/jobs/insert} | ||
* | ||
* @param {string|File} destination Where the model should be exported | ||
* to. A string or {@link | ||
* https://googleapis.dev/nodejs/storage/latest/File.html File} | ||
* object. | ||
* @param {object} [options] The configuration object. For all extract job options, see [CreateExtractJobOptions]{@link https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationExtract}. | ||
* @param {string} [options.format] The format to export the data in. | ||
* Allowed options are "ML_TF_SAVED_MODEL" or "ML_XGBOOST_BOOSTER". | ||
* Default: "ML_TF_SAVED_MODEL". | ||
* @param {string} [options.jobId] Custom job id. | ||
* @param {string} [options.jobPrefix] Prefix to apply to the job id. | ||
* @param {JobCallback} [callback] The callback function. | ||
* @param {?error} callback.err An error returned while making this request. | ||
* @param {Job} callback.job The job used to export the model. | ||
* @param {object} callback.apiResponse The full API response. | ||
* @returns {Promise<JobResponse>} | ||
* | ||
* @throws {Error} If a destination isn't a string or File object. | ||
* | ||
* @example | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
* const bigquery = new BigQuery(); | ||
* const dataset = bigquery.dataset('my-dataset'); | ||
* const model = dataset.model('my-model'); | ||
* | ||
* const extractedModel = 'gs://my-bucket/extracted-model'; | ||
* | ||
* function callback(err, job, apiResponse) { | ||
* // `job` is a Job object that can be used to check the status of the | ||
* // request. | ||
* } | ||
* | ||
* //- | ||
* // To use the default options, just pass a string or a {@link | ||
* https://googleapis.dev/nodejs/storage/latest/File.html File} | ||
* object. | ||
* // | ||
* // Note: The default format is 'ML_TF_SAVED_MODEL'. | ||
* //- | ||
* model.createExtractJob(extractedModel, callback); | ||
* | ||
* //- | ||
* // If you need more customization, pass an `options` object. | ||
* //- | ||
* const options = { | ||
* format: 'ML_TF_SAVED_MODEL', | ||
* jobId: '123abc' | ||
* }; | ||
* | ||
* model.createExtractJob(extractedModel, options, callback); | ||
* | ||
* //- | ||
* // If the callback is omitted, we'll return a Promise. | ||
* //- | ||
* model.createExtractJob(extractedModel, options).then((data) => { | ||
* const job = data[0]; | ||
* const apiResponse = data[1]; | ||
* }); | ||
*/ | ||
createExtractJob(destination, optionsOrCallback, cb) { | ||
@@ -371,75 +321,2 @@ let options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; | ||
} | ||
/** | ||
* @callback JobMetadataCallback | ||
* @param {?Error} err Request error, if any. | ||
* @param {object} metadata The job metadata. | ||
* @param {object} apiResponse The full API response. | ||
*/ | ||
/** | ||
* @typedef {array} JobMetadataResponse | ||
* @property {object} 0 The job metadata. | ||
* @property {object} 1 The full API response. | ||
*/ | ||
/** | ||
* Export model to Cloud Storage. | ||
* | ||
* @param {string|File} destination Where the model should be exported | ||
* to. A string or {@link | ||
* https://googleapis.dev/nodejs/storage/latest/File.html File} | ||
* object. | ||
* @param {object} [options] The configuration object. For all extract job options, see [CreateExtractJobOptions]{@link https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationExtract}. | ||
* @param {string} [options.format] The format to export | ||
* the data in. Allowed options are "ML_TF_SAVED_MODEL" or | ||
* "ML_XGBOOST_BOOSTER". Default: "ML_TF_SAVED_MODEL". | ||
* @param {string} [options.jobId] Custom id for the underlying job. | ||
* @param {string} [options.jobPrefix] Prefix to apply to the underlying job id. | ||
* @param {JobMetadataCallback} [callback] The callback function. | ||
* @param {?error} callback.err An error returned while making this request | ||
* @param {object} callback.apiResponse The full API response. | ||
* @returns {Promise<JobMetadataResponse>} | ||
* | ||
* @throws {Error} If destination isn't a string or File object. | ||
* | ||
* @example | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
* const bigquery = new BigQuery(); | ||
* const dataset = bigquery.dataset('my-dataset'); | ||
* const model = dataset.model('my-model'); | ||
* | ||
* const extractedModel = 'gs://my-bucket/extracted-model'; | ||
* | ||
* | ||
* //- | ||
* function callback(err, job, apiResponse) { | ||
* // `job` is a Job object that can be used to check the status of the | ||
* // request. | ||
* } | ||
* | ||
* //- | ||
* // To use the default options, just pass a string or a {@link | ||
* https://googleapis.dev/nodejs/storage/latest/File.html File} | ||
* object. | ||
* // | ||
* // Note: The default format is 'ML_TF_SAVED_MODEL'. | ||
* //- | ||
* model.createExtractJob(extractedModel, callback); | ||
* | ||
* //- | ||
* // If you need more customization, pass an `options` object. | ||
* //- | ||
* const options = { | ||
* format: 'ML_TF_SAVED_MODEL', | ||
* jobId: '123abc' | ||
* }; | ||
* | ||
* model.createExtractJob(extractedModel, options, callback); | ||
* | ||
* //- | ||
* // If the callback is omitted, we'll return a Promise. | ||
* //- | ||
* model.createExtractJob(extractedModel, options).then((data) => { | ||
* const job = data[0]; | ||
* const apiResponse = data[1]; | ||
* }); | ||
*/ | ||
extract(destination, optionsOrCallback, cb) { | ||
@@ -446,0 +323,0 @@ const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; |
@@ -28,2 +28,3 @@ /*! | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
@@ -34,2 +35,3 @@ * const bigquery = new BigQuery(); | ||
* const routine = dataset.routine('my_routine'); | ||
* ``` | ||
*/ | ||
@@ -36,0 +38,0 @@ declare class Routine extends common.ServiceObject { |
@@ -32,2 +32,3 @@ "use strict"; | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
@@ -38,2 +39,3 @@ * const bigquery = new BigQuery(); | ||
* const routine = dataset.routine('my_routine'); | ||
* ``` | ||
*/ | ||
@@ -46,3 +48,3 @@ class Routine extends common.ServiceObject { | ||
* | ||
* @see [Routines: insert API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/rest/v2/routines/insert} | ||
* See {@link https://cloud.google.com/bigquery/docs/reference/rest/v2/routines/insert| Routines: insert API Documentation} | ||
* | ||
@@ -55,2 +57,3 @@ * @method Routine#create | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
@@ -81,4 +84,7 @@ * const bigquery = new BigQuery(); | ||
* | ||
* @example <caption>If the callback is omitted a Promise will be returned</caption> | ||
* ``` | ||
* @example If the callback is omitted a Promise will be returned | ||
* ``` | ||
* const [routine, apiResponse] = await routine.create(config); | ||
* ``` | ||
*/ | ||
@@ -98,3 +104,3 @@ create: true, | ||
* | ||
* @see [Routines: delete API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/rest/v2/routines/delete} | ||
* See {@link https://cloud.google.com/bigquery/docs/reference/rest/v2/routines/delete| Routines: delete API Documentation} | ||
* | ||
@@ -106,2 +112,3 @@ * @method Routine#delete | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
@@ -118,4 +125,7 @@ * const bigquery = new BigQuery(); | ||
* | ||
* @example <caption>If the callback is omitted a Promise will be returned</caption> | ||
* ``` | ||
* @example If the callback is omitted a Promise will be returned | ||
* ``` | ||
* const [apiResponse] = await routine.delete(); | ||
* ``` | ||
*/ | ||
@@ -140,2 +150,3 @@ delete: true, | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
@@ -148,4 +159,7 @@ * const bigquery = new BigQuery(); | ||
* | ||
* @example <caption>If the callback is omitted a Promise will be returned</caption> | ||
* ``` | ||
* @example If the callback is omitted a Promise will be returned | ||
* ``` | ||
* const [exists] = await routine.exists(); | ||
* ``` | ||
*/ | ||
@@ -167,3 +181,3 @@ exists: true, | ||
* | ||
* @see [Routines: get API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/rest/v2/routines/get} | ||
* See {@link https://cloud.google.com/bigquery/docs/reference/rest/v2/routines/get| Routines: get API Documentation} | ||
* | ||
@@ -175,2 +189,3 @@ * @method Routine#get | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
@@ -183,4 +198,7 @@ * const bigquery = new BigQuery(); | ||
* | ||
* @example <caption>If the callback is omitted a Promise will be returned</caption> | ||
* ``` | ||
* @example If the callback is omitted a Promise will be returned | ||
* ``` | ||
* const [routine2] = await routine.get(); | ||
* ``` | ||
*/ | ||
@@ -202,3 +220,3 @@ get: true, | ||
* | ||
* @see [Routines: get API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/rest/v2/routines/get} | ||
* See {@link https://cloud.google.com/bigquery/docs/reference/rest/v2/routines/get| Routines: get API Documentation} | ||
* | ||
@@ -210,2 +228,3 @@ * @method Routine#getMetadata | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
@@ -218,4 +237,7 @@ * const bigquery = new BigQuery(); | ||
* | ||
* @example <caption>If the callback is omitted a Promise will be returned</caption> | ||
* ``` | ||
* @example If the callback is omitted a Promise will be returned | ||
* ``` | ||
* const [metadata, apiResponse] = await routine.getMetadata(); | ||
* ``` | ||
*/ | ||
@@ -237,3 +259,3 @@ getMetadata: true, | ||
* | ||
* @see [Routines: update API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/rest/v2/routines/update} | ||
* See {@link https://cloud.google.com/bigquery/docs/reference/rest/v2/routines/update| Routines: update API Documentation} | ||
* | ||
@@ -246,2 +268,3 @@ * @method Routine#setMetadata | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
@@ -258,4 +281,7 @@ * const bigquery = new BigQuery(); | ||
* | ||
* @example <caption>If the callback is omitted a Promise will be returned</caption> | ||
* ``` | ||
* @example If the callback is omitted a Promise will be returned | ||
* ``` | ||
* const [metadata, apiResponse] = await routine.setMetadata(updates); | ||
* ``` | ||
*/ | ||
@@ -262,0 +288,0 @@ setMetadata: { |
@@ -109,2 +109,3 @@ /*! | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
@@ -115,2 +116,3 @@ * const bigquery = new BigQuery(); | ||
* const table = dataset.table('my-table'); | ||
* ``` | ||
*/ | ||
@@ -148,17 +150,419 @@ declare class Table extends common.ServiceObject { | ||
static formatMetadata_(options: TableMetadata): FormattedMetadata; | ||
/** | ||
* @callback JobMetadataCallback | ||
* @param {?Error} err Request error, if any. | ||
* @param {object} apiResponse The full API response. | ||
*/ | ||
/** | ||
* @typedef {array} JobMetadataResponse | ||
* @property {object} 0 The full API response. | ||
*/ | ||
/** | ||
* Copy data from one table to another, optionally creating that table. | ||
* | ||
* @param {Table} destination The destination table. | ||
* @param {object} [metadata] Metadata to set with the copy operation. The | ||
* metadata object should be in the format of a | ||
* {@link https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationTableCopy| `JobConfigurationTableCopy`} | ||
* object. | ||
* @param {string} [metadata.jobId] Custom id for the underlying job. | ||
* @param {string} [metadata.jobPrefix] Prefix to apply to the underlying job | ||
* id. | ||
* @param {JobMetadataCallback} [callback] The callback function. | ||
* @param {?error} callback.err An error returned while making this request | ||
* @param {object} callback.apiResponse The full API response. | ||
* @returns {Promise<JobMetadataResponse>} | ||
* | ||
* @throws {Error} If a destination other than a Table object is provided. | ||
* | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
* const bigquery = new BigQuery(); | ||
* const dataset = bigquery.dataset('my-dataset'); | ||
* | ||
* const table = dataset.table('my-table'); | ||
* const yourTable = dataset.table('your-table'); | ||
* | ||
* table.copy(yourTable, (err, apiResponse) => {}); | ||
* | ||
* //- | ||
* // See https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationTableCopy | ||
* // for all available options. | ||
* //- | ||
* const metadata = { | ||
* createDisposition: 'CREATE_NEVER', | ||
* writeDisposition: 'WRITE_TRUNCATE' | ||
* }; | ||
* | ||
* table.copy(yourTable, metadata, (err, apiResponse) => {}); | ||
* | ||
* //- | ||
* // If the callback is omitted, we'll return a Promise. | ||
* //- | ||
* table.copy(yourTable, metadata).then((data) => { | ||
* const apiResponse = data[0]; | ||
* }); | ||
* ``` | ||
*/ | ||
copy(destination: Table, metadata?: CopyTableMetadata): Promise<JobMetadataResponse>; | ||
copy(destination: Table, metadata: CopyTableMetadata, callback: JobMetadataCallback): void; | ||
copy(destination: Table, callback: JobMetadataCallback): void; | ||
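The copy metadata accepts the `JobConfigurationTableCopy` fields plus the `jobId`/`jobPrefix` options noted above; an `await`-based sketch:

```
const {BigQuery} = require('@google-cloud/bigquery');

async function copyTable() {
  const bigquery = new BigQuery();
  const dataset = bigquery.dataset('my-dataset');

  const source = dataset.table('my-table');
  const destination = dataset.table('your-table');

  const [apiResponse] = await source.copy(destination, {
    writeDisposition: 'WRITE_TRUNCATE', // overwrite the destination table
    jobPrefix: 'copy-my-table-',        // prefix for the generated job id
  });
  return apiResponse;
}

copyTable().catch(console.error);
```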
/** | ||
* @callback JobMetadataCallback | ||
* @param {?Error} err Request error, if any. | ||
* @param {object} apiResponse The full API response. | ||
*/ | ||
/** | ||
* @typedef {array} JobMetadataResponse | ||
* @property {object} 0 The full API response. | ||
*/ | ||
/** | ||
* Copy data from multiple tables into this table. | ||
* | ||
* @param {Table|Table[]} sourceTables The | ||
* source table(s) to copy data from. | ||
* @param {object=} metadata Metadata to set with the copy operation. The | ||
* metadata object should be in the format of a | ||
* {@link https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationTableCopy| `JobConfigurationTableCopy`} | ||
* object. | ||
* @param {string} [metadata.jobId] Custom id for the underlying job. | ||
* @param {string} [metadata.jobPrefix] Prefix to apply to the underlying job | ||
* id. | ||
* @param {JobMetadataCallback} [callback] The callback function. | ||
* @param {?error} callback.err An error returned while making this request | ||
* @param {object} callback.apiResponse The full API response. | ||
* @returns {Promise<JobMetadataResponse>} | ||
* | ||
* @throws {Error} If a source other than a Table object is provided. | ||
* | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
* const bigquery = new BigQuery(); | ||
* const dataset = bigquery.dataset('my-dataset'); | ||
* const table = dataset.table('my-table'); | ||
* | ||
* const sourceTables = [ | ||
* dataset.table('your-table'), | ||
* dataset.table('your-second-table') | ||
* ]; | ||
* | ||
* table.copyFrom(sourceTables, (err, apiResponse) => {}); | ||
* | ||
* //- | ||
* // See https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationTableCopy | ||
* // for all available options. | ||
* //- | ||
* const metadata = { | ||
* createDisposition: 'CREATE_NEVER', | ||
* writeDisposition: 'WRITE_TRUNCATE' | ||
* }; | ||
* | ||
* table.copyFrom(sourceTables, metadata, (err, apiResponse) => {}); | ||
* | ||
* //- | ||
* // If the callback is omitted, we'll return a Promise. | ||
* //- | ||
* table.copyFrom(sourceTables, metadata).then((data) => { | ||
* const apiResponse = data[0]; | ||
* }); | ||
* ``` | ||
*/ | ||
copyFrom(sourceTables: Table | Table[], metadata?: CopyTableMetadata): Promise<JobMetadataResponse>; | ||
copyFrom(sourceTables: Table | Table[], metadata: CopyTableMetadata, callback: JobMetadataCallback): void; | ||
copyFrom(sourceTables: Table | Table[], callback: JobMetadataCallback): void; | ||
/** | ||
* Copy data from one table to another, optionally creating that table. | ||
* | ||
* See {@link https://cloud.google.com/bigquery/docs/reference/v2/jobs/insert| Jobs: insert API Documentation} | ||
* | ||
* @param {Table} destination The destination table. | ||
* @param {object} [metadata] Metadata to set with the copy operation. The | ||
* metadata object should be in the format of a | ||
* {@link https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationTableCopy| `JobConfigurationTableCopy`} | ||
* object. | ||
* @param {string} [metadata.jobId] Custom job id. | ||
* @param {string} [metadata.jobPrefix] Prefix to apply to the job id. | ||
* @param {JobCallback} [callback] The callback function. | ||
* @param {?error} callback.err An error returned while making this request | ||
* @param {Job} callback.job The job used to copy your table. | ||
* @param {object} callback.apiResponse The full API response. | ||
* @returns {Promise<JobResponse>} | ||
* | ||
* @throws {Error} If a destination other than a Table object is provided. | ||
* | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
* const bigquery = new BigQuery(); | ||
* const dataset = bigquery.dataset('my-dataset'); | ||
* const table = dataset.table('my-table'); | ||
* | ||
* const yourTable = dataset.table('your-table'); | ||
* table.createCopyJob(yourTable, (err, job, apiResponse) => { | ||
* // `job` is a Job object that can be used to check the status of the | ||
* // request. | ||
* }); | ||
* | ||
* //- | ||
* // See https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationTableCopy | ||
* // for all available options. | ||
* //- | ||
* const metadata = { | ||
* createDisposition: 'CREATE_NEVER', | ||
* writeDisposition: 'WRITE_TRUNCATE' | ||
* }; | ||
* | ||
* table.createCopyJob(yourTable, metadata, (err, job, apiResponse) => {}); | ||
* | ||
* //- | ||
* // If the callback is omitted, we'll return a Promise. | ||
* //- | ||
* table.createCopyJob(yourTable, metadata).then((data) => { | ||
* const job = data[0]; | ||
* const apiResponse = data[1]; | ||
* }); | ||
* ``` | ||
*/ | ||
createCopyJob(destination: Table, metadata?: CreateCopyJobMetadata): Promise<JobResponse>; | ||
createCopyJob(destination: Table, metadata: CreateCopyJobMetadata, callback: JobCallback): void; | ||
createCopyJob(destination: Table, callback: JobCallback): void; | ||
/** | ||
* Copy data from multiple tables into this table. | ||
* | ||
* See {@link https://cloud.google.com/bigquery/docs/reference/v2/jobs/insert| Jobs: insert API Documentation} | ||
* | ||
* @param {Table|Table[]} sourceTables The | ||
* source table(s) to copy data from. | ||
* @param {object} [metadata] Metadata to set with the copy operation. The | ||
* metadata object should be in the format of a | ||
* {@link https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationTableCopy| `JobConfigurationTableCopy`} | ||
* object. | ||
* @param {string} [metadata.jobId] Custom job id. | ||
* @param {string} [metadata.jobPrefix] Prefix to apply to the job id. | ||
* @param {JobCallback} [callback] The callback function. | ||
* @param {?error} callback.err An error returned while making this request | ||
* @param {Job} callback.job The job used to copy your table. | ||
* @param {object} callback.apiResponse The full API response. | ||
* @returns {Promise<JobResponse>} | ||
* | ||
* @throws {Error} If a source other than a Table object is provided. | ||
* | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
* const bigquery = new BigQuery(); | ||
* const dataset = bigquery.dataset('my-dataset'); | ||
* const table = dataset.table('my-table'); | ||
* | ||
* const sourceTables = [ | ||
* dataset.table('your-table'), | ||
* dataset.table('your-second-table') | ||
* ]; | ||
* | ||
* const callback = (err, job, apiResponse) => { | ||
* // `job` is a Job object that can be used to check the status of the | ||
* // request. | ||
* }; | ||
* | ||
* table.createCopyFromJob(sourceTables, callback); | ||
* | ||
* //- | ||
* // See https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationTableCopy | ||
* // for all available options. | ||
* //- | ||
* const metadata = { | ||
* createDisposition: 'CREATE_NEVER', | ||
* writeDisposition: 'WRITE_TRUNCATE' | ||
* }; | ||
* | ||
* table.createCopyFromJob(sourceTables, metadata, callback); | ||
* | ||
* //- | ||
* // If the callback is omitted, we'll return a Promise. | ||
* //- | ||
* table.createCopyFromJob(sourceTables, metadata).then((data) => { | ||
* const job = data[0]; | ||
* const apiResponse = data[1]; | ||
* }); | ||
* ``` | ||
*/ | ||
createCopyFromJob(source: Table | Table[], metadata?: CopyTableMetadata): Promise<JobResponse>; | ||
createCopyFromJob(source: Table | Table[], metadata: CopyTableMetadata, callback: JobCallback): void; | ||
createCopyFromJob(source: Table | Table[], callback: JobCallback): void; | ||
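The same multi-source copy in promise form; per `JobResponse`, the job is returned before the copy finishes, so it can be inspected or polled separately (the `job.id` property used for logging is an assumption):

```
const {BigQuery} = require('@google-cloud/bigquery');

async function startCopyFromJob() {
  const bigquery = new BigQuery();
  const dataset = bigquery.dataset('my-dataset');
  const table = dataset.table('my-table');

  const sourceTables = [
    dataset.table('your-table'),
    dataset.table('your-second-table'),
  ];

  // Starts the copy job without waiting for it to complete.
  const [job] = await table.createCopyFromJob(sourceTables, {
    createDisposition: 'CREATE_NEVER',
  });
  console.log(`Copy job started: ${job.id}`); // `job.id` assumed
}

startCopyFromJob().catch(console.error);
```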
/** | ||
* Export table to Cloud Storage. | ||
* | ||
* See {@link https://cloud.google.com/bigquery/docs/reference/v2/jobs/insert| Jobs: insert API Documentation} | ||
* | ||
* @param {string|File} destination Where the file should be exported | ||
* to. A string or a {@link | ||
* https://googleapis.dev/nodejs/storage/latest/File.html File} | ||
* object. | ||
* @param {object} [options] The configuration object. | ||
* @param {string} [options.format] The format to export the data in. Allowed | ||
* options are "CSV", "JSON", "AVRO", or "PARQUET". Default: "CSV". | ||
* @param {boolean} [options.gzip] Specify if you would like the file compressed | ||
* with GZIP. Default: false. | ||
* @param {string} [options.jobId] Custom job id. | ||
* @param {string} [options.jobPrefix] Prefix to apply to the job id. | ||
* @param {JobCallback} [callback] The callback function. | ||
* @param {?error} callback.err An error returned while making this request | ||
* @param {Job} callback.job The job used to export the table. | ||
* @param {object} callback.apiResponse The full API response. | ||
* @returns {Promise<JobResponse>} | ||
* | ||
* @throws {Error} If destination isn't a File object. | ||
* @throws {Error} If destination format isn't recognized. | ||
* | ||
* @example | ||
* ``` | ||
* const {Storage} = require('@google-cloud/storage'); | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
* const bigquery = new BigQuery(); | ||
* const dataset = bigquery.dataset('my-dataset'); | ||
* const table = dataset.table('my-table'); | ||
* | ||
* const storage = new Storage({ | ||
* projectId: 'grape-spaceship-123' | ||
* }); | ||
* const extractedFile = storage.bucket('institutions').file('2014.csv'); | ||
* | ||
* function callback(err, job, apiResponse) { | ||
* // `job` is a Job object that can be used to check the status of the | ||
* // request. | ||
* } | ||
* | ||
* //- | ||
* // To use the default options, just pass a {@link | ||
* // https://googleapis.dev/nodejs/storage/latest/File.html File} | ||
* // object. | ||
* // | ||
* // Note: The exported format type will be inferred by the file's extension. | ||
* // If you wish to override this, or provide an array of destination files, | ||
* // you must provide an `options` object. | ||
* //- | ||
* table.createExtractJob(extractedFile, callback); | ||
* | ||
* //- | ||
* // If you need more customization, pass an `options` object. | ||
* //- | ||
* const options = { | ||
* format: 'json', | ||
* gzip: true | ||
* }; | ||
* | ||
* table.createExtractJob(extractedFile, options, callback); | ||
* | ||
* //- | ||
* // You can also specify multiple destination files. | ||
* //- | ||
* table.createExtractJob([ | ||
* storage.bucket('institutions').file('2014.json'), | ||
* storage.bucket('institutions-copy').file('2014.json') | ||
* ], options, callback); | ||
* | ||
* //- | ||
* // If the callback is omitted, we'll return a Promise. | ||
* //- | ||
* table.createExtractJob(extractedFile, options).then((data) => { | ||
* const job = data[0]; | ||
* const apiResponse = data[1]; | ||
* }); | ||
* ``` | ||
*/ | ||
createExtractJob(destination: File, options?: CreateExtractJobOptions): Promise<JobResponse>; | ||
createExtractJob(destination: File, options: CreateExtractJobOptions, callback: JobCallback): void; | ||
createExtractJob(destination: File, callback: JobCallback): void; | ||
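Exporting to several Cloud Storage files at once, as the example above mentions, looks like this with promises; bucket and file names are placeholders:

```
const {Storage} = require('@google-cloud/storage');
const {BigQuery} = require('@google-cloud/bigquery');

async function extractToMultipleFiles() {
  const bigquery = new BigQuery();
  const storage = new Storage();
  const table = bigquery.dataset('my-dataset').table('my-table');

  const destinations = [
    storage.bucket('institutions').file('2014.json'),
    storage.bucket('institutions-copy').file('2014.json'),
  ];

  // When passing an array of files, provide `options` so the format is explicit.
  const [job] = await table.createExtractJob(destinations, {
    format: 'json',
    gzip: true,
  });
  console.log(`Extract job started: ${job.id}`); // `job.id` assumed
}

extractToMultipleFiles().catch(console.error);
```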
/** | ||
* Load data from a local file or Storage {@link | ||
* https://googleapis.dev/nodejs/storage/latest/File.html File}. | ||
* | ||
* By loading data this way, you create a load job that will run your data | ||
* load asynchronously. If you would like instantaneous access to your data, | ||
* insert it using {@link Table#insert}. | ||
* | ||
* Note: The file type will be inferred by the given file's extension. If you | ||
* wish to override this, you must provide `metadata.format`. | ||
* | ||
* See {@link https://cloud.google.com/bigquery/docs/reference/v2/jobs/insert| Jobs: insert API Documentation} | ||
* | ||
* @param {string|File|File[]} source The source file to load. A string (path) | ||
* to a local file, or one or more {@link | ||
* https://googleapis.dev/nodejs/storage/latest/File.html File} | ||
* objects. | ||
* @param {object} [metadata] Metadata to set with the load operation. The | ||
* metadata object should be in the format of the | ||
* {@link https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad| `configuration.load`} | ||
* property of a Jobs resource. | ||
* @param {string} [metadata.format] The format the data being loaded is in. | ||
* Allowed options are "AVRO", "CSV", "JSON", "ORC", or "PARQUET". | ||
* @param {string} [metadata.jobId] Custom job id. | ||
* @param {string} [metadata.jobPrefix] Prefix to apply to the job id. | ||
* @param {JobCallback} [callback] The callback function. | ||
* @param {?error} callback.err An error returned while making this request | ||
* @param {Job} callback.job The job used to load your data. | ||
* @param {object} callback.apiResponse The full API response. | ||
* @returns {Promise<JobResponse>} | ||
* | ||
* @throws {Error} If the source isn't a string file name or a File instance. | ||
* | ||
* @example | ||
* ``` | ||
* const {Storage} = require('@google-cloud/storage'); | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
* const bigquery = new BigQuery(); | ||
* const dataset = bigquery.dataset('my-dataset'); | ||
* const table = dataset.table('my-table'); | ||
* | ||
* //- | ||
* // Load data from a local file. | ||
* //- | ||
* const callback = (err, job, apiResponse) => { | ||
* // `job` is a Job object that can be used to check the status of the | ||
* // request. | ||
* }; | ||
* | ||
* table.createLoadJob('./institutions.csv', callback); | ||
* | ||
* //- | ||
* // You may also pass in metadata in the format of a Jobs resource. See | ||
* // (https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad) | ||
* // for a full list of supported values. | ||
* //- | ||
* const metadata = { | ||
* encoding: 'ISO-8859-1', | ||
* sourceFormat: 'NEWLINE_DELIMITED_JSON' | ||
* }; | ||
* | ||
* table.createLoadJob('./my-data.csv', metadata, callback); | ||
* | ||
* //- | ||
* // Load data from a file in your Cloud Storage bucket. | ||
* //- | ||
* const storage = new Storage({ | ||
* projectId: 'grape-spaceship-123' | ||
* }); | ||
* const data = storage.bucket('institutions').file('data.csv'); | ||
* table.createLoadJob(data, callback); | ||
* | ||
* //- | ||
* // Load data from multiple files in your Cloud Storage bucket(s). | ||
* //- | ||
* table.createLoadJob([ | ||
* storage.bucket('institutions').file('2011.csv'), | ||
* storage.bucket('institutions').file('2012.csv') | ||
* ], callback); | ||
* | ||
* //- | ||
* // If the callback is omitted, we'll return a Promise. | ||
* //- | ||
* table.createLoadJob(data).then((data) => { | ||
* const job = data[0]; | ||
* const apiResponse = data[1]; | ||
* }); | ||
* ``` | ||
*/ | ||
createLoadJob(source: string | File, metadata?: JobLoadMetadata): Promise<JobResponse>; | ||
@@ -174,2 +578,10 @@ createLoadJob(source: string | File, metadata: JobLoadMetadata, callback: JobCallback): void; | ||
_createLoadJob(source: string | File | File[], metadata: JobLoadMetadata): Promise<JobResponse>; | ||
/** | ||
* Run a query as a job. No results are immediately returned. Instead, your | ||
* callback will be executed with a {@link Job} object that you must | ||
* ping for the results. See the Job documentation for explanations of how to | ||
* check on the status of the job. | ||
* | ||
* See {@link BigQuery#createQueryJob} for full documentation of this method. | ||
*/ | ||
createQueryJob(options: Query): Promise<JobResponse>; | ||
@@ -197,3 +609,3 @@ createQueryJob(options: Query, callback: JobCallback): void; | ||
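As a brief illustration of the `createQueryJob` overloads above: the call hands back a {@link Job} rather than rows, and results are fetched from that job afterwards. This is a minimal sketch, assuming illustrative dataset/table names and the `getQueryResults()` method documented on {@link Job}:

```
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
const table = bigquery.dataset('my-dataset').table('my-table');

async function runQueryJob() {
  // Start the query as a job; no rows are returned at this point.
  const [job] = await table.createQueryJob({
    query: 'SELECT name FROM `my-dataset.my-table` LIMIT 10',
  });

  // Poll the completed job for its results.
  const [rows] = await job.getQueryResults();
  rows.forEach(row => console.log(row));
}
```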
* The metadata object should be in the format of the | ||
* [`configuration.load`](https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad) | ||
* {@link https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad| `configuration.load`} | ||
* property of a Jobs resource. If a string is given, it will be used | ||
@@ -210,7 +622,7 @@ * as the filetype. | ||
* | ||
* @see [Jobs: insert API Documentation]{@link https://cloud.google.com/bigquery/docs/reference/v2/jobs/insert} | ||
* See {@link https://cloud.google.com/bigquery/docs/reference/v2/jobs/insert| Jobs: insert API Documentation} | ||
* | ||
* @param {string|object} [metadata] Metadata to set with the load operation. | ||
* The metadata object should be in the format of the | ||
* [`configuration.load`](https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad) | ||
* {@link https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad| `configuration.load`} | ||
* property of a Jobs resource. If a string is given, | ||
@@ -225,2 +637,3 @@ * it will be used as the filetype. | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
@@ -267,10 +680,242 @@ * const bigquery = new BigQuery(); | ||
* }); | ||
* ``` | ||
*/ | ||
createWriteStream(metadata: JobLoadMetadata | string): Writable; | ||
/** | ||
* Export table to Cloud Storage. | ||
* | ||
* @param {string|File} destination Where the file should be exported | ||
* to. A string or a {@link | ||
* https://googleapis.dev/nodejs/storage/latest/File.html File}. | ||
* @param {object} [options] The configuration object. | ||
* @param {string} [options.format="CSV"] The format to export the data in. | ||
* Allowed options are "AVRO", "CSV", "JSON", "ORC" or "PARQUET". | ||
* @param {boolean} [options.gzip] Specify if you would like the file compressed | ||
* with GZIP. Default: false. | ||
* @param {string} [options.jobId] Custom id for the underlying job. | ||
* @param {string} [options.jobPrefix] Prefix to apply to the underlying job id. | ||
* @param {JobMetadataCallback} [callback] The callback function. | ||
* @param {?error} callback.err An error returned while making this request | ||
* @param {object} callback.apiResponse The full API response. | ||
* @returns {Promise<JobMetadataResponse>} | ||
* | ||
* @throws {Error} If destination isn't a File object. | ||
* @throws {Error} If destination format isn't recognized. | ||
* | ||
* @example | ||
* ``` | ||
* const {Storage} = require('@google-cloud/storage'); | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
* const bigquery = new BigQuery(); | ||
* const dataset = bigquery.dataset('my-dataset'); | ||
* const table = dataset.table('my-table'); | ||
* | ||
* const storage = new Storage({ | ||
* projectId: 'grape-spaceship-123' | ||
* }); | ||
* const extractedFile = storage.bucket('institutions').file('2014.csv'); | ||
* | ||
* //- | ||
* // To use the default options, just pass a | ||
* // {@link https://googleapis.dev/nodejs/storage/latest/File.html| File} object. | ||
* // | ||
* // Note: The exported format type will be inferred by the file's extension. | ||
* // If you wish to override this, or provide an array of destination files, | ||
* // you must provide an `options` object. | ||
* //- | ||
* table.extract(extractedFile, (err, apiResponse) => {}); | ||
* | ||
* //- | ||
* // If you need more customization, pass an `options` object. | ||
* //- | ||
* const options = { | ||
* format: 'json', | ||
* gzip: true | ||
* }; | ||
* | ||
* table.extract(extractedFile, options, (err, apiResponse) => {}); | ||
* | ||
* //- | ||
* // You can also specify multiple destination files. | ||
* //- | ||
* table.extract([ | ||
* storage.bucket('institutions').file('2014.json'), | ||
* storage.bucket('institutions-copy').file('2014.json') | ||
* ], options, (err, apiResponse) => {}); | ||
* | ||
* //- | ||
* // If the callback is omitted, we'll return a Promise. | ||
* //- | ||
* table.extract(extractedFile, options).then((data) => { | ||
* const apiResponse = data[0]; | ||
* }); | ||
* ``` | ||
*/ | ||
extract(destination: File, options?: CreateExtractJobOptions): Promise<JobMetadataResponse>; | ||
extract(destination: File, options: CreateExtractJobOptions, callback?: JobMetadataCallback): void; | ||
extract(destination: File, callback?: JobMetadataCallback): void; | ||
/** | ||
* @callback RowsCallback | ||
* @param {?Error} err Request error, if any. | ||
* @param {array} rows The rows. | ||
* @param {object} apiResponse The full API response. | ||
*/ | ||
/** | ||
* @typedef {array} RowsResponse | ||
* @property {array} 0 The rows. | ||
*/ | ||
getRows(options?: GetRowsOptions): Promise<RowsResponse>; | ||
getRows(options: GetRowsOptions, callback: RowsCallback): void; | ||
getRows(callback: RowsCallback): void; | ||
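The `getRows` overloads above page through a table's stored rows. The snippet below is a minimal sketch of both the promise and callback forms; the dataset/table names and the `maxResults` value are illustrative only:

```
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
const table = bigquery.dataset('my-dataset').table('my-table');

// Promise form: fetch up to 100 rows.
table.getRows({maxResults: 100}).then(([rows]) => {
  rows.forEach(row => console.log(row));
});

// Callback form.
table.getRows((err, rows) => {
  if (err) {
    console.error(err);
    return;
  }
  console.log(`Fetched ${rows.length} rows`);
});
```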
/** | ||
* @callback InsertRowsCallback | ||
* @param {?Error} err Request error, if any. | ||
* @param {?Error} err.errors If present, these represent partial | ||
* failures. It's possible for part of your request to be completed | ||
* successfully, while the other part was not. | ||
* @param {object} apiResponse The full API response. | ||
*/ | ||
/** | ||
* @typedef {array} InsertRowsResponse | ||
* @property {object} 0 The full API response. | ||
*/ | ||
/** | ||
* Stream data into BigQuery one record at a time without running a load job. | ||
* | ||
* If you need to create an entire table from a file, consider using | ||
* {@link Table#load} instead. | ||
* | ||
* Note, if a table was recently created, inserts may fail until the table | ||
* is consistent within BigQuery. If a `schema` is supplied, this method will | ||
* automatically retry those failed inserts, and it will even create the | ||
* table with the provided schema if it does not exist. | ||
* | ||
* See {@link https://cloud.google.com/bigquery/docs/reference/v2/tabledata/insertAll| Tabledata: insertAll API Documentation} | ||
* See {@link https://cloud.google.com/bigquery/quotas#streaming_inserts| Streaming Insert Limits} | ||
* See {@link https://developers.google.com/bigquery/troubleshooting-errors| Troubleshooting Errors} | ||
* | ||
* @param {object|object[]} rows The rows to insert into the table. | ||
* @param {object} [options] Configuration object. | ||
* @param {boolean} [options.createInsertId=true] Automatically insert a | ||
* default row id when one is not provided. | ||
* @param {boolean} [options.ignoreUnknownValues=false] Accept rows that contain | ||
* values that do not match the schema. The unknown values are ignored. | ||
* @param {number} [options.partialRetries=3] Number of times to retry | ||
* inserting rows for cases of partial failures. | ||
* @param {boolean} [options.raw] If `true`, the `rows` argument is expected to | ||
* be formatted according to the | ||
* {@link https://cloud.google.com/bigquery/docs/reference/v2/tabledata/insertAll| specification}. | ||
* @param {string|object} [options.schema] If provided, the table will be | ||
* created automatically if it doesn't already exist. Note that this can | ||
* take longer than 2 minutes to complete. A comma-separated list of | ||
* name:type pairs. | ||
* Valid types are "string", "integer", "float", "boolean", and | ||
* "timestamp". If the type is omitted, it is assumed to be "string". | ||
* Example: "name:string, age:integer". Schemas can also be specified as a | ||
* JSON array of fields, which allows for nested and repeated fields. See | ||
* a {@link http://goo.gl/sl8Dmg| Table resource} for more detailed information. | ||
* @param {boolean} [options.skipInvalidRows=false] Insert all valid rows of a | ||
* request, even if invalid rows exist. | ||
* @param {string} [options.templateSuffix] Treat the destination table as a | ||
* base template, and insert the rows into an instance table named | ||
* "{destination}{templateSuffix}". BigQuery will manage creation of | ||
* the instance table, using the schema of the base template table. See | ||
* {@link https://cloud.google.com/bigquery/streaming-data-into-bigquery#template-tables| Automatic table creation using template tables} | ||
* for considerations when working with template tables. | ||
* @param {InsertRowsCallback} [callback] The callback function. | ||
* @param {?error} callback.err An error returned while making this request. | ||
* @param {object[]} callback.err.errors If present, these represent partial | ||
* failures. It's possible for part of your request to be completed | ||
* successfully, while the other part was not. | ||
* @param {object} callback.apiResponse The full API response. | ||
* @returns {Promise<InsertRowsResponse>} | ||
* | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
* const bigquery = new BigQuery(); | ||
* const dataset = bigquery.dataset('my-dataset'); | ||
* const table = dataset.table('my-table'); | ||
* | ||
* //- | ||
* // Insert a single row. | ||
* //- | ||
* table.insert({ | ||
* INSTNM: 'Motion Picture Institute of Michigan', | ||
* CITY: 'Troy', | ||
* STABBR: 'MI' | ||
* }, insertHandler); | ||
* | ||
* //- | ||
* // Insert multiple rows at a time. | ||
* //- | ||
* const rows = [ | ||
* { | ||
* INSTNM: 'Motion Picture Institute of Michigan', | ||
* CITY: 'Troy', | ||
* STABBR: 'MI' | ||
* }, | ||
* // ... | ||
* ]; | ||
* | ||
* table.insert(rows, insertHandler); | ||
* | ||
* //- | ||
* // Insert a row according to the <a href="https://cloud.google.com/bigquery/docs/reference/v2/tabledata/insertAll">specification</a>. | ||
* //- | ||
* const row = { | ||
* insertId: '1', | ||
* json: { | ||
* INSTNM: 'Motion Picture Institute of Michigan', | ||
* CITY: 'Troy', | ||
* STABBR: 'MI' | ||
* } | ||
* }; | ||
* | ||
* const options = { | ||
* raw: true | ||
* }; | ||
* | ||
* table.insert(row, options, insertHandler); | ||
* | ||
* //- | ||
* // Handling the response. See <a href="https://developers.google.com/bigquery/troubleshooting-errors">Troubleshooting Errors</a> for best practices on how to handle errors. | ||
* //- | ||
* function insertHandler(err, apiResponse) { | ||
* if (err) { | ||
* // An API error or partial failure occurred. | ||
* | ||
* if (err.name === 'PartialFailureError') { | ||
* // Some rows failed to insert, while others may have succeeded. | ||
* | ||
* // err.errors (object[]): | ||
* // err.errors[].row (original row object passed to `insert`) | ||
* // err.errors[].errors[].reason | ||
* // err.errors[].errors[].message | ||
* } | ||
* } | ||
* } | ||
* | ||
* //- | ||
* // If the callback is omitted, we'll return a Promise. | ||
* //- | ||
* table.insert(rows) | ||
* .then((data) => { | ||
* const apiResponse = data[0]; | ||
* }) | ||
* .catch((err) => { | ||
* // An API error or partial failure occurred. | ||
* | ||
* if (err.name === 'PartialFailureError') { | ||
* // Some rows failed to insert, while others may have succeeded. | ||
* | ||
* // err.errors (object[]): | ||
* // err.errors[].row (original row object passed to `insert`) | ||
* // err.errors[].errors[].reason | ||
* // err.errors[].errors[].message | ||
* } | ||
* }); | ||
* ``` | ||
*/ | ||
insert(rows: RowMetadata | RowMetadata[], options?: InsertRowsOptions): Promise<InsertRowsResponse>; | ||
@@ -311,15 +956,160 @@ insert(rows: RowMetadata | RowMetadata[], options: InsertRowsOptions, callback: InsertRowsCallback): void; | ||
private _insert; | ||
/** | ||
* Load data from a local file or Storage {@link | ||
* https://googleapis.dev/nodejs/storage/latest/File.html File}. | ||
* | ||
* By loading data this way, you create a load job that will run your data | ||
* load asynchronously. If you would like instantaneous access to your data, | ||
* insert it using {@link Table#insert}. | ||
* | ||
* Note: The file type will be inferred by the given file's extension. If you | ||
* wish to override this, you must provide `metadata.format`. | ||
* | ||
* @param {string|File} source The source file to load. A filepath as a string | ||
* or a {@link | ||
* https://googleapis.dev/nodejs/storage/latest/File.html File} | ||
* object. | ||
* @param {object} [metadata] Metadata to set with the load operation. The | ||
* metadata object should be in the format of the | ||
* {@link https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad| `configuration.load`} | ||
* property of a Jobs resource. | ||
* @param {string} [metadata.format] The format the data being loaded is in. | ||
* Allowed options are "AVRO", "CSV", "JSON", "ORC", or "PARQUET". | ||
* @param {string} [metadata.jobId] Custom id for the underlying job. | ||
* @param {string} [metadata.jobPrefix] Prefix to apply to the underlying job | ||
* id. | ||
* @param {JobMetadataCallback} [callback] The callback function. | ||
* @param {?error} callback.err An error returned while making this request | ||
* @param {object} callback.apiResponse The full API response. | ||
* @returns {Promise<JobMetadataResponse>} | ||
* | ||
* @throws {Error} If the source isn't a string file name or a File instance. | ||
* | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
* const bigquery = new BigQuery(); | ||
* const dataset = bigquery.dataset('my-dataset'); | ||
* const table = dataset.table('my-table'); | ||
* | ||
* //- | ||
* // Load data from a local file. | ||
* //- | ||
* table.load('./institutions.csv', (err, apiResponse) => {}); | ||
* | ||
* //- | ||
* // You may also pass in metadata in the format of a Jobs resource. See | ||
* // (https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad) | ||
* // for a full list of supported values. | ||
* //- | ||
* const metadata = { | ||
* encoding: 'ISO-8859-1', | ||
* sourceFormat: 'NEWLINE_DELIMITED_JSON' | ||
* }; | ||
* | ||
* table.load('./my-data.csv', metadata, (err, apiResponse) => {}); | ||
* | ||
* //- | ||
* // Load data from a file in your Cloud Storage bucket. | ||
* //- | ||
* const {Storage} = require('@google-cloud/storage'); | ||
* const storage = new Storage({ | ||
* projectId: 'grape-spaceship-123' | ||
* }); | ||
* const data = storage.bucket('institutions').file('data.csv'); | ||
* table.load(data, (err, apiResponse) => {}); | ||
* | ||
* //- | ||
* // Load data from multiple files in your Cloud Storage bucket(s). | ||
* //- | ||
* table.load([ | ||
* storage.bucket('institutions').file('2011.csv'), | ||
* storage.bucket('institutions').file('2012.csv') | ||
* ], (err, apiResponse) => {}); | ||
* | ||
* //- | ||
* // If the callback is omitted, we'll return a Promise. | ||
* //- | ||
* table.load(data).then((data) => { | ||
* const apiResponse = data[0]; | ||
* }); | ||
* ``` | ||
*/ | ||
load(source: string | File, metadata?: JobLoadMetadata): Promise<JobMetadataResponse>; | ||
load(source: string | File, metadata: JobLoadMetadata, callback: JobMetadataCallback): void; | ||
load(source: string | File, callback: JobMetadataCallback): void; | ||
/** | ||
* Run a query scoped to your dataset. | ||
* | ||
* See {@link BigQuery#query} for full documentation of this method. | ||
* @param {object} query See {@link BigQuery#query} for full documentation of this method. | ||
* @param {function} [callback] See {@link BigQuery#query} for full documentation of this method. | ||
* @returns {Promise<SimpleQueryRowsResponse>} | ||
*/ | ||
query(query: Query): Promise<SimpleQueryRowsResponse>; | ||
query(query: string): Promise<SimpleQueryRowsResponse>; | ||
query(query: Query, callback: SimpleQueryRowsCallback): void; | ||
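As a sketch of the table-scoped `query` overloads above, either a plain SQL string or a {@link Query} object is accepted; the query text, names, and parameter are illustrative only:

```
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
const table = bigquery.dataset('my-dataset').table('my-table');

// Pass a SQL string...
table.query('SELECT COUNT(*) AS total FROM `my-dataset.my-table`')
  .then(([rows]) => console.log(rows[0].total));

// ...or a Query object with named parameters.
table.query({
  query: 'SELECT * FROM `my-dataset.my-table` WHERE STABBR = @state',
  params: {state: 'MI'},
}, (err, rows) => {
  if (!err) console.log(rows);
});
```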
/** | ||
* Set the metadata on the table. | ||
* | ||
* See {@link https://cloud.google.com/bigquery/docs/reference/v2/tables/patch| Tables: patch API Documentation} | ||
* | ||
* @param {object} metadata The metadata key/value object to set. | ||
* @param {string} metadata.description A user-friendly description of the | ||
* table. | ||
* @param {string} metadata.name A descriptive name for the table. | ||
* @param {string|object} metadata.schema A comma-separated list of name:type | ||
* pairs. Valid types are "string", "integer", "float", "boolean", | ||
* "bytes", "record", and "timestamp". If the type is omitted, it is assumed | ||
* to be "string". Example: "name:string, age:integer". Schemas can also be | ||
* specified as a JSON array of fields, which allows for nested and | ||
* repeated fields. See a {@link http://goo.gl/sl8Dmg| Table resource} for more | ||
* detailed information. | ||
* @param {function} [callback] The callback function. | ||
* @param {?error} callback.err An error returned while making this request. | ||
* @param {object} callback.apiResponse The full API response. | ||
* @returns {Promise<common.SetMetadataResponse>} | ||
* | ||
* @example | ||
* ``` | ||
* const {BigQuery} = require('@google-cloud/bigquery'); | ||
* const bigquery = new BigQuery(); | ||
* const dataset = bigquery.dataset('my-dataset'); | ||
* const table = dataset.table('my-table'); | ||
* | ||
* const metadata = { | ||
* name: 'My recipes', | ||
* description: 'A table for storing my recipes.', | ||
* schema: 'name:string, servings:integer, cookingTime:float, quick:boolean' | ||
* }; | ||
* | ||
* table.setMetadata(metadata, (err, metadata, apiResponse) => {}); | ||
* | ||
* //- | ||
* // If the callback is omitted, we'll return a Promise. | ||
* //- | ||
* table.setMetadata(metadata).then((data) => { | ||
* const metadata = data[0]; | ||
* const apiResponse = data[1]; | ||
* }); | ||
* ``` | ||
*/ | ||
setMetadata(metadata: SetTableMetadataOptions): Promise<common.SetMetadataResponse>; | ||
setMetadata(metadata: SetTableMetadataOptions, callback: common.ResponseCallback): void; | ||
/** | ||
* Get the IAM access control policy for the table. | ||
* @returns {Promise<PolicyResponse>} | ||
*/ | ||
getIamPolicy(optionsOrCallback?: GetPolicyOptions | PolicyCallback): Promise<PolicyResponse>; | ||
getIamPolicy(options: GetPolicyOptions, callback: PolicyCallback): void; | ||
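A minimal sketch of reading the table's IAM policy with the overloads above (dataset and table names are illustrative; the resolved value is a {@link PolicyResponse} whose first element is the policy):

```
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
const table = bigquery.dataset('my-dataset').table('my-table');

table.getIamPolicy().then(([policy]) => {
  // `policy.bindings` lists the role bindings currently attached to the table.
  console.log(policy.bindings);
});
```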
/** | ||
* Set the IAM access control policy for the table. | ||
* @returns {Promise<PolicyResponse>} | ||
*/ | ||
setIamPolicy(policy: Policy, options?: SetPolicyOptions): Promise<PolicyResponse>; | ||
setIamPolicy(policy: Policy, options: SetPolicyOptions, callback: PolicyCallback): void; | ||
setIamPolicy(policy: Policy, callback: PolicyCallback): void; | ||
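A common pattern with the `setIamPolicy` overloads above is read-modify-write: fetch the current policy, append a binding, then write it back. This is a sketch under that assumption; the role and member strings are illustrative only:

```
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
const table = bigquery.dataset('my-dataset').table('my-table');

async function grantViewer() {
  // Read the current policy, append a binding, then write it back.
  const [policy] = await table.getIamPolicy();
  policy.bindings = policy.bindings || [];
  policy.bindings.push({
    role: 'roles/bigquery.dataViewer',
    members: ['user:jane@example.com'],
  });
  const [updatedPolicy] = await table.setIamPolicy(policy);
  console.log(updatedPolicy.bindings);
}
```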
/** | ||
* Test the IAM permissions the caller has on the table. | ||
* @returns {Promise<PermissionsResponse>} | ||
*/ | ||
testIamPermissions(permissions: string | string[]): Promise<PermissionsResponse>; | ||
@@ -326,0 +1116,0 @@ testIamPermissions(permissions: string | string[], callback: PermissionsCallback): void; |
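To check specific permissions before attempting an operation, the `testIamPermissions` overloads above accept a single permission string or an array. A minimal sketch; the permission names follow the BigQuery IAM documentation and the table names are illustrative:

```
const {BigQuery} = require('@google-cloud/bigquery');
const bigquery = new BigQuery();
const table = bigquery.dataset('my-dataset').table('my-table');

table.testIamPermissions([
  'bigquery.tables.get',
  'bigquery.tables.getData',
]).then(([response]) => {
  // `response.permissions` contains the subset the caller actually holds.
  console.log(response.permissions);
});
```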
@@ -7,2 +7,9 @@ # Changelog | ||
### [5.9.2](https://www.github.com/googleapis/nodejs-bigquery/compare/v5.9.1...v5.9.2) (2021-11-16) | ||
### Bug Fixes | ||
* **cloud-rad:** move comments for TSDoc ([#1040](https://www.github.com/googleapis/nodejs-bigquery/issues/1040)) ([93c5e14](https://www.github.com/googleapis/nodejs-bigquery/commit/93c5e14a91418a293775e417b68ab22732bc48e7)) | ||
### [5.9.1](https://www.github.com/googleapis/nodejs-bigquery/compare/v5.9.0...v5.9.1) (2021-10-12) | ||
@@ -9,0 +16,0 @@ |
{ | ||
"name": "@google-cloud/bigquery", | ||
"description": "Google BigQuery Client Library for Node.js", | ||
"version": "5.9.1", | ||
"version": "5.9.2", | ||
"license": "Apache-2.0", | ||
@@ -72,6 +72,6 @@ "author": "Google LLC", | ||
"@types/ncp": "^2.0.1", | ||
"@types/node": "^14.0.0", | ||
"@types/node": "^16.0.0", | ||
"@types/proxyquire": "^1.3.28", | ||
"@types/sinon": "^10.0.0", | ||
"@types/tmp": "0.2.1", | ||
"@types/tmp": "0.2.2", | ||
"@types/uuid": "^8.0.0", | ||
@@ -91,3 +91,3 @@ "c8": "^7.0.0", | ||
"proxyquire": "^2.1.0", | ||
"sinon": "^11.0.0", | ||
"sinon": "^12.0.0", | ||
"tmp": "0.2.1", | ||
@@ -94,0 +94,0 @@ "typescript": "^3.8.3" |
@@ -156,2 +156,3 @@ [//]: # "This README.md file is auto-generated, all changes to this file will be lost." | ||
| Query External GCS Perm | [source code](https://github.com/googleapis/nodejs-bigquery/blob/main/samples/queryExternalGCSPerm.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/queryExternalGCSPerm.js,samples/README.md) | | ||
| Query External GCS Temp | [source code](https://github.com/googleapis/nodejs-bigquery/blob/main/samples/queryExternalGCSTemp.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/queryExternalGCSTemp.js,samples/README.md) | | ||
| Query Legacy | [source code](https://github.com/googleapis/nodejs-bigquery/blob/main/samples/queryLegacy.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/queryLegacy.js,samples/README.md) | | ||
@@ -158,0 +159,0 @@ | Query Legacy Large Results | [source code](https://github.com/googleapis/nodejs-bigquery/blob/main/samples/queryLegacyLargeResults.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-bigquery&page=editor&open_in_editor=samples/queryLegacyLargeResults.js,samples/README.md) | |