diff --git a/bigquery/api/snippets/browse_table.php b/bigquery/api/snippets/browse_table.php
index 8d9ee4824e..74734b6a7a 100644
--- a/bigquery/api/snippets/browse_table.php
+++ b/bigquery/api/snippets/browse_table.php
@@ -25,7 +25,7 @@
 require_once __DIR__ . '/../vendor/autoload.php';
 
 if (count($argv) < 4 || count($argv) > 5) {
-    return print("Usage: php snippets/browse_table.php PROJECT_ID DATASET_ID TABLE_ID [NUM_RESULTS]\n");
+    return printf("Usage: php %s PROJECT_ID DATASET_ID TABLE_ID [NUM_RESULTS]\n", __FILE__);
 }
 list($_, $projectId, $datasetId, $tableId) = $argv;
 $maxResults = isset($argv[4]) ? $argv[4] : 10;
diff --git a/bigquery/api/snippets/copy_table.php b/bigquery/api/snippets/copy_table.php
index 2e3ec7491c..6157633f4e 100644
--- a/bigquery/api/snippets/copy_table.php
+++ b/bigquery/api/snippets/copy_table.php
@@ -25,7 +25,7 @@
 require_once __DIR__ . '/../vendor/autoload.php';
 
 if (count($argv) != 5) {
-    return print("Usage: php snippets/copy_table.php PROJECT_ID DATASET_ID SOURCE_TABLE_ID DESTINATION_TABLE_ID\n");
+    return printf("Usage: php %s PROJECT_ID DATASET_ID SOURCE_TABLE_ID DESTINATION_TABLE_ID\n", __FILE__);
 }
 list($_, $projectId, $datasetId, $sourceTableId, $destinationTableId) = $argv;
 
diff --git a/bigquery/api/snippets/create_dataset.php b/bigquery/api/snippets/create_dataset.php
index 92a5546662..46f07eeb59 100644
--- a/bigquery/api/snippets/create_dataset.php
+++ b/bigquery/api/snippets/create_dataset.php
@@ -25,7 +25,7 @@
 require_once __DIR__ . '/../vendor/autoload.php';
 
 if (count($argv) != 3) {
-    return print("Usage: php snippets/create_dataset.php PROJECT_ID DATASET_ID\n");
+    return printf("Usage: php %s PROJECT_ID DATASET_ID\n", __FILE__);
 }
 list($_, $projectId, $datasetId) = $argv;
 
diff --git a/bigquery/api/snippets/create_table.php b/bigquery/api/snippets/create_table.php
index 43607b6ba6..0c8e69e672 100644
--- a/bigquery/api/snippets/create_table.php
+++ b/bigquery/api/snippets/create_table.php
@@ -25,7 +25,7 @@
 require_once __DIR__ . '/../vendor/autoload.php';
 
 if (count($argv) < 4 || count($argv) > 5) {
-    return print("Usage: php snippets/create_table.php PROJECT_ID DATASET_ID TABLE_ID [FIELDS]\n");
+    return printf("Usage: php %s PROJECT_ID DATASET_ID TABLE_ID [FIELDS]\n", __FILE__);
 }
 list($_, $projectId, $datasetId, $tableId) = $argv;
 $fields = isset($argv[4]) ? json_decode($argv[4]) : [['name' => 'field1', 'type' => 'string']];
diff --git a/bigquery/api/snippets/delete_dataset.php b/bigquery/api/snippets/delete_dataset.php
index e6a83b5d38..54fb0e1e99 100644
--- a/bigquery/api/snippets/delete_dataset.php
+++ b/bigquery/api/snippets/delete_dataset.php
@@ -25,7 +25,7 @@
 require_once __DIR__ . '/../vendor/autoload.php';
 
 if (count($argv) > 3) {
-    return print("Usage: php snippets/delete_dataset.php PROJECT_ID DATASET_ID\n");
+    return printf("Usage: php %s PROJECT_ID DATASET_ID\n", __FILE__);
 }
 list($_, $projectId, $datasetId) = $argv;
 
diff --git a/bigquery/api/snippets/delete_table.php b/bigquery/api/snippets/delete_table.php
index 91b87320ab..f3ab2ac773 100644
--- a/bigquery/api/snippets/delete_table.php
+++ b/bigquery/api/snippets/delete_table.php
@@ -25,7 +25,7 @@
 require_once __DIR__ . '/../vendor/autoload.php';
 
 if (count($argv) != 4) {
-    return print("Usage: php snippets/delete_table.php PROJECT_ID DATASET_ID TABLE_ID\n");
+    return printf("Usage: php %s PROJECT_ID DATASET_ID TABLE_ID\n", __FILE__);
 }
 list($_, $projectId, $datasetId, $tableId) = $argv;
 
diff --git a/bigquery/api/snippets/extract_table.php b/bigquery/api/snippets/extract_table.php
index 13825f8b9e..52564ab207 100644
--- a/bigquery/api/snippets/extract_table.php
+++ b/bigquery/api/snippets/extract_table.php
@@ -25,7 +25,7 @@
 require_once __DIR__ . '/../vendor/autoload.php';
 
 if (count($argv) < 6 || count($argv) > 7) {
-    return print("Usage: php snippets/extract_table.php PROJECT_ID DATASET_ID TABLE_ID BUCKET_NAME OBJECT_NAME [FORMAT]\n");
+    return printf("Usage: php %s PROJECT_ID DATASET_ID TABLE_ID BUCKET_NAME OBJECT_NAME [FORMAT]\n", __FILE__);
 }
 list($_, $projectId, $datasetId, $tableId, $bucketName, $objectName) = $argv;
 
diff --git a/bigquery/api/snippets/import_from_local_csv.php b/bigquery/api/snippets/import_from_local_csv.php
index aaf80a1bee..d12e117652 100644
--- a/bigquery/api/snippets/import_from_local_csv.php
+++ b/bigquery/api/snippets/import_from_local_csv.php
@@ -25,7 +25,7 @@
 require_once __DIR__ . '/../vendor/autoload.php';
 
 if (count($argv) != 5) {
-    return print("Usage: php snippets/import_from_local_csv.php PROJECT_ID DATASET_ID TABLE_ID SOURCE\n");
+    return printf("Usage: php %s PROJECT_ID DATASET_ID TABLE_ID SOURCE\n", __FILE__);
 }
 list($_, $projectId, $datasetId, $tableId, $source) = $argv;
 
diff --git a/bigquery/api/snippets/import_from_storage_csv.php b/bigquery/api/snippets/import_from_storage_csv.php
index 650cf64733..6cf47990b2 100644
--- a/bigquery/api/snippets/import_from_storage_csv.php
+++ b/bigquery/api/snippets/import_from_storage_csv.php
@@ -24,7 +24,7 @@
 // Include Google Cloud dependendencies using Composer
 require_once __DIR__ . '/../vendor/autoload.php';
 
 if (count($argv) != 3) {
-    return print("Usage: php snippets/import_from_storage.php PROJECT_ID DATASET_ID\n");
+    return printf("Usage: php %s PROJECT_ID DATASET_ID\n", __FILE__);
 }
 list($_, $projectId, $datasetId) = $argv;
diff --git a/bigquery/api/snippets/import_from_storage_csv_autodetect.php b/bigquery/api/snippets/import_from_storage_csv_autodetect.php
new file mode 100644
index 0000000000..1d7853ab94
--- /dev/null
+++ b/bigquery/api/snippets/import_from_storage_csv_autodetect.php
@@ -0,0 +1,66 @@
+# [START bigquery_load_table_gcs_csv_autodetect]
+use Google\Cloud\BigQuery\BigQueryClient;
+use Google\Cloud\Core\ExponentialBackoff;
+
+$bigQuery = new BigQueryClient([
+    'projectId' => $projectId,
+]);
+$dataset = $bigQuery->dataset($datasetId);
+$table = $dataset->table('us_states');
+
+// create the import job
+$gcsUri = 'gs://cloud-samples-data/bigquery/us-states/us-states.csv';
+$loadConfig = $table->loadFromStorage($gcsUri)->autodetect(true)->skipLeadingRows(1);
+$job = $table->runJob($loadConfig);
+// poll the job until it is complete
+$backoff = new ExponentialBackoff(10);
+$backoff->execute(function () use ($job) {
+    print('Waiting for job to complete' . PHP_EOL);
+    $job->reload();
+    if (!$job->isComplete()) {
+        throw new Exception('Job has not yet completed', 500);
+    }
+});
+// check if the job has errors
+if (isset($job->info()['status']['errorResult'])) {
+    $error = $job->info()['status']['errorResult']['message'];
+    printf('Error running job: %s' . PHP_EOL, $error);
+} else {
+    print('Data imported successfully' . PHP_EOL);
+}
+# [END bigquery_load_table_gcs_csv_autodetect]
diff --git a/bigquery/api/snippets/import_from_storage_csv_truncate.php b/bigquery/api/snippets/import_from_storage_csv_truncate.php
new file mode 100644
index 0000000000..35b8498756
--- /dev/null
+++ b/bigquery/api/snippets/import_from_storage_csv_truncate.php
@@ -0,0 +1,68 @@
+# [START bigquery_load_table_gcs_csv_truncate]
+use Google\Cloud\BigQuery\BigQueryClient;
+use Google\Cloud\Core\ExponentialBackoff;
+
+$bigQuery = new BigQueryClient([
+    'projectId' => $projectId,
+]);
+$table = $bigQuery->dataset($datasetId)->table($tableId);
+
+// create the import job
+$gcsUri = 'gs://cloud-samples-data/bigquery/us-states/us-states.csv';
+$loadConfig = $table->loadFromStorage($gcsUri)->skipLeadingRows(1)->writeDisposition('WRITE_TRUNCATE');
+$job = $table->runJob($loadConfig);
+
+// poll the job until it is complete
+$backoff = new ExponentialBackoff(10);
+$backoff->execute(function () use ($job) {
+    print('Waiting for job to complete' . PHP_EOL);
+    $job->reload();
+    if (!$job->isComplete()) {
+        throw new Exception('Job has not yet completed', 500);
+    }
+});
+
+// check if the job has errors
+if (isset($job->info()['status']['errorResult'])) {
+    $error = $job->info()['status']['errorResult']['message'];
+    printf('Error running job: %s' . PHP_EOL, $error);
+} else {
+    print('Data imported successfully' . PHP_EOL);
+}
+# [END bigquery_load_table_gcs_csv_truncate]
diff --git a/bigquery/api/snippets/import_from_storage_json.php b/bigquery/api/snippets/import_from_storage_json.php
index 21244a8215..647e5bcb20 100644
--- a/bigquery/api/snippets/import_from_storage_json.php
+++ b/bigquery/api/snippets/import_from_storage_json.php
@@ -24,7 +24,7 @@
 // Include Google Cloud dependendencies using Composer
 require_once __DIR__ . '/../vendor/autoload.php';
 
 if (count($argv) != 3) {
-    return print("Usage: php snippets/import_from_storage.php PROJECT_ID DATASET_ID\n");
+    return printf("Usage: php %s PROJECT_ID DATASET_ID\n", __FILE__);
 }
 list($_, $projectId, $datasetId) = $argv;
diff --git a/bigquery/api/snippets/import_from_storage_json_autodetect.php b/bigquery/api/snippets/import_from_storage_json_autodetect.php
new file mode 100644
index 0000000000..fbb3957e8d
--- /dev/null
+++ b/bigquery/api/snippets/import_from_storage_json_autodetect.php
@@ -0,0 +1,66 @@
+# [START bigquery_load_table_gcs_json_autodetect]
+use Google\Cloud\BigQuery\BigQueryClient;
+use Google\Cloud\Core\ExponentialBackoff;
+
+$bigQuery = new BigQueryClient([
+    'projectId' => $projectId,
+]);
+$dataset = $bigQuery->dataset($datasetId);
+$table = $dataset->table('us_states');
+
+// create the import job
+$gcsUri = 'gs://cloud-samples-data/bigquery/us-states/us-states.json';
+$loadConfig = $table->loadFromStorage($gcsUri)->autodetect(true)->sourceFormat('NEWLINE_DELIMITED_JSON');
+$job = $table->runJob($loadConfig);
+// poll the job until it is complete
+$backoff = new ExponentialBackoff(10);
+$backoff->execute(function () use ($job) {
+    print('Waiting for job to complete' . PHP_EOL);
+    $job->reload();
+    if (!$job->isComplete()) {
+        throw new Exception('Job has not yet completed', 500);
+    }
+});
+// check if the job has errors
+if (isset($job->info()['status']['errorResult'])) {
+    $error = $job->info()['status']['errorResult']['message'];
+    printf('Error running job: %s' . PHP_EOL, $error);
+} else {
+    print('Data imported successfully' . PHP_EOL);
+}
+# [END bigquery_load_table_gcs_json_autodetect]
diff --git a/bigquery/api/snippets/import_from_storage_json_truncate.php b/bigquery/api/snippets/import_from_storage_json_truncate.php
new file mode 100644
index 0000000000..6c9ed684e0
--- /dev/null
+++ b/bigquery/api/snippets/import_from_storage_json_truncate.php
@@ -0,0 +1,68 @@
+# [START bigquery_load_table_gcs_json_truncate]
+use Google\Cloud\BigQuery\BigQueryClient;
+use Google\Cloud\Core\ExponentialBackoff;
+
+$bigQuery = new BigQueryClient([
+    'projectId' => $projectId,
+]);
+$table = $bigQuery->dataset($datasetId)->table($tableId);
+
+// create the import job
+$gcsUri = 'gs://cloud-samples-data/bigquery/us-states/us-states.json';
+$loadConfig = $table->loadFromStorage($gcsUri)->sourceFormat('NEWLINE_DELIMITED_JSON')->writeDisposition('WRITE_TRUNCATE');
+$job = $table->runJob($loadConfig);
+
+// poll the job until it is complete
+$backoff = new ExponentialBackoff(10);
+$backoff->execute(function () use ($job) {
+    print('Waiting for job to complete' . PHP_EOL);
+    $job->reload();
+    if (!$job->isComplete()) {
+        throw new Exception('Job has not yet completed', 500);
+    }
+});
+
+// check if the job has errors
+if (isset($job->info()['status']['errorResult'])) {
+    $error = $job->info()['status']['errorResult']['message'];
+    printf('Error running job: %s' . PHP_EOL, $error);
+} else {
+    print('Data imported successfully' . PHP_EOL);
+}
+# [END bigquery_load_table_gcs_json_truncate]
diff --git a/bigquery/api/snippets/import_from_storage_orc.php b/bigquery/api/snippets/import_from_storage_orc.php
new file mode 100644
index 0000000000..27bad15cd9
--- /dev/null
+++ b/bigquery/api/snippets/import_from_storage_orc.php
@@ -0,0 +1,66 @@
+# [START bigquery_load_table_gcs_orc]
+use Google\Cloud\BigQuery\BigQueryClient;
+use Google\Cloud\Core\ExponentialBackoff;
+
+$bigQuery = new BigQueryClient([
+    'projectId' => $projectId,
+]);
+$dataset = $bigQuery->dataset($datasetId);
+$table = $dataset->table('us_states');
+
+// create the import job
+$gcsUri = 'gs://cloud-samples-data/bigquery/us-states/us-states.orc';
+$loadConfig = $table->loadFromStorage($gcsUri)->sourceFormat('ORC');
+$job = $table->runJob($loadConfig);
+// poll the job until it is complete
+$backoff = new ExponentialBackoff(10);
+$backoff->execute(function () use ($job) {
+    print('Waiting for job to complete' . PHP_EOL);
+    $job->reload();
+    if (!$job->isComplete()) {
+        throw new Exception('Job has not yet completed', 500);
+    }
+});
+// check if the job has errors
+if (isset($job->info()['status']['errorResult'])) {
+    $error = $job->info()['status']['errorResult']['message'];
+    printf('Error running job: %s' . PHP_EOL, $error);
+} else {
+    print('Data imported successfully' . PHP_EOL);
+}
+# [END bigquery_load_table_gcs_orc]
diff --git a/bigquery/api/snippets/import_from_storage_orc_truncate.php b/bigquery/api/snippets/import_from_storage_orc_truncate.php
new file mode 100644
index 0000000000..839839eefd
--- /dev/null
+++ b/bigquery/api/snippets/import_from_storage_orc_truncate.php
@@ -0,0 +1,68 @@
+# [START bigquery_load_table_gcs_orc_truncate]
+use Google\Cloud\BigQuery\BigQueryClient;
+use Google\Cloud\Core\ExponentialBackoff;
+
+$bigQuery = new BigQueryClient([
+    'projectId' => $projectId,
+]);
+$table = $bigQuery->dataset($datasetId)->table($tableId);
+
+// create the import job
+$gcsUri = 'gs://cloud-samples-data/bigquery/us-states/us-states.orc';
+$loadConfig = $table->loadFromStorage($gcsUri)->sourceFormat('ORC')->writeDisposition('WRITE_TRUNCATE');
+$job = $table->runJob($loadConfig);
+
+// poll the job until it is complete
+$backoff = new ExponentialBackoff(10);
+$backoff->execute(function () use ($job) {
+    print('Waiting for job to complete' . PHP_EOL);
+    $job->reload();
+    if (!$job->isComplete()) {
+        throw new Exception('Job has not yet completed', 500);
+    }
+});
+
+// check if the job has errors
+if (isset($job->info()['status']['errorResult'])) {
+    $error = $job->info()['status']['errorResult']['message'];
+    printf('Error running job: %s' . PHP_EOL, $error);
+} else {
+    print('Data imported successfully' . PHP_EOL);
+}
+# [END bigquery_load_table_gcs_orc_truncate]
diff --git a/bigquery/api/snippets/import_from_storage_parquet.php b/bigquery/api/snippets/import_from_storage_parquet.php
new file mode 100644
index 0000000000..d9555f5c0e
--- /dev/null
+++ b/bigquery/api/snippets/import_from_storage_parquet.php
@@ -0,0 +1,66 @@
+# [START bigquery_load_table_gcs_parquet]
+use Google\Cloud\BigQuery\BigQueryClient;
+use Google\Cloud\Core\ExponentialBackoff;
+
+$bigQuery = new BigQueryClient([
+    'projectId' => $projectId,
+]);
+$dataset = $bigQuery->dataset($datasetId);
+$table = $dataset->table('us_states');
+
+// create the import job
+$gcsUri = 'gs://cloud-samples-data/bigquery/us-states/us-states.parquet';
+$loadConfig = $table->loadFromStorage($gcsUri)->sourceFormat('PARQUET');
+$job = $table->runJob($loadConfig);
+// poll the job until it is complete
+$backoff = new ExponentialBackoff(10);
+$backoff->execute(function () use ($job) {
+    print('Waiting for job to complete' . PHP_EOL);
+    $job->reload();
+    if (!$job->isComplete()) {
+        throw new Exception('Job has not yet completed', 500);
+    }
+});
+// check if the job has errors
+if (isset($job->info()['status']['errorResult'])) {
+    $error = $job->info()['status']['errorResult']['message'];
+    printf('Error running job: %s' . PHP_EOL, $error);
+} else {
+    print('Data imported successfully' . PHP_EOL);
+}
+# [END bigquery_load_table_gcs_parquet]
diff --git a/bigquery/api/snippets/import_from_storage_parquet_truncate.php b/bigquery/api/snippets/import_from_storage_parquet_truncate.php
new file mode 100644
index 0000000000..89ed4c1138
--- /dev/null
+++ b/bigquery/api/snippets/import_from_storage_parquet_truncate.php
@@ -0,0 +1,68 @@
+# [START bigquery_load_table_gcs_parquet_truncate]
+use Google\Cloud\BigQuery\BigQueryClient;
+use Google\Cloud\Core\ExponentialBackoff;
+
+$bigQuery = new BigQueryClient([
+    'projectId' => $projectId,
+]);
+$table = $bigQuery->dataset($datasetId)->table($tableId);
+
+// create the import job
+$gcsUri = 'gs://cloud-samples-data/bigquery/us-states/us-states.parquet';
+$loadConfig = $table->loadFromStorage($gcsUri)->sourceFormat('PARQUET')->writeDisposition('WRITE_TRUNCATE');
+$job = $table->runJob($loadConfig);
+
+// poll the job until it is complete
+$backoff = new ExponentialBackoff(10);
+$backoff->execute(function () use ($job) {
+    print('Waiting for job to complete' . PHP_EOL);
+    $job->reload();
+    if (!$job->isComplete()) {
+        throw new Exception('Job has not yet completed', 500);
+    }
+});
+
+// check if the job has errors
+if (isset($job->info()['status']['errorResult'])) {
+    $error = $job->info()['status']['errorResult']['message'];
+    printf('Error running job: %s' . PHP_EOL, $error);
+} else {
+    print('Data imported successfully' . PHP_EOL);
+}
+# [END bigquery_load_table_gcs_parquet_truncate]
diff --git a/bigquery/api/snippets/insert_sql.php b/bigquery/api/snippets/insert_sql.php
index 14953e0902..b9501a8e42 100644
--- a/bigquery/api/snippets/insert_sql.php
+++ b/bigquery/api/snippets/insert_sql.php
@@ -25,7 +25,7 @@
 require_once __DIR__ . '/../vendor/autoload.php';
 
 if (count($argv) != 4) {
-    return print("Usage: php snippets/insert_sql.php PROJECT_ID DATASET_ID SOURCE\n");
+    return printf("Usage: php %s PROJECT_ID DATASET_ID SOURCE\n", __FILE__);
 }
 list($_, $projectId, $datasetId, $source) = $argv;
 
diff --git a/bigquery/api/snippets/list_datasets.php b/bigquery/api/snippets/list_datasets.php
index d797f22fb6..6063226d27 100644
--- a/bigquery/api/snippets/list_datasets.php
+++ b/bigquery/api/snippets/list_datasets.php
@@ -25,7 +25,7 @@
 require_once __DIR__ . '/../vendor/autoload.php';
 
 if (count($argv) != 2) {
-    return print("Usage: php snippets/list_datasets.php PROJECT_ID\n");
+    return printf("Usage: php %s PROJECT_ID\n", __FILE__);
 }
 list($_, $projectId) = $argv;
 
diff --git a/bigquery/api/snippets/list_tables.php b/bigquery/api/snippets/list_tables.php
index 4d74572117..695356d285 100644
--- a/bigquery/api/snippets/list_tables.php
+++ b/bigquery/api/snippets/list_tables.php
@@ -25,7 +25,7 @@
 require_once __DIR__ . '/../vendor/autoload.php';
 
 if (count($argv) != 3) {
-    return print("Usage: php snippets/list_tables.php PROJECT_ID DATASET_ID\n");
+    return printf("Usage: php %s PROJECT_ID DATASET_ID\n", __FILE__);
 }
 list($_, $projectId, $datasetId) = $argv;
 
diff --git a/bigquery/api/snippets/paginate_table.php b/bigquery/api/snippets/paginate_table.php
index 26105fd854..46776a4364 100644
--- a/bigquery/api/snippets/paginate_table.php
+++ b/bigquery/api/snippets/paginate_table.php
@@ -25,7 +25,7 @@
 require_once __DIR__ . '/../vendor/autoload.php';
 
 if (count($argv) < 4 || count($argv) > 5) {
-    return print("Usage: php snippets/paginate_table.php PROJECT_ID DATASET_ID TABLE_ID [NUM_RESULTS]\n");
+    return printf("Usage: php %s PROJECT_ID DATASET_ID TABLE_ID [NUM_RESULTS]\n", __FILE__);
 }
 list($_, $projectId, $datasetId, $tableId) = $argv;
 $maxResults = isset($argv[4]) ? $argv[4] : 10;
diff --git a/bigquery/api/snippets/run_query.php b/bigquery/api/snippets/run_query.php
index e25d057999..6ac4d9a04d 100644
--- a/bigquery/api/snippets/run_query.php
+++ b/bigquery/api/snippets/run_query.php
@@ -25,7 +25,7 @@
 require_once __DIR__ . '/../vendor/autoload.php';
 
 if (count($argv) != 3) {
-    return print("Usage: php snippets/run_query.php PROJECT_ID SQL_QUERY\n");
+    return printf("Usage: php %s PROJECT_ID SQL_QUERY\n", __FILE__);
 }
 list($_, $projectId, $query) = $argv;
 
diff --git a/bigquery/api/snippets/run_query_as_job.php b/bigquery/api/snippets/run_query_as_job.php
index 8b11c3ab5a..c803e20073 100644
--- a/bigquery/api/snippets/run_query_as_job.php
+++ b/bigquery/api/snippets/run_query_as_job.php
@@ -25,7 +25,7 @@
 require_once __DIR__ . '/../vendor/autoload.php';
 
 if (count($argv) != 3) {
-    return print("Usage: php snippets/run_query_as_job.php PROJECT_ID SQL_QUERY\n");
+    return printf("Usage: php %s PROJECT_ID SQL_QUERY\n", __FILE__);
 }
 list($_, $projectId, $query) = $argv;
 
diff --git a/bigquery/api/snippets/stream_row.php b/bigquery/api/snippets/stream_row.php
index 50627dd2e1..a134531a89 100644
--- a/bigquery/api/snippets/stream_row.php
+++ b/bigquery/api/snippets/stream_row.php
@@ -27,7 +27,7 @@
 require_once __DIR__ . '/../vendor/autoload.php';
 
 if (count($argv) < 4 || count($argv) > 5) {
-    return print("Usage: php snippets/stream_row.php PROJECT_ID DATASET_ID TABLE_ID [DATA]\n");
+    return printf("Usage: php %s PROJECT_ID DATASET_ID TABLE_ID [DATA]\n", __FILE__);
 }
 list($_, $projectId, $datasetId, $tableId) = $argv;
 $data = isset($argv[4]) ? json_decode($argv[4], true) : ["field1" => "value1"];
diff --git a/bigquery/api/test/bigqueryTest.php b/bigquery/api/test/bigqueryTest.php
index c459b6310e..9b2aa5eac5 100644
--- a/bigquery/api/test/bigqueryTest.php
+++ b/bigquery/api/test/bigqueryTest.php
@@ -33,6 +33,7 @@ class FunctionsTest extends TestCase
     private static $datasetId;
     private static $dataset;
+    private static $tempTables = [];
 
     public static function setUpBeforeClass()
     {
@@ -185,7 +186,7 @@ public function testImportFromFile()
     /**
     * @dataProvider provideImportFromStorage
     */
-    public function testImportFromStorage($snippet)
+    public function testImportFromStorage($snippet, $runTruncateSnippet = false)
     {
         // run the import
         $output = $this->runSnippet($snippet, [
@@ -196,22 +197,30 @@
         $tableId = 'us_states';
 
         // verify table contents
-        $query = sprintf('SELECT * FROM `%s.%s`', self::$datasetId, $tableId);
-        $testFunction = function () use ($query) {
-            $output = $this->runSnippet('run_query', [$query]);
-            $this->assertContains('Washington', $output);
-        };
-
-        $this->runEventuallyConsistentTest($testFunction);
         $table = self::$dataset->table($tableId);
-        $table->delete();
+        self::$tempTables[] = $table;
+        $this->verifyStatesTable($table);
+
+        if ($runTruncateSnippet) {
+            $truncateSnippet = sprintf('%s_truncate', $snippet);
+            $output = $this->runSnippet($truncateSnippet, [
+                self::$datasetId,
+                $tableId,
+            ]);
+            $this->assertContains('Data imported successfully', $output);
+            $this->verifyStatesTable($table);
+        }
     }
 
     public function provideImportFromStorage()
     {
         return [
-            ['import_from_storage_csv'],
-            ['import_from_storage_json']
+            ['import_from_storage_csv', true],
+            ['import_from_storage_json', true],
+            ['import_from_storage_orc', true],
+            ['import_from_storage_parquet', true],
+            ['import_from_storage_csv_autodetect'],
+            ['import_from_storage_json_autodetect'],
         ];
     }
@@ -347,6 +356,31 @@ private function verifyTempTable($tempTableId)
         $this->runEventuallyConsistentTest($testFunction);
     }
 
+    private function verifyStatesTable($table)
+    {
+        $numRows = 0;
+        $foundValue = false;
+        foreach ($table->rows([]) as $row) {
+            foreach ($row as $column => $value) {
+                if ($value == 'Washington') {
+                    $foundValue = true;
+                }
+            }
+            $numRows++;
+        }
+        $this->assertTrue($foundValue);
+        $this->assertEquals($numRows, 50);
+    }
+
+    public function tearDown()
+    {
+        if (self::$tempTables) {
+            while ($tempTable = array_pop(self::$tempTables)) {
+                $tempTable->delete();
+            }
+        }
+    }
+
     public static function tearDownAfterClass()
     {
         self::$dataset->delete(['deleteContents' => true]);