@@ -176,6 +176,49 @@ append-only tables, using the processing power of Google's infrastructure.
 
 This package is still being implemented, but it is almost complete!
 
+Load data from CSV
+~~~~~~~~~~~~~~~~~~
+
+.. code:: python
+
+    import csv
+
+    from gcloud import bigquery
+    from gcloud.bigquery import SchemaField
+
+    client = bigquery.Client()
+
+    dataset = client.dataset('dataset_name')
+    dataset.create()  # API request
+
+    SCHEMA = [
+        SchemaField('full_name', 'STRING', mode='required'),
+        SchemaField('age', 'INTEGER', mode='required'),
+    ]
+    table = dataset.table('table_name', SCHEMA)
+    table.create()  # API request
+
+    with open('csv_file', 'rb') as readable:
+        table.upload_from_file(
+            readable, source_format='CSV', skip_leading_rows=1)
+
+Perform a synchronous query
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. code:: python
+
+    # Perform a synchronous query.
+    QUERY = (
+        'SELECT name FROM [bigquery-public-data:usa_names.usa_1910_2013] '
+        'WHERE state = "TX"')
+    TIMEOUT_MS = 1000  # query timeout in milliseconds (example value)
+    query = client.run_sync_query('%s LIMIT 100' % QUERY)
+    query.timeout_ms = TIMEOUT_MS
+    query.run()  # API request
+
+    for row in query.rows:
+        print(row)
+
+
 See the ``gcloud-python`` API `BigQuery documentation`_ to learn how to connect
 to BigQuery using this Client Library.
 