Python pandas_gbq.to_gbq() Examples
The following are 9 code examples of pandas_gbq.to_gbq().
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example.
You may also want to check out all available functions/classes of the module pandas_gbq, or try the search function.
Example #1
Source File: to_gbq_simple.py From pandas-gbq with BSD 3-Clause "New" or "Revised" License | 7 votes |
def main(project_id, table_id):
    """Build a small sample DataFrame and upload it to a BigQuery table.

    :param project_id: Google Cloud Platform project ID used for the upload.
    :param table_id: Destination table, including the dataset
        ('my_dataset.my_table').
    """
    # [START bigquery_pandas_gbq_to_gbq_simple]
    import pandas
    import pandas_gbq

    # TODO: Set project_id to your Google Cloud Platform project ID.
    # project_id = "my-project"

    # TODO: Set table_id to the full destination table ID (including the
    # dataset ID).
    # table_id = 'my_dataset.my_table'

    # One column per supported scalar dtype, plus a datetime column.
    sample_columns = {
        "my_string": ["a", "b", "c"],
        "my_int64": [1, 2, 3],
        "my_float64": [4.0, 5.0, 6.0],
        "my_bool1": [True, False, True],
        "my_bool2": [False, True, False],
        "my_dates": pandas.date_range("now", periods=3),
    }
    frame = pandas.DataFrame(sample_columns)

    pandas_gbq.to_gbq(frame, table_id, project_id=project_id)
    # [END bigquery_pandas_gbq_to_gbq_simple]
Example #2
Source File: to_gbq_simple.py From pandas-gbq with BSD 3-Clause "New" or "Revised" License | 6 votes |
def main(project_id, table_id):
    """Create a small sample DataFrame and write it to BigQuery.

    NOTE(review): this snippet is a verbatim duplicate of the Example #1
    snippet (the same source file was listed twice on this page).

    :param project_id: Google Cloud Platform project ID used for the upload.
    :param table_id: Destination table, including the dataset
        ('my_dataset.my_table').
    """
    # [START bigquery_pandas_gbq_to_gbq_simple]
    import pandas
    import pandas_gbq

    # TODO: Set project_id to your Google Cloud Platform project ID.
    # project_id = "my-project"

    # TODO: Set table_id to the full destination table ID (including the
    # dataset ID).
    # table_id = 'my_dataset.my_table'

    # One column per scalar dtype exercised by the sample, plus dates.
    df = pandas.DataFrame(
        {
            "my_string": ["a", "b", "c"],
            "my_int64": [1, 2, 3],
            "my_float64": [4.0, 5.0, 6.0],
            "my_bool1": [True, False, True],
            "my_bool2": [False, True, False],
            "my_dates": pandas.date_range("now", periods=3),
        }
    )

    # Perform the upload to the destination table.
    pandas_gbq.to_gbq(df, table_id, project_id=project_id)
    # [END bigquery_pandas_gbq_to_gbq_simple]
Example #3
Source File: gbq.py From recruit with Apache License 2.0 | 5 votes |
def to_gbq(dataframe, destination_table, project_id=None, chunksize=None,
           reauth=False, if_exists='fail', auth_local_webserver=False,
           table_schema=None, location=None, progress_bar=True,
           credentials=None, verbose=None, private_key=None):
    """Thin shim that forwards every argument to ``pandas_gbq.to_gbq``.

    ``_try_import`` resolves the optional ``pandas_gbq`` module; all
    parameters are passed through unchanged, so see that package for
    their semantics.
    """
    pandas_gbq = _try_import()
    forwarded = dict(
        project_id=project_id,
        chunksize=chunksize,
        reauth=reauth,
        if_exists=if_exists,
        auth_local_webserver=auth_local_webserver,
        table_schema=table_schema,
        location=location,
        progress_bar=progress_bar,
        credentials=credentials,
        verbose=verbose,
        private_key=private_key,
    )
    return pandas_gbq.to_gbq(dataframe, destination_table, **forwarded)
Example #4
Source File: gbq.py From vnpy_crypto with MIT License | 5 votes |
def to_gbq(dataframe, destination_table, project_id, chunksize=None,
           verbose=None, reauth=False, if_exists='fail', private_key=None,
           auth_local_webserver=False, table_schema=None):
    """Delegate a DataFrame upload to the optional ``pandas_gbq`` package.

    Every argument is forwarded as-is; ``_try_import`` supplies the
    ``pandas_gbq`` module object.
    """
    options = {
        "chunksize": chunksize,
        "verbose": verbose,
        "reauth": reauth,
        "if_exists": if_exists,
        "private_key": private_key,
        "auth_local_webserver": auth_local_webserver,
        "table_schema": table_schema,
    }
    gbq = _try_import()
    return gbq.to_gbq(dataframe, destination_table, project_id, **options)
Example #5
Source File: gbq.py From predictive-maintenance-using-machine-learning with Apache License 2.0 | 5 votes |
def to_gbq(dataframe, destination_table, project_id=None, chunksize=None,
           reauth=False, if_exists='fail', auth_local_webserver=False,
           table_schema=None, location=None, progress_bar=True,
           credentials=None, verbose=None, private_key=None):
    """Hand a DataFrame upload off to ``pandas_gbq.to_gbq``.

    This wrapper adds nothing of its own: the module is resolved via
    ``_try_import`` and called with all parameters forwarded unchanged.
    """
    return _try_import().to_gbq(
        dataframe,
        destination_table,
        project_id=project_id,
        chunksize=chunksize,
        reauth=reauth,
        if_exists=if_exists,
        auth_local_webserver=auth_local_webserver,
        table_schema=table_schema,
        location=location,
        progress_bar=progress_bar,
        credentials=credentials,
        verbose=verbose,
        private_key=private_key,
    )
Example #6
Source File: gbq.py From Splunking-Crime with GNU Affero General Public License v3.0 | 5 votes |
def to_gbq(dataframe, destination_table, project_id, chunksize=10000,
           verbose=True, reauth=False, if_exists='fail', private_key=None):
    """Forward a DataFrame upload to ``pandas_gbq.to_gbq``.

    Deliberately returns ``None`` (matching the original wrapper, which
    discards the delegate's result).
    """
    gbq = _try_import()
    gbq.to_gbq(
        dataframe,
        destination_table,
        project_id,
        chunksize=chunksize,
        verbose=verbose,
        reauth=reauth,
        if_exists=if_exists,
        private_key=private_key,
    )
Example #7
Source File: gbq.py From elasticintel with GNU General Public License v3.0 | 5 votes |
def to_gbq(dataframe, destination_table, project_id, chunksize=10000,
           verbose=True, reauth=False, if_exists='fail', private_key=None):
    """Pass a DataFrame upload through to the ``pandas_gbq`` package.

    Returns ``None`` like the original: the delegate's result is not
    propagated to the caller.
    """
    pass_through = {
        "chunksize": chunksize,
        "verbose": verbose,
        "reauth": reauth,
        "if_exists": if_exists,
        "private_key": private_key,
    }
    module = _try_import()
    module.to_gbq(dataframe, destination_table, project_id, **pass_through)
Example #8
Source File: bigquery.py From incubator-superset with Apache License 2.0 | 5 votes |
def df_to_sql(cls, df: pd.DataFrame, **kwargs: Any) -> None:
    """
    Upload data from a Pandas DataFrame to BigQuery. Calls
    `DataFrame.to_gbq()` which requires `pandas_gbq` to be installed.

    :param df: Dataframe with data to be uploaded
    :param kwargs: kwargs to be passed to to_gbq() method. Requires that
        `schema`, `name` and `con` are present in kwargs. `name` and `schema`
        are combined and passed to `to_gbq()` as `destination_table`.
    :raises Exception: if the optional upload libraries are missing, or if
        any of the required kwargs (`name`, `schema`, `con`) is absent.
    """
    try:
        import pandas_gbq
        from google.oauth2 import service_account
    except ImportError as ex:
        # FIX: chain the original ImportError (`from ex`) so the missing
        # module's name survives in the traceback; it was previously lost.
        raise Exception(
            "Could not import libraries `pandas_gbq` or `google.oauth2`, which are "
            "required to be installed in your environment in order "
            "to upload data to BigQuery"
        ) from ex

    if not ("name" in kwargs and "schema" in kwargs and "con" in kwargs):
        raise Exception("name, schema and con need to be defined in kwargs")

    gbq_kwargs = {}
    # NOTE(review): assumes the `con` kwarg is a SQLAlchemy connection whose
    # URL host carries the GCP project ID — confirm against callers.
    gbq_kwargs["project_id"] = kwargs["con"].engine.url.host
    gbq_kwargs["destination_table"] = f"{kwargs.pop('schema')}.{kwargs.pop('name')}"

    # add credentials if they are set on the SQLAlchemy Dialect:
    creds = kwargs["con"].dialect.credentials_info
    if creds:
        credentials = service_account.Credentials.from_service_account_info(creds)
        gbq_kwargs["credentials"] = credentials

    # Only pass through supported kwargs; anything else is silently dropped.
    supported_kwarg_keys = {"if_exists"}
    for key in supported_kwarg_keys:
        if key in kwargs:
            gbq_kwargs[key] = kwargs[key]

    pandas_gbq.to_gbq(df, **gbq_kwargs)
Example #9
Source File: bigquery_tests.py From incubator-superset with Apache License 2.0 | 4 votes |
def test_df_to_sql(self):
    """
    DB Eng Specs (bigquery): Test DataFrame to SQL contract
    """
    # test missing google.oauth2 dependency
    # NOTE(review): this mutates sys.modules without restoring it, which can
    # leak mocks into later tests — consider mock.patch.dict(sys.modules).
    sys.modules["pandas_gbq"] = mock.MagicMock()
    df = DataFrame()
    # FIX: assertRaisesRegexp is a deprecated alias removed in Python 3.12;
    # assertRaisesRegex is the supported spelling (same behavior).
    self.assertRaisesRegex(
        Exception,
        "Could not import libraries",
        BigQueryEngineSpec.df_to_sql,
        df,
        con="some_connection",
        schema="schema",
        name="name",
    )

    invalid_kwargs = [
        {"name": "some_name"},
        {"schema": "some_schema"},
        {"con": "some_con"},
        {"name": "some_name", "con": "some_con"},
        {"name": "some_name", "schema": "some_schema"},
        {"con": "some_con", "schema": "some_schema"},
    ]
    # Test check for missing required kwargs (name, schema, con)
    sys.modules["google.oauth2"] = mock.MagicMock()
    for invalid_kwarg in invalid_kwargs:
        self.assertRaisesRegex(
            Exception,
            "name, schema and con need to be defined in kwargs",
            BigQueryEngineSpec.df_to_sql,
            df,
            **invalid_kwarg,
        )

    # With both dependencies mocked in, exercise the happy path and check
    # the exact kwargs forwarded to pandas_gbq.to_gbq.
    import pandas_gbq
    from google.oauth2 import service_account

    pandas_gbq.to_gbq = mock.Mock()
    service_account.Credentials.from_service_account_info = mock.MagicMock(
        return_value="account_info"
    )
    connection = mock.Mock()
    connection.engine.url.host = "google-host"
    connection.dialect.credentials_info = "secrets"
    BigQueryEngineSpec.df_to_sql(
        df, con=connection, schema="schema", name="name", if_exists="extra_key"
    )
    pandas_gbq.to_gbq.assert_called_with(
        df,
        project_id="google-host",
        destination_table="schema.name",
        credentials="account_info",
        if_exists="extra_key",
    )