# Load the Kaggle diabetes dataset locally, then ingest it into a BigQuery table.
import pandas as pd
from google.cloud import bigquery
from google.oauth2 import service_account
import json
# Read the CSV and normalise column names to lowercase for easier handling downstream.
diabetes = pd.read_csv('diabetes.csv')
diabetes.columns = diabetes.columns.str.lower()
print(diabetes.head())
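# Optional sanity check before uploading: a minimal sketch (not part of the
# original script) that reports the shape and missing-value counts, so an empty
# or malformed CSV is caught before the BigQuery load job runs.
print(diabetes.shape)
print(diabetes.isna().sum())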

def bq_ingestion():
    # Authenticate with the service account key file.
    service_account_info = json.load(open('usecases-serviceaccount.json'))
    credentials = service_account.Credentials.from_service_account_info(
        service_account_info,
        scopes=["https://www.googleapis.com/auth/cloud-platform"],
    )
    client = bigquery.Client(credentials=credentials, project='usecases-369820')

    # Overwrite the target table and let BigQuery infer the schema from the CSV,
    # skipping the header row.
    job_config = bigquery.LoadJobConfig(
        write_disposition=bigquery.WriteDisposition.WRITE_TRUNCATE,
        autodetect=True,
        skip_leading_rows=1,
        source_format=bigquery.SourceFormat.CSV,
    )

    try:
        with open('diabetes.csv', 'rb') as src_file:
            # Pass the open file object (not the filename) to the load job.
            job = client.load_table_from_file(
                src_file,
                'usecases-369820.kaggle_diabetes.diabetes',
                job_config=job_config,
            )
            job.result()  # Wait for the load job to complete.
        return True
    except Exception as e:
        # Return the exception so the caller can inspect why the load failed.
        return e
print(bq_ingestion())
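
# A minimal follow-up sketch (an assumption, not part of the original script) to
# confirm the ingestion worked: reuse the same credentials and query the row
# count of the target table.
def bq_row_count():
    service_account_info = json.load(open('usecases-serviceaccount.json'))
    credentials = service_account.Credentials.from_service_account_info(
        service_account_info,
        scopes=["https://www.googleapis.com/auth/cloud-platform"],
    )
    client = bigquery.Client(credentials=credentials, project='usecases-369820')
    query = "SELECT COUNT(*) AS n FROM `usecases-369820.kaggle_diabetes.diabetes`"
    rows = client.query(query).result()  # Run the query and wait for the result.
    return next(iter(rows)).n

print(bq_row_count())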