import requests
import json, os, re, datetime
import csv
import pandas as pd
from requests.adapters import HTTPAdapter
from requests.packages.urllib3.util.retry import Retry

# Credentials / output location -- fill in before running.
# NOTE(review): never print an API key; the original `print(zomato_api_key)`
# leaked the credential to stdout and has been removed.
zomato_api_key = ""  # os.getenv('auth_key')  # read the key from the environment
path_to_folder = ""  # destination path for the raw json dumps

headers1 = {'user-key': zomato_api_key}

# One-off probe request: records 131-150 for city entity 40, to confirm the
# key works and to see how the API responds past the available result count.
rests_url = 'https://developers.zomato.com/api/v2.1/search?entity_id=40&entity_type=city&start=131&count=20'
get_request = requests.get(rests_url, headers=headers1)
read_rest = json.loads(get_request.text)  # parse the fetched payload
print(read_rest)
# Pasted notebook output (kept for reference, not code):
# {'results_found': 939, 'results_start': 0, 'results_shown': 0, 'restaurants': []}
headers1 = {'user-key': zomato_api_key}
read_rest = []  # kept for backward compatibility with the original script

# One destination list per fetch pass (names kept -- later code reads them).
rest_list = []
rest_list1 = []
rest_list2 = []
rest_list3 = []
rest_list4 = []
rest_list5 = []
rest_list6 = []
rest_list7 = []
rest_list8 = []
rest_list9 = []


def _fetch_search_pages(city_ids, start, dest):
    """Fetch one search page (20 records from offset *start*) for every city
    id in *city_ids* and append each parsed JSON response to *dest*.

    Connection failures are tolerated per-city so one dead request does not
    abort the whole pass (mirrors the original per-iteration try/except).
    """
    for city_id in city_ids:
        url = ('https://developers.zomato.com/api/v2.1/search?entity_id='
               + str(city_id) + '&entity_type=city&start=' + str(start) + '&count=20')
        try:
            response = requests.get(url, headers=headers1)
            dest.append(json.loads(response.text))  # parsed payload -> collector
        except requests.exceptions.ConnectionError as err:
            # Original behaviour: tag the exception object and move on.
            err.status_code = "Connection refused"


# The ten passes below are the original call_1..call_10, now thin wrappers:
# cities 1-79 at offsets 101..181, then cities 276-309 at the same offsets.
def call_1():
    _fetch_search_pages(range(1, 80), 101, rest_list)


def call_2():
    _fetch_search_pages(range(1, 80), 121, rest_list1)


def call_3():
    _fetch_search_pages(range(1, 80), 141, rest_list2)


def call_4():
    _fetch_search_pages(range(1, 80), 161, rest_list3)


def call_5():
    _fetch_search_pages(range(1, 80), 181, rest_list4)


def call_6():
    _fetch_search_pages(range(276, 310), 101, rest_list5)


def call_7():
    _fetch_search_pages(range(276, 310), 121, rest_list6)


def call_8():
    _fetch_search_pages(range(276, 310), 141, rest_list7)


def call_9():
    _fetch_search_pages(range(276, 310), 161, rest_list8)


def call_10():
    _fetch_search_pages(range(276, 310), 181, rest_list9)
# Execute every fetch pass in the original order.
for _fetch_pass in (call_1, call_2, call_3, call_4, call_5,
                    call_6, call_7, call_8, call_9, call_10):
    _fetch_pass()
# Dump each collected pass to its own raw file: rest_list -> file11.json,
# rest_list1 -> file12.json, ... rest_list9 -> file20.json.
_result_lists = [rest_list, rest_list1, rest_list2, rest_list3, rest_list4,
                 rest_list5, rest_list6, rest_list7, rest_list8, rest_list9]
for _file_no, _data in enumerate(_result_lists, start=11):
    path_file = os.path.join(path_to_folder, 'file' + str(_file_no) + '.json')
    with open(path_file, 'w', encoding="utf-8") as jsonfile:
        # ensure_ascii=False keeps accented restaurant names readable on disk
        json.dump(_data, jsonfile, ensure_ascii=False)
path_to_folder = "PATH TO FOLDER"  # folder holding the raw fileN.json dumps
path_to_json = "PATH TO JSON"      # root of the per-restaurant json tree to build
# Collect every *.json file name found in the raw-data folder.
json_files = [pos_json for pos_json in os.listdir(path_to_folder) if pos_json.endswith('.json')]
print(json_files)
# Pasted notebook output (kept for reference, not code):
# ['file1.json', 'file10.json', ..., 'file20.json', ..., 'file9.json']
def ensure_dir(path_to_json):
    """Create directory *path_to_json* (and any missing parents) if absent.

    Uses ``exist_ok=True`` instead of the original exists()-then-makedirs()
    sequence, which was racy and would crash if the directory appeared
    between the check and the create.
    """
    os.makedirs(path_to_json, exist_ok=True)
#len(loaded_file[7]['restaurants'])#.keys())
#(loaded_file[7]['restaurants'])#.keys())
#loaded_file[1]['restaurants'][3].keys()
#loaded_file
import unidecode  # NOTE(review): third-party and unused below -- candidate for removal

# Human-readable folder name for each currency symbol the API returns.
_CURRENCY_FOLDERS = {
    'Rs.': "Indian Rupees(Rs.)",
    'IDR': "Indonesian Rupiah(IDR)",
    'P': "Botswana Pula(P)",
    '$': "Dollar($)",
    '€': "Euro(€)",
    '£': "Pounds(£)",
    'AED': "Emirati Diram(AED)",
    'LKR': "Sri Lankan Rupee(LKR)",
    'NZ$': "NewZealand($)",
    'QR': "Qatari Rial(QR)",
    'R': "Rand(R)",
    'R$': "Brazilian Real(R$)",
    'TL': "Turkish Lira(TL)",
}

# Substrings that break file names, applied in order to the restaurant name
# before it is used as part of a path.
_NAME_FIXUPS = [
    ('/', " "), ('|', " "), ("I:ba", "I-ba"), ('á', "a"), ('é', "e"),
    ('ě', "e"), ("'s", "s"), ('"Country Store"', "Country Store"),
    ('¿Por Que No?', "Por_Que_No"),
]

# Walk every raw dump and write each restaurant to its own json file under
# <currency folder>/<city>/<locality>/<rating text>/<name>_<id>.json.
path = ""
for js in json_files:
    with open(os.path.join(path_to_folder, js), 'r', encoding='utf-8', errors="ignore") as json_file:
        loaded_file = json.load(json_file, strict=False)  # list of API responses
    for i in range(len(loaded_file)):
        if 'restaurants' not in loaded_file[i].keys():
            continue  # error responses carry no 'restaurants' key
        for j in range(len(loaded_file[i]['restaurants'])):
            entry = loaded_file[i]['restaurants'][j]
            if 'restaurant' not in entry.keys():
                continue
            rest = entry['restaurant']
            if "dummy" in rest['url']:
                continue  # placeholder record, skip
            currency = rest['currency']
            city = rest['location']['city']
            locality = rest['location']['locality'].replace(':', "-")
            rating_text = rest['user_rating']['rating_text']
            name_res = rest['name']
            for bad, good in _NAME_FIXUPS:
                name_res = name_res.replace(bad, good)
            name_res = name_res.strip()
            res_id = rest['R']['res_id']
            # NOTE(review): as in the original, an unknown currency leaves
            # `path` holding the previous restaurant's currency folder.
            if currency in _CURRENCY_FOLDERS:
                path = os.path.join(path_to_json + '/' + _CURRENCY_FOLDERS[currency])
            print(name_res)
            path = os.path.join(path + '/' + city)
            print(path)
            path = os.path.join(path + '/' + locality)
            print(path)
            path = os.path.join(path + '/' + rating_text)
            print(path)
            path = path.replace(' /', "/")  # tidy accidental "name /" joins
            ensure_dir(path)
            print(path)
            path = os.path.join(path, str(name_res) + '_' + str(res_id) + '.json')
            print(name_res)
            print(path)
            with open(path, 'w') as json_file:
                json.dump(entry, json_file)  # one json file per restaurant
path = ""
# Flatten the per-restaurant json tree into one CSV.
# NOTE(review): left as a plain open() because the file is closed at the
# very end of the script; a `with` block here would change that structure.
csvfile = open(path_to_folder + '/' + 'zomato_new.csv', 'w', encoding="utf-8", newline="")
writer = csv.writer(csvfile)
writer.writerow(['Restaurant ID', 'Restaurant Name', 'Country Code', 'City', 'Address',
                 'Locality', 'Locality Verbose', 'Longitude', 'Latitude', 'Cuisines',
                 'Average Cost for two', 'Currency', 'Has Table booking',
                 'Has Online delivery', 'Is delivering now', 'Switch to order menu',
                 'Price range', 'Aggregate rating', 'Rating color', 'Rating text', 'Votes'])

# Display name per currency symbol (the original elif chain wrote the same
# 21-column row in all 13 branches, and had an unreachable duplicate IDR
# branch -- collapsed into one table + one writerow).
_CSV_CURRENCY_NAMES = {
    'Rs.': "Indian Rupees(Rs.)",
    'IDR': "Indonesian Rupiah(IDR)",
    'P': "Botswana Pula(P)",
    '$': "Dollar($)",
    '€': "Euro(€)",
    '£': "Pounds(£)",
    'AED': "Emirati Diram(AED)",
    'LKR': "Sri Lankan Rupee(LKR)",
    'NZ$': "NewZealand($)",
    'QR': "Qatari Rial(QR)",
    'R': "Rand(R)",
    'R$': "Brazilian Real(R$)",
    'TL': "Turkish Lira(TL)",
}

for root, dirs, files in os.walk(path_to_json):
    for f in files:
        if not f.endswith('.json'):
            continue
        with open(os.path.join(root, f), 'r', encoding='utf-8', errors="ignore") as json_file:
            loaded_file = json.load(json_file, strict=False)
        rest = loaded_file['restaurant']
        loc = rest['location']
        rating = rest['user_rating']
        currency = rest['currency']
        if currency not in _CSV_CURRENCY_NAMES:
            continue  # original elif chain wrote no row for unknown currencies
        writer.writerow([rest['R']['res_id'], rest['name'], loc['country_id'], loc['city'],
                         loc['address'], loc['locality'], loc['locality_verbose'],
                         loc['longitude'], loc['latitude'], rest['cuisines'],
                         rest['average_cost_for_two'], _CSV_CURRENCY_NAMES[currency],
                         rest['has_table_booking'], rest['has_online_delivery'],
                         rest['is_delivering_now'], rest['switch_to_order_menu'],
                         rest['price_range'], rating['aggregate_rating'],
                         rating['rating_color'], rating['rating_text'], rating['votes']])
# Read the generated CSV back in. The file was written as UTF-8 above, so it
# must be read as UTF-8 -- the original used latin-1, which silently mangled
# every accented city/restaurant name.
zomato = pd.read_csv(path_to_folder + '/' + 'zomato_new.csv', sep=',', encoding='utf-8')
zomato.head()
# Pasted notebook output (DataFrame preview table) removed: it was not valid
# Python and made the whole file unparseable. Sample from the run:
#   0  6310675  Mama Lou's Italian Kitchen  162  Las Piñas City ...
zomato.shape
# Observed in the notebook run: (7505, 21)
csvfile.close()