Fetch Current BoxOffice Shows.
Pratik-Sanghani committed Jul 19, 2018
1 parent be74bb6 commit 40c444c
Showing 1 changed file with 55 additions and 52 deletions.
107 changes: 55 additions & 52 deletions windows/benji.py
@@ -43,7 +43,7 @@
import cv2
import tweepy
from tweepy import OAuthHandler
#import twitterCredentials
import twitterCredentials

requests.packages.urllib3.disable_warnings()
try:
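Side note, not part of the diff: the uncommented import presumably supplies the Twitter API keys for the tweepy / OAuthHandler imports above. A minimal sketch of how such a credentials module is typically consumed follows; the attribute names (consumer_key, access_token, and so on) are assumptions and are not taken from this repository.

# Illustrative only, not from this repository: one common shape for a
# twitterCredentials module and how tweepy's OAuthHandler consumes it.
# The attribute names below are assumptions.
import tweepy
import twitterCredentials

auth = tweepy.OAuthHandler(twitterCredentials.consumer_key, twitterCredentials.consumer_secret)
auth.set_access_token(twitterCredentials.access_token, twitterCredentials.access_token_secret)
api = tweepy.API(auth)
api.update_status("Hello from Benji")   # example call once authenticated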
@@ -420,60 +420,63 @@ def main():

        #Box Office Status
        elif link[-1] == "boxoffice":
            try:
                # Fetch the "now showing" listing for the requested city (link[0]).
                url = "https://in.bookmyshow.com/" + link[0] + "/movies/nowshowing"
                r = requests.get(url)
                soup = BeautifulSoup(r.content, 'html.parser')

                soup_level2 = []
                show_status_list = []

                # Each movie card links to its detail page; pull the href out of the card markup.
                shows_list = soup.find_all('div', attrs={'class': 'card-container wow fadeIn movie-card-container'})
                for i in shows_list:
                    start = str(i).index("href=")
                    end = str(i).index("title=")
                    soup_level2.append("https://in.bookmyshow.com" + str(i)[start+6:end-2])

                # The data-coming-soon attribute says whether the title is already in cinemas.
                show_status_raw = soup.find_all('div', attrs={'class': 'popularity sa-data-plugin'})
                for i in show_status_raw:
                    start = str(i).index("data-coming-soon=")
                    end = str(i).index('data-event-code')
                    data = str(i)[start+18:end-2]

                    if data == "false":
                        show_status_list.append("In Cinemas Now...")
                    if data == "true":
                        show_status_list.append("Coming Soon...")

                Tags_list = []
                Name_list = []

                # Visit every movie page and collect its genre tags and title.
                for url in soup_level2:
                    r = requests.get(url)
                    tags = BeautifulSoup(r.content, 'html.parser')

                    Tags_raw = tags.find_all('span', attrs={'class': '__genre-tag'})
                    tmp_tags = ""
                    for i in Tags_raw:
                        tmp_tags = tmp_tags + str(i)[str(i).index('">')+2:str(i).index("</span>")] + " - "
                    Tags_list.append(tmp_tags[:-3])   # drop the trailing " - "

                    Names_raw = tags.find_all('h1', attrs={'class': '__name'})
                    for i in Names_raw:
                        Name_list.append(str(i)[str(i).index('">')+2:str(i).index("</h1>")])

                speak.say("Preparing List")
                speak.runAndWait()

                cntr = len(Name_list)
                print("----------------------------------------------")
                print(link[0].capitalize())
                print("----------------------------------------------")
                print("")

                for i in range(cntr):
                    print("Name : " + Name_list[i])
                    print("Tags : " + Tags_list[i])
                    print("Status : " + show_status_list[i])
                    print("")
                print("----------------------------------------------")
                print("")
            except:
                # Any network or parsing failure ends up here.
                print("Error")

        # elif put.startswith(search_pc):
        # process=subprocess.Popen("dir /b/s "+link[1],shell=True,stdout=subprocess.PIPE)
        # while True:
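The hunk above pulls the movie URL and coming-soon flag by slicing str(tag) at hard-coded offsets, which silently breaks if BookMyShow reorders attributes. Below is a small standalone sketch of the same lookups through BeautifulSoup's attribute access; the fetch_boxoffice helper, the zip pairing of cards with status plugins, and the example city are illustrative assumptions, while the URL pattern and CSS class names come from the diff itself.

# Hypothetical helper, not part of this commit: same page and CSS classes as the
# diff above, but attributes are read via BeautifulSoup instead of slicing str(tag).
import requests
from bs4 import BeautifulSoup

def fetch_boxoffice(city):
    url = "https://in.bookmyshow.com/" + city + "/movies/nowshowing"
    soup = BeautifulSoup(requests.get(url).content, 'html.parser')

    cards = soup.find_all('div', attrs={'class': 'card-container wow fadeIn movie-card-container'})
    plugins = soup.find_all('div', attrs={'class': 'popularity sa-data-plugin'})

    shows = []
    # Assumes the cards and their status plugins appear in the same order on the page.
    for card, plugin in zip(cards, plugins):
        anchor = card.find('a')                               # assumes the card wraps an <a href="/...">
        coming_soon = plugin.get('data-coming-soon', 'false') # assumes the flag sits on the plugin div
        shows.append({
            'url': "https://in.bookmyshow.com" + anchor['href'],
            'status': "Coming Soon..." if coming_soon == "true" else "In Cinemas Now...",
        })
    return shows

# Example usage (city name is only a placeholder):
for show in fetch_boxoffice("mumbai"):
    print(show['status'], show['url'])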
