-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathget_data_writefile.py
More file actions
113 lines (76 loc) · 2.85 KB
/
get_data_writefile.py
File metadata and controls
113 lines (76 loc) · 2.85 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
from selenium import webdriver
from pyvirtualdisplay import Display
from BeautifulSoup import BeautifulSoup
from gobal_variables import *
import csv
import threading
totaldata = [] # rows successfully scraped from the result table, one dict per Epic number
not_matched =[] # Epic numbers for which the search returned no matching record
# JavaScript snippet that types an Epic number into the page's search input field.
def run_script(Driver,id):
    """Fill the EPIC-number search box on the current page via injected JS.

    Args:
        Driver: a selenium WebDriver (anything exposing ``execute_script``).
        id: the Epic number to search for; converted to ``str`` before use.
    """
    print("Extracting data for " + str(id))
    # json.dumps emits a correctly quoted and escaped JS string literal, so an
    # Epic number containing a quote can no longer break (or inject into) the
    # script -- the original raw concatenation was vulnerable to that.
    import json
    script_epic_no = (
        'var id = document.getElementById("txtEPICNo");\n'
        'id.value = ' + json.dumps(str(id)) + ';'
    )
    Driver.execute_script(script_epic_no)  # run the snippet in the page
# Extracting information for each Epic number and writing it to csv files
def selectEpicNo(Driver,idList):
    """Search every Epic number, scrape the result table, and dump two CSVs.

    Args:
        Driver: selenium WebDriver already positioned on the search page.
        idList: iterable of Epic numbers to look up.

    Side effects: appends to the module-level ``totaldata`` / ``not_matched``
    lists and writes ``output/extracted_ids.csv`` (matched records) and
    ``output/not_extracted_ids.csv`` (numbers with no result).
    """
    # 1-based column indexes of interest in the result table.
    # NOTE(review): ``head`` is assumed to be a dict provided by the
    # ``gobal_variables`` star import -- confirm.
    wanted_columns = [7, 9, 10, 11, 12]
    head['7'] = 'Name'
    head['9'] = 'Fathers Name'
    head['10'] = 'Age'
    head['11'] = 'Sex'
    head['12'] = 'Epic No'
    for id in idList:
        run_script(Driver, id)          # type this Epic number into the form
        Button = Driver.find_element_by_id("Button1")
        Button.click()                  # click Search
        # Parse the refreshed page to pull data out of the result table.
        soup = BeautifulSoup(Driver.page_source)
        data = {}
        try:
            # When nothing matched, ``find`` returns None and the next line
            # raises AttributeError, which routes this id to not_matched.
            Table = soup.find('table', {'id': 'gvSearchResult'})
            Rows = Table.findChildren('tr')
            for row in Rows:
                td = row.findChildren('td')
                i = 1
                for d in td:
                    if i in wanted_columns:
                        f = d.string
                        data[head[str(i)]] = f.encode('utf-8')  # keep bytes, as the csv is opened in binary mode
                    i += 1
            totaldata.append(data)
            print("Data extracted for " + str(id))
        except Exception:  # was a bare except: no longer swallows KeyboardInterrupt/SystemExit
            not_matched.append(id)
            print("No data found for " + str(id))
    # Writing the extracted records to csv.
    with open('output/extracted_ids.csv', 'wb') as csvfile:
        w = csv.DictWriter(csvfile, fieldnames=['Epic No', 'Name', 'Fathers Name', 'Age', 'Sex'])
        w.writeheader()
        for data in totaldata:
            w.writerow(data)
    # Writing the Epic numbers that produced no match, one per row.
    with open('output/not_extracted_ids.csv', 'wb') as csvfile:
        w = csv.writer(csvfile, delimiter=' ', quotechar='|', quoting=csv.QUOTE_MINIMAL)
        for data in not_matched:
            # BUG FIX: writerow expects a sequence; passing the bare string
            # wrote every character of the id as a separate field.
            w.writerow([data])
# Running the Webdriver headlessly and getting a unique session cookie
def selectCity(idList):
    """Open the search site on a hidden display, select the state, scrape ids.

    Args:
        idList: iterable of Epic numbers, passed through to ``selectEpicNo``.

    Returns:
        1 on completion (kept for backward compatibility with callers).
    """
    display = Display(visible=0)  # run the browser on an invisible X display
    display.start()
    try:
        Driver = webdriver.Firefox()  # opening the web browser
        try:
            Driver.get(base_url)  # open the search page
            # NOTE(review): ``script`` comes from the gobal_variables star
            # import; the original comment says it selects state FAIZABAD --
            # confirm against that module.
            Driver.execute_script(script)
            Button = Driver.find_element_by_id("Button1")
            Button.click()
            Cookie = Driver.get_cookies()  # unique session cookies (currently unused)
            selectEpicNo(Driver, idList)   # extract information for every Epic number
        finally:
            # BUG FIX: browser process was leaked if any step above raised.
            Driver.close()
    finally:
        # BUG FIX: virtual display was leaked if the browser failed to start.
        display.stop()
    return 1