# Scraper for propertyhub.in.th condo-rental listings, one CSV per area.
# Standard library
import tkinter as tk
import tkinter.font as tkFont
from csv import writer

# Third-party
import requests
from bs4 import BeautifulSoup  # used throughout but was missing from the original imports
# Site root, prepended to the relative hrefs found on the listing cards.
url_propertyhub_first = "https://propertyhub.in.th"
class ScrapingAll():
    """Groups one scraper per Bangkok-area location; each writes its own CSV."""
10 #Bangkok Section
11 def Bangkok():
12 url_propertyhub_bkk =
f"https://propertyhub.in.th/%E0%B9%80%E0%B8%8A%E0%B9%88%E0%B8%B2%E0%B8%84%E0%B8%AD%E
0%B8%99%E0%B9%82%E0%B8%94/%E0%B8%81%E0%B8%A3%E0%B8%B8%E0%B8%87%E0%B9%80%E0%B8%97%E0%
B8%9E%E0%B8%A1%E0%B8%AB%E0%B8%B2%E0%B8%99%E0%B8%84%E0%B8%A3"
13 page_propertyhub_bkk = requests.get(url_propertyhub_bkk)
14 soup_propertyhub_bkk =
BeautifulSoup(page_propertyhub_bkk.content,'html.parser')
15 data_propertyhub_bkk = soup_propertyhub_bkk.find_all('div',class_="sc-
152o12i-0 tLuGm sc-i5hg7z-1 iokjfP")
16 with open('Propertyhub_BKK.csv', 'w',encoding='utf-8-sig', newline='') as f:
17 thewriter = writer(f)
18 header = ['Link','Name','Info']
19 thewriter.writerow(header)
20
21 propertyhub_bkk_list = []
22 for propertyhub_bkk in data_propertyhub_bkk :
23 propertyhub_bkk_list.append(propertyhub_bkk.text)
24 name_propertyhub_bkk = propertyhub_bkk.find('a', class_="sc-152o12i-
9 fhmSYQ").text.replace('\n', '')
25 link_propertyhub_bkk = propertyhub_bkk.find('a', class_="sc-152o12i-
9 fhmSYQ")['href']
26 urlinfo_propertyhub_bkk=(url_propertyhub_first+link_propertyhub_bkk)
27
28 urlinfo_propertyhub_bkk=(url_propertyhub_first+link_propertyhub_bkk)
29 pageinfo_propertyhub_bkk = requests.get(urlinfo_propertyhub_bkk)
30 soupinfo_propertyhub_bkk =
BeautifulSoup(pageinfo_propertyhub_bkk.content,'html.parser')
31 datainfo_propertyhub_bkk =
soupinfo_propertyhub_bkk.find_all('div',class_="row sc-ves8oa-17 dkHvsM")
32 propertyhubinfo_bkk_list = []
33 for propertyhubinfo_bkk in datainfo_propertyhub_bkk :
34
propertyhubinfo_bkk_list.append(propertyhubinfo_bkk.text.replace('\xa0',''))
35
36 info_bkk =
[urlinfo_propertyhub_bkk,name_propertyhub_bkk,propertyhubinfo_bkk_list]
37 thewriter.writerow(info_bkk)
38 print("Bangkok : success...")
39 #Ratchada Section
40 def Ratchada():
41 url_propertyhub_ratchada =
f"https://propertyhub.in.th/%E0%B9%80%E0%B8%8A%E0%B9%88%E0%B8%B2%E0%B8%84%E0%B8%AD%E
0%B8%99%E0%B9%82%E0%B8%94/mrt-
%E0%B8%A3%E0%B8%B1%E0%B8%8A%E0%B8%94%E0%B8%B2%E0%B8%A0%E0%B8%B4%E0%B9%80%E0%B8%A9%E0
%B8%81"
42 page_propertyhub_ratchada = requests.get(url_propertyhub_ratchada)
43 soup_propertyhub_ratchada =
BeautifulSoup(page_propertyhub_ratchada.content,'html.parser')
44 data_propertyhub_ratchada =
soup_propertyhub_ratchada.find_all('div',class_="sc-152o12i-0 tLuGm sc-i5hg7z-1
hwrlNi")
45 with open('Propertyhub_ratchada.csv', 'w',encoding='utf-8-sig', newline='')
as f:
46 thewriter = writer(f)
47 header = ['Link','Name','Info']
48 thewriter.writerow(header)
49
50 propertyhub_ratchada_list = []
51 for propertyhub_ratchada in data_propertyhub_ratchada :
52 propertyhub_ratchada_list.append(propertyhub_ratchada.text)
53 name_propertyhub_ratchada = propertyhub_ratchada.find('a',
class_="sc-152o12i-9 fhmSYQ").text.replace('\n', '')
54 link_propertyhub_ratchada = propertyhub_ratchada.find('a',
class_="sc-152o12i-9 fhmSYQ")['href']
55 urlinfo_propertyhub_ratchada=
(url_propertyhub_first+link_propertyhub_ratchada)
56
57 urlinfo_propertyhub_ratchada=
(url_propertyhub_first+link_propertyhub_ratchada)
58 pageinfo_propertyhub_ratchada =
requests.get(urlinfo_propertyhub_ratchada)
59 soupinfo_propertyhub_ratchada =
BeautifulSoup(pageinfo_propertyhub_ratchada.content,'html.parser')
60 datainfo_propertyhub_ratchada =
soupinfo_propertyhub_ratchada.find_all('div',class_="row sc-ves8oa-17 dkHvsM")
61 propertyhubinfo_ratchada_list = []
62 for propertyhubinfo_ratchada in datainfo_propertyhub_ratchada :
63
propertyhubinfo_ratchada_list.append(propertyhubinfo_ratchada.text.replace('\xa0','
'))
64
65 info_ratchada =
[urlinfo_propertyhub_ratchada,name_propertyhub_ratchada,propertyhubinfo_ratchada_lis
t]
66 thewriter.writerow(info_ratchada)
67 print("Ratchada : success...")
68
69 #Muangthong Section
70 def Muangthong():
71 url_propertyhub_Muangthong =
f"https://propertyhub.in.th/%E0%B9%80%E0%B8%8A%E0%B9%88%E0%B8%B2%E0%B8%84%E0%B8%AD%E
0%B8%99%E0%B9%82%E0%B8%94/%E0%B9%80%E0%B8%A1%E0%B8%B7%E0%B8%AD%E0%B8%87%E0%B8%97%E0%
B8%AD%E0%B8%87%E0%B8%98%E0%B8%B2%E0%B8%99%E0%B8%B5"
72 page_propertyhub_Muangthong = requests.get(url_propertyhub_Muangthong)
73 soup_propertyhub_Muangthong =
BeautifulSoup(page_propertyhub_Muangthong.content,'html.parser')
74 data_propertyhub_Muangthong =
soup_propertyhub_Muangthong.find_all('div',class_="sc-152o12i-0 tLuGm sc-i5hg7z-1
hwrlNi")
75 with open('Propertyhub_Muangthong.csv', 'w',encoding='utf-8-sig',
newline='') as f:
76 thewriter = writer(f)
77 header = ['Link','Name','Info']
78 thewriter.writerow(header)
79
80 propertyhub_Muangthong_list = []
81 for propertyhub_Muangthong in data_propertyhub_Muangthong :
82 propertyhub_Muangthong_list.append(propertyhub_Muangthong.text)
83 name_propertyhub_Muangthong = propertyhub_Muangthong.find('a',
class_="sc-152o12i-9 fhmSYQ").text.replace('\n', '')
84 link_propertyhub_Muangthong = propertyhub_Muangthong.find('a',
class_="sc-152o12i-9 fhmSYQ")['href']
85 urlinfo_propertyhub_Muangthong=
(url_propertyhub_first+link_propertyhub_Muangthong)
86
87 urlinfo_propertyhub_Muangthong=
(url_propertyhub_first+link_propertyhub_Muangthong)
88 pageinfo_propertyhub_Muangthong =
requests.get(urlinfo_propertyhub_Muangthong)
89 soupinfo_propertyhub_Muangthong =
BeautifulSoup(pageinfo_propertyhub_Muangthong.content,'html.parser')
90 datainfo_propertyhub_Muangthong =
soupinfo_propertyhub_Muangthong.find_all('div',class_="row sc-ves8oa-17 dkHvsM")
91 propertyhubinfo_Muangthong_list = []
92 for propertyhubinfo_Muangthong in datainfo_propertyhub_Muangthong :
93
propertyhubinfo_Muangthong_list.append(propertyhubinfo_Muangthong.text.replace('\xa
0',''))
94
95 info_Muangthong =
[urlinfo_propertyhub_Muangthong,name_propertyhub_Muangthong,propertyhubinfo_Muangtho
ng_list]
96 thewriter.writerow(info_Muangthong)
97 print("Muangthong : success...")
98
99 #Sukumvit Section
100 def Sukumvit():
101 url_propertyhub_Sukumvit =
f"https://propertyhub.in.th/%E0%B9%80%E0%B8%8A%E0%B9%88%E0%B8%B2%E0%B8%84%E0%B8%AD%E
0%B8%99%E0%B9%82%E0%B8%94/%E0%B8%96%E0%B8%99%E0%B8%99%E0%B8%AA%E0%B8%B8%E0%B8%82%E0%
B8%B8%E0%B8%A1%E0%B8%A7%E0%B8%B4%E0%B8%97"
102 page_propertyhub_Sukumvit = requests.get(url_propertyhub_Sukumvit)
103 soup_propertyhub_Sukumvit =
BeautifulSoup(page_propertyhub_Sukumvit.content,'html.parser')
104 data_propertyhub_Sukumvit =
soup_propertyhub_Sukumvit.find_all('div',class_="sc-152o12i-0 tLuGm sc-i5hg7z-1
iokjfP")
105 with open('Propertyhub_Sukumvit.csv', 'w',encoding='utf-8-sig', newline='')
as f:
106 thewriter = writer(f)
107 header = ['Link','Name','Info']
108 thewriter.writerow(header)
109
110 propertyhub_Sukumvit_list = []
111 for propertyhub_Sukumvit in data_propertyhub_Sukumvit :
112 propertyhub_Sukumvit_list.append(propertyhub_Sukumvit.text)
113 name_propertyhub_Sukumvit = propertyhub_Sukumvit.find('a',
class_="sc-152o12i-9 fhmSYQ").text.replace('\n', '')
114 link_propertyhub_Sukumvit = propertyhub_Sukumvit.find('a',
class_="sc-152o12i-9 fhmSYQ")['href']
115 urlinfo_propertyhub_Sukumvit=
(url_propertyhub_first+link_propertyhub_Sukumvit)
116
117 urlinfo_propertyhub_Sukumvit=
(url_propertyhub_first+link_propertyhub_Sukumvit)
118 pageinfo_propertyhub_Sukumvit =
requests.get(urlinfo_propertyhub_Sukumvit)
119 soupinfo_propertyhub_Sukumvit =
BeautifulSoup(pageinfo_propertyhub_Sukumvit.content,'html.parser')
120 datainfo_propertyhub_Sukumvit =
soupinfo_propertyhub_Sukumvit.find_all('div',class_="row sc-ves8oa-17 dkHvsM")
121 propertyhubinfo_Sukumvit_list = []
122 for propertyhubinfo_Sukumvit in datainfo_propertyhub_Sukumvit :
123
propertyhubinfo_Sukumvit_list.append(propertyhubinfo_Sukumvit.text.replace('\xa0','
'))
124
125 info_Sukumvit =
[urlinfo_propertyhub_Sukumvit,name_propertyhub_Sukumvit,propertyhubinfo_Sukumvit_lis
t]
126 thewriter.writerow(info_Sukumvit)
127 print("Sukumvit : success...")
128
129 #Nonthaburi Section
130 def Nonthaburi():
131 url_propertyhub_Nonthaburi =
f"https://propertyhub.in.th/%E0%B9%80%E0%B8%8A%E0%B9%88%E0%B8%B2%E0%B8%84%E0%B8%AD%E
0%B8%99%E0%B9%82%E0%B8%94/%E0%B8%99%E0%B8%99%E0%B8%97%E0%B8%9A%E0%B8%B8%E0%B8%A3%E0%
B8%B5"
132 page_propertyhub_Nonthaburi = requests.get(url_propertyhub_Nonthaburi)
133 soup_propertyhub_Nonthaburi =
BeautifulSoup(page_propertyhub_Nonthaburi.content,'html.parser')
134 data_propertyhub_Nonthaburi =
soup_propertyhub_Nonthaburi.find_all('div',class_="sc-152o12i-0 tLuGm sc-i5hg7z-1
iokjfP")
135 with open('Propertyhub_Nonthaburi.csv', 'w',encoding='utf-8-sig',
newline='') as f:
136 thewriter = writer(f)
137 header = ['Link','Name','Info']
138 thewriter.writerow(header)
139
140 propertyhub_Nonthaburi_list = []
141 for propertyhub_Nonthaburi in data_propertyhub_Nonthaburi :
142 propertyhub_Nonthaburi_list.append(propertyhub_Nonthaburi.text)
143 name_propertyhub_Nonthaburi = propertyhub_Nonthaburi.find('a',
class_="sc-152o12i-9 fhmSYQ").text.replace('\n', '')
144 link_propertyhub_Nonthaburi = propertyhub_Nonthaburi.find('a',
class_="sc-152o12i-9 fhmSYQ")['href']
145 urlinfo_propertyhub_Nonthaburi=
(url_propertyhub_first+link_propertyhub_Nonthaburi)
146
147 urlinfo_propertyhub_Nonthaburi=
(url_propertyhub_first+link_propertyhub_Nonthaburi)
148 pageinfo_propertyhub_Nonthaburi =
requests.get(urlinfo_propertyhub_Nonthaburi)
149 soupinfo_propertyhub_Nonthaburi =
BeautifulSoup(pageinfo_propertyhub_Nonthaburi.content,'html.parser')
150 datainfo_propertyhub_Nonthaburi =
soupinfo_propertyhub_Nonthaburi.find_all('div',class_="row sc-ves8oa-17 dkHvsM")
151 propertyhubinfo_Nonthaburi_list = []
152 for propertyhubinfo_Nonthaburi in datainfo_propertyhub_Nonthaburi :
153
propertyhubinfo_Nonthaburi_list.append(propertyhubinfo_Nonthaburi.text.replace('\xa
0',''))
154
155 info_Nonthaburi =
[urlinfo_propertyhub_Nonthaburi,name_propertyhub_Nonthaburi,propertyhubinfo_Nonthabu
ri_list]
156 thewriter.writerow(info_Nonthaburi)
157 print("Nonthaburi : success...")
158
159 #Chula Section
160 def Chula():
161 url_propertyhub_Chula =
f"https://propertyhub.in.th/%E0%B9%80%E0%B8%8A%E0%B9%88%E0%B8%B2%E0%B8%84%E0%B8%AD%E
0%B8%99%E0%B9%82%E0%B8%94/%E0%B8%88%E0%B8%B8%E0%B8%AC%E0%B8%B2%E0%B8%A5%E0%B8%87%E0%
B8%81%E0%B8%A3%E0%B8%93%E0%B9%8C%E0%B8%A1%E0%B8%AB%E0%B8%B2%E0%B8%A7%E0%B8%B4%E0%B8%
97%E0%B8%A2%E0%B8%B2%E0%B8%A5%E0%B8%B1%E0%B8%A2"
162 page_propertyhub_Chula = requests.get(url_propertyhub_Chula)
163 soup_propertyhub_Chula =
BeautifulSoup(page_propertyhub_Chula.content,'html.parser')
164 data_propertyhub_Chula = soup_propertyhub_Chula.find_all('div',class_="sc-
152o12i-0 tLuGm sc-i5hg7z-1 iokjfP")
165 with open('Propertyhub_Chula.csv', 'w',encoding='utf-8-sig', newline='') as
f:
166 thewriter = writer(f)
167 header = ['Link','Name','Info']
168 thewriter.writerow(header)
169
170 propertyhub_Chula_list = []
171 for propertyhub_Chula in data_propertyhub_Chula :
172 propertyhub_Chula_list.append(propertyhub_Chula.text)
173 name_propertyhub_Chula = propertyhub_Chula.find('a', class_="sc-
152o12i-9 fhmSYQ").text.replace('\n', '')
174 link_propertyhub_Chula = propertyhub_Chula.find('a', class_="sc-
152o12i-9 fhmSYQ")['href']
175 urlinfo_propertyhub_Chula=
(url_propertyhub_first+link_propertyhub_Chula)
176
177 urlinfo_propertyhub_Chula=
(url_propertyhub_first+link_propertyhub_Chula)
178 pageinfo_propertyhub_Chula = requests.get(urlinfo_propertyhub_Chula)
179 soupinfo_propertyhub_Chula =
BeautifulSoup(pageinfo_propertyhub_Chula.content,'html.parser')
180 datainfo_propertyhub_Chula =
soupinfo_propertyhub_Chula.find_all('div',class_="row sc-ves8oa-17 dkHvsM")
181 propertyhubinfo_Chula_list = []
182 for propertyhubinfo_Chula in datainfo_propertyhub_Chula :
183
propertyhubinfo_Chula_list.append(propertyhubinfo_Chula.text.replace('\xa0',''))
184
185 info_Chula =
[urlinfo_propertyhub_Chula,name_propertyhub_Chula,propertyhubinfo_Chula_list]
186 thewriter.writerow(info_Chula)
187 print("Chula : success...")
188
189 #Ladprao Section
190 def Ladprao():
191 url_propertyhub_Ladprao =
f"https://propertyhub.in.th/%E0%B9%80%E0%B8%8A%E0%B9%88%E0%B8%B2%E0%B8%84%E0%B8%AD%E
0%B8%99%E0%B9%82%E0%B8%94/%E0%B8%96%E0%B8%99%E0%B8%99%E0%B8%A5%E0%B8%B2%E0%B8%94%E0%
B8%9E%E0%B8%A3%E0%B9%89%E0%B8%B2%E0%B8%A7"
192 page_propertyhub_Ladprao = requests.get(url_propertyhub_Ladprao)
193 soup_propertyhub_Ladprao =
BeautifulSoup(page_propertyhub_Ladprao.content,'html.parser')
194 data_propertyhub_Ladprao =
soup_propertyhub_Ladprao.find_all('div',class_="sc-152o12i-0 tLuGm sc-i5hg7z-1
hwrlNi")
195 with open('Propertyhub_Ladprao.csv', 'w',encoding='utf-8-sig', newline='')
as f:
196 thewriter = writer(f)
197 header = ['Link','Name','Info']
198 thewriter.writerow(header)
199
200 propertyhub_Ladprao_list = []
201 for propertyhub_Ladprao in data_propertyhub_Ladprao :
202 propertyhub_Ladprao_list.append(propertyhub_Ladprao.text)
203 name_propertyhub_Ladprao = propertyhub_Ladprao.find('a', class_="sc-
152o12i-9 fhmSYQ").text.replace('\n', '')
204 link_propertyhub_Ladprao = propertyhub_Ladprao.find('a', class_="sc-
152o12i-9 fhmSYQ")['href']
205 urlinfo_propertyhub_Ladprao=
(url_propertyhub_first+link_propertyhub_Ladprao)
206
207 urlinfo_propertyhub_Ladprao=
(url_propertyhub_first+link_propertyhub_Ladprao)
208 pageinfo_propertyhub_Ladprao =
requests.get(urlinfo_propertyhub_Ladprao)
209 soupinfo_propertyhub_Ladprao =
BeautifulSoup(pageinfo_propertyhub_Ladprao.content,'html.parser')
210 datainfo_propertyhub_Ladprao =
soupinfo_propertyhub_Ladprao.find_all('div',class_="row sc-ves8oa-17 dkHvsM")
211 propertyhubinfo_Ladprao_list = []
212 for propertyhubinfo_Ladprao in datainfo_propertyhub_Ladprao :
213
propertyhubinfo_Ladprao_list.append(propertyhubinfo_Ladprao.text.replace('\xa0','')
)
214
215 info_Ladprao =
[urlinfo_propertyhub_Ladprao,name_propertyhub_Ladprao,propertyhubinfo_Ladprao_list]
216 thewriter.writerow(info_Ladprao)
217 print("Ladprao : success...")
218
219 #Rama9 Section
220 def Rama9():
221 url_propertyhub_Rama9 =
f"https://propertyhub.in.th/%E0%B9%80%E0%B8%8A%E0%B9%88%E0%B8%B2%E0%B8%84%E0%B8%AD%E
0%B8%99%E0%B9%82%E0%B8%94/mrt-
%E0%B8%9E%E0%B8%A3%E0%B8%B0%E0%B8%A3%E0%B8%B2%E0%B8%A1-9"
222 page_propertyhub_Rama9 = requests.get(url_propertyhub_Rama9)
223 soup_propertyhub_Rama9 =
BeautifulSoup(page_propertyhub_Rama9.content,'html.parser')
224 data_propertyhub_Rama9 = soup_propertyhub_Rama9.find_all('div',class_="sc-
152o12i-0 tLuGm sc-i5hg7z-1 iokjfP")
225 with open('Propertyhub_Rama9.csv', 'w',encoding='utf-8-sig', newline='') as
f:
226 thewriter = writer(f)
227 header = ['Link','Name','Info']
228 thewriter.writerow(header)
229
230 propertyhub_Rama9_list = []
231 for propertyhub_Rama9 in data_propertyhub_Rama9 :
232 propertyhub_Rama9_list.append(propertyhub_Rama9.text)
233 name_propertyhub_Rama9 = propertyhub_Rama9.find('a', class_="sc-
152o12i-9 fhmSYQ").text.replace('\n', '')
234 link_propertyhub_Rama9 = propertyhub_Rama9.find('a', class_="sc-
152o12i-9 fhmSYQ")['href']
235 urlinfo_propertyhub_Rama9=
(url_propertyhub_first+link_propertyhub_Rama9)
236
237 urlinfo_propertyhub_Rama9=
(url_propertyhub_first+link_propertyhub_Rama9)
238 pageinfo_propertyhub_Rama9 = requests.get(urlinfo_propertyhub_Rama9)
239 soupinfo_propertyhub_Rama9 =
BeautifulSoup(pageinfo_propertyhub_Rama9.content,'html.parser')
240 datainfo_propertyhub_Rama9 =
soupinfo_propertyhub_Rama9.find_all('div',class_="row sc-ves8oa-17 dkHvsM")
241 propertyhubinfo_Rama9_list = []
242 for propertyhubinfo_Rama9 in datainfo_propertyhub_Rama9 :
243
propertyhubinfo_Rama9_list.append(propertyhubinfo_Rama9.text.replace('\xa0',''))
244
245 info_Rama9 =
[urlinfo_propertyhub_Rama9,name_propertyhub_Rama9,propertyhubinfo_Rama9_list]
246 thewriter.writerow(info_Rama9)
247 print("Rama9 : success...")
248
249 #Ramkhamhaeng Section
250 def Ramkhamhaeng():
251 url_propertyhub_Ramkhamhaeng =
f"https://propertyhub.in.th/%E0%B9%80%E0%B8%8A%E0%B9%88%E0%B8%B2%E0%B8%84%E0%B8%AD%E
0%B8%99%E0%B9%82%E0%B8%94/%E0%B8%96%E0%B8%99%E0%B8%99%E0%B8%A3%E0%B8%B2%E0%B8%A1%E0%
B8%84%E0%B8%B3%E0%B9%81%E0%B8%AB%E0%B8%87"
252 page_propertyhub_Ramkhamhaeng = requests.get(url_propertyhub_Ramkhamhaeng)
253 soup_propertyhub_Ramkhamhaeng =
BeautifulSoup(page_propertyhub_Ramkhamhaeng.content,'html.parser')
254 data_propertyhub_Ramkhamhaeng =
soup_propertyhub_Ramkhamhaeng.find_all('div',class_="sc-152o12i-0 tLuGm sc-i5hg7z-1
hwrlNi")
255 with open('Propertyhub_Ramkhamhaeng.csv', 'w',encoding='utf-8-sig',
newline='') as f:
256 thewriter = writer(f)
257 header = ['Link','Name','Info']
258 thewriter.writerow(header)
259
260 propertyhub_Ramkhamhaeng_list = []
261 for propertyhub_Ramkhamhaeng in data_propertyhub_Ramkhamhaeng :
262 propertyhub_Ramkhamhaeng_list.append(propertyhub_Ramkhamhaeng.text)
263 name_propertyhub_Ramkhamhaeng = propertyhub_Ramkhamhaeng.find('a',
class_="sc-152o12i-9 fhmSYQ").text.replace('\n', '')
264 link_propertyhub_Ramkhamhaeng = propertyhub_Ramkhamhaeng.find('a',
class_="sc-152o12i-9 fhmSYQ")['href']
265 urlinfo_propertyhub_Ramkhamhaeng=
(url_propertyhub_first+link_propertyhub_Ramkhamhaeng)
266
267 urlinfo_propertyhub_Ramkhamhaeng=
(url_propertyhub_first+link_propertyhub_Ramkhamhaeng)
268 pageinfo_propertyhub_Ramkhamhaeng =
requests.get(urlinfo_propertyhub_Ramkhamhaeng)
269 soupinfo_propertyhub_Ramkhamhaeng =
BeautifulSoup(pageinfo_propertyhub_Ramkhamhaeng.content,'html.parser')
270 datainfo_propertyhub_Ramkhamhaeng =
soupinfo_propertyhub_Ramkhamhaeng.find_all('div',class_="row sc-ves8oa-17 dkHvsM")
271 propertyhubinfo_Ramkhamhaeng_list = []
272 for propertyhubinfo_Ramkhamhaeng in
datainfo_propertyhub_Ramkhamhaeng :
273
propertyhubinfo_Ramkhamhaeng_list.append(propertyhubinfo_Ramkhamhaeng.text.replace(
'\xa0',''))
274
275 info_Ramkhamhaeng =
[urlinfo_propertyhub_Ramkhamhaeng,name_propertyhub_Ramkhamhaeng,propertyhubinfo_Ramk
hamhaeng_list]
276 thewriter.writerow(info_Ramkhamhaeng)
277 print("Ramkhamhaeng : success...")
278
279 #Onnuch Section
280 def Onnuch():
281 url_propertyhub_Onnuch =
f"https://propertyhub.in.th/%E0%B9%80%E0%B8%8A%E0%B9%88%E0%B8%B2%E0%B8%84%E0%B8%AD%E
0%B8%99%E0%B9%82%E0%B8%94/bts-
%E0%B8%AD%E0%B9%88%E0%B8%AD%E0%B8%99%E0%B8%99%E0%B8%B8%E0%B8%8A"
282 page_propertyhub_Onnuch = requests.get(url_propertyhub_Onnuch)
283 soup_propertyhub_Onnuch =
BeautifulSoup(page_propertyhub_Onnuch.content,'html.parser')
284 data_propertyhub_Onnuch = soup_propertyhub_Onnuch.find_all('div',class_="sc-
152o12i-0 tLuGm sc-i5hg7z-1 hwrlNi")
285 with open('Propertyhub_Onnuch.csv', 'w',encoding='utf-8-sig', newline='') as
f:
286 thewriter = writer(f)
287 header = ['Link','Name','Info']
288 thewriter.writerow(header)
289
290 propertyhub_Onnuch_list = []
291 for propertyhub_Onnuch in data_propertyhub_Onnuch :
292 propertyhub_Onnuch_list.append(propertyhub_Onnuch.text)
293 name_propertyhub_Onnuch = propertyhub_Onnuch.find('a', class_="sc-
152o12i-9 fhmSYQ").text.replace('\n', '')
294 link_propertyhub_Onnuch = propertyhub_Onnuch.find('a', class_="sc-
152o12i-9 fhmSYQ")['href']
295 urlinfo_propertyhub_Onnuch=
(url_propertyhub_first+link_propertyhub_Onnuch)
296
297 urlinfo_propertyhub_Onnuch=
(url_propertyhub_first+link_propertyhub_Onnuch)
298 pageinfo_propertyhub_Onnuch =
requests.get(urlinfo_propertyhub_Onnuch)
299 soupinfo_propertyhub_Onnuch =
BeautifulSoup(pageinfo_propertyhub_Onnuch.content,'html.parser')
300 datainfo_propertyhub_Onnuch =
soupinfo_propertyhub_Onnuch.find_all('div',class_="row sc-ves8oa-17 dkHvsM")
301 propertyhubinfo_Onnuch_list = []
302 for propertyhubinfo_Onnuch in datainfo_propertyhub_Onnuch :
303
propertyhubinfo_Onnuch_list.append(propertyhubinfo_Onnuch.text.replace('\xa0',''))
304
305 info_Onnuch =
[urlinfo_propertyhub_Onnuch,name_propertyhub_Onnuch,propertyhubinfo_Onnuch_list]
306 thewriter.writerow(info_Onnuch)
307 print("Onnuch : success...")
308
309 #Chaengwattana Section
310 def Chaengwattana():
311 url_propertyhub_Chaengwattana =
f"https://propertyhub.in.th/%E0%B9%80%E0%B8%8A%E0%B9%88%E0%B8%B2%E0%B8%84%E0%B8%AD%E
0%B8%99%E0%B9%82%E0%B8%94/%E0%B8%96%E0%B8%99%E0%B8%99%E0%B9%81%E0%B8%88%E0%B9%89%E0%
B8%87%E0%B8%A7%E0%B8%B1%E0%B8%92%E0%B8%99%E0%B8%B0"
312 page_propertyhub_Chaengwattana = requests.get(url_propertyhub_Chaengwattana)
313 soup_propertyhub_Chaengwattana =
BeautifulSoup(page_propertyhub_Chaengwattana.content,'html.parser')
314 data_propertyhub_Chaengwattana =
soup_propertyhub_Chaengwattana.find_all('div',class_="sc-152o12i-0 tLuGm sc-i5hg7z-1
hwrlNi")
315 with open('Propertyhub_Chaengwattana.csv', 'w',encoding='utf-8-sig',
newline='') as f:
316 thewriter = writer(f)
317 header = ['Link','Name','Info']
318 thewriter.writerow(header)
319
320 propertyhub_Chaengwattana_list = []
321 for propertyhub_Chaengwattana in data_propertyhub_Chaengwattana :
322
propertyhub_Chaengwattana_list.append(propertyhub_Chaengwattana.text)
323 name_propertyhub_Chaengwattana = propertyhub_Chaengwattana.find('a',
class_="sc-152o12i-9 fhmSYQ").text.replace('\n', '')
324 link_propertyhub_Chaengwattana = propertyhub_Chaengwattana.find('a',
class_="sc-152o12i-9 fhmSYQ")['href']
325 urlinfo_propertyhub_Chaengwattana=
(url_propertyhub_first+link_propertyhub_Chaengwattana)
326
327 urlinfo_propertyhub_Chaengwattana=
(url_propertyhub_first+link_propertyhub_Chaengwattana)
328 pageinfo_propertyhub_Chaengwattana =
requests.get(urlinfo_propertyhub_Chaengwattana)
329 soupinfo_propertyhub_Chaengwattana =
BeautifulSoup(pageinfo_propertyhub_Chaengwattana.content,'html.parser')
330 datainfo_propertyhub_Chaengwattana =
soupinfo_propertyhub_Chaengwattana.find_all('div',class_="row sc-ves8oa-17 dkHvsM")
331 propertyhubinfo_Chaengwattana_list = []
332 for propertyhubinfo_Chaengwattana in
datainfo_propertyhub_Chaengwattana :
333
propertyhubinfo_Chaengwattana_list.append(propertyhubinfo_Chaengwattana.text.replac
e('\xa0',''))
334
335 info_Chaengwattana =
[urlinfo_propertyhub_Chaengwattana,name_propertyhub_Chaengwattana,propertyhubinfo_Ch
aengwattana_list]
336 thewriter.writerow(info_Chaengwattana)
337 print("Chaengwattana : success...")
338
339 #SaphanKwai Section
340 def SaphanKwai():
341 url_propertyhub_SaphanKwai =
f"https://propertyhub.in.th/%E0%B9%80%E0%B8%8A%E0%B9%88%E0%B8%B2%E0%B8%84%E0%B8%AD%E
0%B8%99%E0%B9%82%E0%B8%94/%E0%B8%AA%E0%B8%B0%E0%B8%9E%E0%B8%B2%E0%B8%99%E0%B8%84%E0%
B8%A7%E0%B8%B2%E0%B8%A2"
342 page_propertyhub_SaphanKwai = requests.get(url_propertyhub_SaphanKwai)
343 soup_propertyhub_SaphanKwai =
BeautifulSoup(page_propertyhub_SaphanKwai.content,'html.parser')
344 data_propertyhub_SaphanKwai =
soup_propertyhub_SaphanKwai.find_all('div',class_="sc-152o12i-0 tLuGm sc-i5hg7z-1
hwrlNi")
345 with open('Propertyhub_SaphanKwai.csv', 'w',encoding='utf-8-sig',
newline='') as f:
346 thewriter = writer(f)
347 header = ['Link','Name','Info']
348 thewriter.writerow(header)
349
350 propertyhub_SaphanKwai_list = []
351 for propertyhub_SaphanKwai in data_propertyhub_SaphanKwai :
352 propertyhub_SaphanKwai_list.append(propertyhub_SaphanKwai.text)
353 name_propertyhub_SaphanKwai = propertyhub_SaphanKwai.find('a',
class_="sc-152o12i-9 fhmSYQ").text.replace('\n', '')
354 link_propertyhub_SaphanKwai = propertyhub_SaphanKwai.find('a',
class_="sc-152o12i-9 fhmSYQ")['href']
355 urlinfo_propertyhub_SaphanKwai=
(url_propertyhub_first+link_propertyhub_SaphanKwai)
356
357 urlinfo_propertyhub_SaphanKwai=
(url_propertyhub_first+link_propertyhub_SaphanKwai)
358 pageinfo_propertyhub_SaphanKwai =
requests.get(urlinfo_propertyhub_SaphanKwai)
359 soupinfo_propertyhub_SaphanKwai =
BeautifulSoup(pageinfo_propertyhub_SaphanKwai.content,'html.parser')
360 datainfo_propertyhub_SaphanKwai =
soupinfo_propertyhub_SaphanKwai.find_all('div',class_="row sc-ves8oa-17 dkHvsM")
361 propertyhubinfo_SaphanKwai_list = []
362 for propertyhubinfo_SaphanKwai in datainfo_propertyhub_SaphanKwai :
363
propertyhubinfo_SaphanKwai_list.append(propertyhubinfo_SaphanKwai.text.replace('\xa
0',''))
364
365 info_SaphanKwai =
[urlinfo_propertyhub_SaphanKwai,name_propertyhub_SaphanKwai,propertyhubinfo_SaphanKw
ai_list]
366 thewriter.writerow(info_SaphanKwai)
367 print("SaphanKwai : success...")
368
369 #Sathon Section
370 def Sathon():
371 url_propertyhub_Sathon =
f"https://propertyhub.in.th/%E0%B9%80%E0%B8%8A%E0%B9%88%E0%B8%B2%E0%B8%84%E0%B8%AD%E
0%B8%99%E0%B9%82%E0%B8%94/%E0%B8%AA%E0%B8%B5%E0%B8%A5%E0%B8%A1-
%E0%B8%AA%E0%B8%B2%E0%B8%98%E0%B8%A3"
372 page_propertyhub_Sathon = requests.get(url_propertyhub_Sathon)
373 soup_propertyhub_Sathon =
BeautifulSoup(page_propertyhub_Sathon.content,'html.parser')
374 data_propertyhub_Sathon = soup_propertyhub_Sathon.find_all('div',class_="sc-
152o12i-0 tLuGm sc-i5hg7z-1 iokjfP")
375 with open('Propertyhub_Sathon.csv', 'w',encoding='utf-8-sig', newline='') as
f:
376 thewriter = writer(f)
377 header = ['Link','Name','Info']
378 thewriter.writerow(header)
379
380 propertyhub_Sathon_list = []
381 for propertyhub_Sathon in data_propertyhub_Sathon :
382 propertyhub_Sathon_list.append(propertyhub_Sathon.text)
383 name_propertyhub_Sathon = propertyhub_Sathon.find('a', class_="sc-
152o12i-9 fhmSYQ").text.replace('\n', '')
384 link_propertyhub_Sathon = propertyhub_Sathon.find('a', class_="sc-
152o12i-9 fhmSYQ")['href']
385 urlinfo_propertyhub_Sathon=
(url_propertyhub_first+link_propertyhub_Sathon)
386
387 urlinfo_propertyhub_Sathon=
(url_propertyhub_first+link_propertyhub_Sathon)
388 pageinfo_propertyhub_Sathon =
requests.get(urlinfo_propertyhub_Sathon)
389 soupinfo_propertyhub_Sathon =
BeautifulSoup(pageinfo_propertyhub_Sathon.content,'html.parser')
390 datainfo_propertyhub_Sathon =
soupinfo_propertyhub_Sathon.find_all('div',class_="row sc-ves8oa-17 dkHvsM")
391 propertyhubinfo_Sathon_list = []
392 for propertyhubinfo_Sathon in datainfo_propertyhub_Sathon :
393
propertyhubinfo_Sathon_list.append(propertyhubinfo_Sathon.text.replace('\xa0',''))
394
395 info_Sathon =
[urlinfo_propertyhub_Sathon,name_propertyhub_Sathon,propertyhubinfo_Sathon_list]
396 thewriter.writerow(info_Sathon)
397 print("Sathon : success...")
398
399 #Phayathai Section
400 def Phayathai():
401 url_propertyhub_Phayathai =
f"https://propertyhub.in.th/%E0%B9%80%E0%B8%8A%E0%B9%88%E0%B8%B2%E0%B8%84%E0%B8%AD%E
0%B8%99%E0%B9%82%E0%B8%94/bts-%E0%B8%9E%E0%B8%8D%E0%B8%B2%E0%B9%84%E0%B8%97"
402 page_propertyhub_Phayathai = requests.get(url_propertyhub_Phayathai)
403 soup_propertyhub_Phayathai =
BeautifulSoup(page_propertyhub_Phayathai.content,'html.parser')
404 data_propertyhub_Phayathai =
soup_propertyhub_Phayathai.find_all('div',class_="sc-152o12i-0 tLuGm sc-i5hg7z-1
hwrlNi")
405 with open('Propertyhub_Phayathai.csv', 'w',encoding='utf-8-sig', newline='')
as f:
406 thewriter = writer(f)
407 header = ['Link','Name','Info']
408 thewriter.writerow(header)
409
410 propertyhub_Phayathai_list = []
411 for propertyhub_Phayathai in data_propertyhub_Phayathai :
412 propertyhub_Phayathai_list.append(propertyhub_Phayathai.text)
413 name_propertyhub_Phayathai = propertyhub_Phayathai.find('a',
class_="sc-152o12i-9 fhmSYQ").text.replace('\n', '')
414 link_propertyhub_Phayathai = propertyhub_Phayathai.find('a',
class_="sc-152o12i-9 fhmSYQ")['href']
415 urlinfo_propertyhub_Phayathai=
(url_propertyhub_first+link_propertyhub_Phayathai)
416
417 urlinfo_propertyhub_Phayathai=
(url_propertyhub_first+link_propertyhub_Phayathai)
418 pageinfo_propertyhub_Phayathai =
requests.get(urlinfo_propertyhub_Phayathai)
419 soupinfo_propertyhub_Phayathai =
BeautifulSoup(pageinfo_propertyhub_Phayathai.content,'html.parser')
420 datainfo_propertyhub_Phayathai =
soupinfo_propertyhub_Phayathai.find_all('div',class_="row sc-ves8oa-17 dkHvsM")
421 propertyhubinfo_Phayathai_list = []
422 for propertyhubinfo_Phayathai in datainfo_propertyhub_Phayathai :
423
propertyhubinfo_Phayathai_list.append(propertyhubinfo_Phayathai.text.replace('\xa0'
,''))
424
425 info_Phayathai =
[urlinfo_propertyhub_Phayathai,name_propertyhub_Phayathai,propertyhubinfo_Phayathai_
list]
426 thewriter.writerow(info_Phayathai)
427 print("Phayathai : success...")
428
429 #Pinklao Section
430 def Pinklao():
431 url_propertyhub_Pinklao =
f"https://propertyhub.in.th/%E0%B9%80%E0%B8%8A%E0%B9%88%E0%B8%B2%E0%B8%84%E0%B8%AD%E
0%B8%99%E0%B9%82%E0%B8%94/%E0%B9%80%E0%B8%8B%E0%B9%87%E0%B8%99%E0%B8%97%E0%B8%A3%E0%
B8%B1%E0%B8%A5-
%E0%B8%9B%E0%B8%B4%E0%B9%88%E0%B8%99%E0%B9%80%E0%B8%81%E0%B8%A5%E0%B9%89%E0%B8%B2"
432 page_propertyhub_Pinklao = requests.get(url_propertyhub_Pinklao)
433 soup_propertyhub_Pinklao =
BeautifulSoup(page_propertyhub_Pinklao.content,'html.parser')
434 data_propertyhub_Pinklao =
soup_propertyhub_Pinklao.find_all('div',class_="sc-152o12i-0 tLuGm sc-i5hg7z-1
hwrlNi")
435 with open('Propertyhub_Pinklao.csv', 'w',encoding='utf-8-sig', newline='')
as f:
436 thewriter = writer(f)
437 header = ['Link','Name','Info']
438 thewriter.writerow(header)
439
440 propertyhub_Pinklao_list = []
441 for propertyhub_Pinklao in data_propertyhub_Pinklao :
442 propertyhub_Pinklao_list.append(propertyhub_Pinklao.text)
443 name_propertyhub_Pinklao = propertyhub_Pinklao.find('a', class_="sc-
152o12i-9 fhmSYQ").text.replace('\n', '')
444 link_propertyhub_Pinklao = propertyhub_Pinklao.find('a', class_="sc-
152o12i-9 fhmSYQ")['href']
445 urlinfo_propertyhub_Pinklao=
(url_propertyhub_first+link_propertyhub_Pinklao)
446
447 urlinfo_propertyhub_Pinklao=
(url_propertyhub_first+link_propertyhub_Pinklao)
448 pageinfo_propertyhub_Pinklao =
requests.get(urlinfo_propertyhub_Pinklao)
449 soupinfo_propertyhub_Pinklao =
BeautifulSoup(pageinfo_propertyhub_Pinklao.content,'html.parser')
450 datainfo_propertyhub_Pinklao =
soupinfo_propertyhub_Pinklao.find_all('div',class_="row sc-ves8oa-17 dkHvsM")
451 propertyhubinfo_Pinklao_list = []
452 for propertyhubinfo_Pinklao in datainfo_propertyhub_Pinklao :
453
propertyhubinfo_Pinklao_list.append(propertyhubinfo_Pinklao.text.replace('\xa0','')
)
454
455 info_Pinklao =
[urlinfo_propertyhub_Pinklao,name_propertyhub_Pinklao,propertyhubinfo_Pinklao_list]
456 thewriter.writerow(info_Pinklao)
457 print("Pinklao : success...")
458
459 #TaladPlu Section
460 def TaladPlu():
461 url_propertyhub_TaladPlu =
f"https://propertyhub.in.th/%E0%B9%80%E0%B8%8A%E0%B9%88%E0%B8%B2%E0%B8%84%E0%B8%AD%E
0%B8%99%E0%B9%82%E0%B8%94/bts-
%E0%B8%95%E0%B8%A5%E0%B8%B2%E0%B8%94%E0%B8%9E%E0%B8%A5%E0%B8%B9"
462 page_propertyhub_TaladPlu = requests.get(url_propertyhub_TaladPlu)
463 soup_propertyhub_TaladPlu =
BeautifulSoup(page_propertyhub_TaladPlu.content,'html.parser')
464 data_propertyhub_TaladPlu =
soup_propertyhub_TaladPlu.find_all('div',class_="sc-152o12i-0 tLuGm sc-i5hg7z-1
iokjfP")
465 with open('Propertyhub_TaladPlu.csv', 'w',encoding='utf-8-sig', newline='')
as f:
466 thewriter = writer(f)
467 header = ['Link','Name','Info']
468 thewriter.writerow(header)
469
470 propertyhub_TaladPlu_list = []
471 for propertyhub_TaladPlu in data_propertyhub_TaladPlu :
472 propertyhub_TaladPlu_list.append(propertyhub_TaladPlu.text)
473 name_propertyhub_TaladPlu = propertyhub_TaladPlu.find('a',
class_="sc-152o12i-9 fhmSYQ").text.replace('\n', '')
474 link_propertyhub_TaladPlu = propertyhub_TaladPlu.find('a',
class_="sc-152o12i-9 fhmSYQ")['href']
475 urlinfo_propertyhub_TaladPlu=
(url_propertyhub_first+link_propertyhub_TaladPlu)
476
477 urlinfo_propertyhub_TaladPlu=
(url_propertyhub_first+link_propertyhub_TaladPlu)
478 pageinfo_propertyhub_TaladPlu =
requests.get(urlinfo_propertyhub_TaladPlu)
479 soupinfo_propertyhub_TaladPlu =
BeautifulSoup(pageinfo_propertyhub_TaladPlu.content,'html.parser')
480 datainfo_propertyhub_TaladPlu =
soupinfo_propertyhub_TaladPlu.find_all('div',class_="row sc-ves8oa-17 dkHvsM")
481 propertyhubinfo_TaladPlu_list = []
482 for propertyhubinfo_TaladPlu in datainfo_propertyhub_TaladPlu :
483
propertyhubinfo_TaladPlu_list.append(propertyhubinfo_TaladPlu.text.replace('\xa0','
'))
484
485 info_TaladPlu =
[urlinfo_propertyhub_TaladPlu,name_propertyhub_TaladPlu,propertyhubinfo_TaladPlu_lis
t]
486 thewriter.writerow(info_TaladPlu)
487 print("TaladPlu : success...")
488
489 #Vibhavadi Section
490 def Vibhavadi():
491 url_propertyhub_Vibhavadi =
f"https://propertyhub.in.th/%E0%B9%80%E0%B8%8A%E0%B9%88%E0%B8%B2%E0%B8%84%E0%B8%AD%E
0%B8%99%E0%B9%82%E0%B8%94/%E0%B8%96%E0%B8%99%E0%B8%99%E0%B8%A7%E0%B8%B4%E0%B8%A0%E0%
B8%B2%E0%B8%A7%E0%B8%94%E0%B8%B5%E0%B8%A3%E0%B8%B1%E0%B8%87%E0%B8%AA%E0%B8%B4%E0%B8%
95"
492 page_propertyhub_Vibhavadi = requests.get(url_propertyhub_Vibhavadi)
493 soup_propertyhub_Vibhavadi =
BeautifulSoup(page_propertyhub_Vibhavadi.content,'html.parser')
494 data_propertyhub_Vibhavadi =
soup_propertyhub_Vibhavadi.find_all('div',class_="sc-152o12i-0 tLuGm sc-i5hg7z-1
iokjfP")
495 with open('Propertyhub_Vibhavadi.csv', 'w',encoding='utf-8-sig', newline='')
as f:
496 thewriter = writer(f)
497 header = ['Link','Name','Info']
498 thewriter.writerow(header)
499
500 propertyhub_Vibhavadi_list = []
501 for propertyhub_Vibhavadi in data_propertyhub_Vibhavadi :
502 propertyhub_Vibhavadi_list.append(propertyhub_Vibhavadi.text)
503 name_propertyhub_Vibhavadi = propertyhub_Vibhavadi.find('a',
class_="sc-152o12i-9 fhmSYQ").text.replace('\n', '')
504 link_propertyhub_Vibhavadi = propertyhub_Vibhavadi.find('a',
class_="sc-152o12i-9 fhmSYQ")['href']
505 urlinfo_propertyhub_Vibhavadi=
(url_propertyhub_first+link_propertyhub_Vibhavadi)
506
507 urlinfo_propertyhub_Vibhavadi=
(url_propertyhub_first+link_propertyhub_Vibhavadi)
508 pageinfo_propertyhub_Vibhavadi =
requests.get(urlinfo_propertyhub_Vibhavadi)
509 soupinfo_propertyhub_Vibhavadi =
BeautifulSoup(pageinfo_propertyhub_Vibhavadi.content,'html.parser')
510 datainfo_propertyhub_Vibhavadi =
soupinfo_propertyhub_Vibhavadi.find_all('div',class_="row sc-ves8oa-17 dkHvsM")
511 propertyhubinfo_Vibhavadi_list = []
512 for propertyhubinfo_Vibhavadi in datainfo_propertyhub_Vibhavadi :
513
propertyhubinfo_Vibhavadi_list.append(propertyhubinfo_Vibhavadi.text.replace('\xa0'
,''))
514
515 info_Vibhavadi =
[urlinfo_propertyhub_Vibhavadi,name_propertyhub_Vibhavadi,propertyhubinfo_Vibhavadi_
list]
516 thewriter.writerow(info_Vibhavadi)
517 print("Vibhavadi : success...")
518
519 #PhraKhanong Section
520 def PhraKhanong():
521 url_propertyhub_PhraKhanong =
f"https://propertyhub.in.th/%E0%B9%80%E0%B8%8A%E0%B9%88%E0%B8%B2%E0%B8%84%E0%B8%AD%E
0%B8%99%E0%B9%82%E0%B8%94/bts-
%E0%B8%9E%E0%B8%A3%E0%B8%B0%E0%B9%82%E0%B8%82%E0%B8%99%E0%B8%87"
522 page_propertyhub_PhraKhanong = requests.get(url_propertyhub_PhraKhanong)
523 soup_propertyhub_PhraKhanong =
BeautifulSoup(page_propertyhub_PhraKhanong.content,'html.parser')
524 data_propertyhub_PhraKhanong =
soup_propertyhub_PhraKhanong.find_all('div',class_="sc-152o12i-0 tLuGm sc-i5hg7z-1
iokjfP")
525 with open('Propertyhub_PhraKhanong.csv', 'w',encoding='utf-8-sig',
newline='') as f:
526 thewriter = writer(f)
527 header = ['Link','Name','Info']
528 thewriter.writerow(header)
529
530 propertyhub_PhraKhanong_list = []
531 for propertyhub_PhraKhanong in data_propertyhub_PhraKhanong :
532 propertyhub_PhraKhanong_list.append(propertyhub_PhraKhanong.text)
533 name_propertyhub_PhraKhanong = propertyhub_PhraKhanong.find('a',
class_="sc-152o12i-9 fhmSYQ").text.replace('\n', '')
534 link_propertyhub_PhraKhanong = propertyhub_PhraKhanong.find('a',
class_="sc-152o12i-9 fhmSYQ")['href']
535 urlinfo_propertyhub_PhraKhanong=
(url_propertyhub_first+link_propertyhub_PhraKhanong)
536
537 urlinfo_propertyhub_PhraKhanong=
(url_propertyhub_first+link_propertyhub_PhraKhanong)
538 pageinfo_propertyhub_PhraKhanong =
requests.get(urlinfo_propertyhub_PhraKhanong)
539 soupinfo_propertyhub_PhraKhanong =
BeautifulSoup(pageinfo_propertyhub_PhraKhanong.content,'html.parser')
540 datainfo_propertyhub_PhraKhanong =
soupinfo_propertyhub_PhraKhanong.find_all('div',class_="row sc-ves8oa-17 dkHvsM")
541 propertyhubinfo_PhraKhanong_list = []
542 for propertyhubinfo_PhraKhanong in datainfo_propertyhub_PhraKhanong
:
543
propertyhubinfo_PhraKhanong_list.append(propertyhubinfo_PhraKhanong.text.replace('\
xa0',''))
544
545 info_PhraKhanong =
[urlinfo_propertyhub_PhraKhanong,name_propertyhub_PhraKhanong,propertyhubinfo_PhraKh
anong_list]
546 thewriter.writerow(info_PhraKhanong)
547 print("PhraKhanong : success...")
548
549 #Ratchayothin Section
550 def Ratchayothin():
551 url_propertyhub_Ratchayothin =
f"https://propertyhub.in.th/%E0%B9%80%E0%B8%8A%E0%B9%88%E0%B8%B2%E0%B8%84%E0%B8%AD%E
0%B8%99%E0%B9%82%E0%B8%94/%E0%B8%A3%E0%B8%B1%E0%B8%8A%E0%B9%82%E0%B8%A2%E0%B8%98%E0%
B8%B4%E0%B8%99"
552 page_propertyhub_Ratchayothin = requests.get(url_propertyhub_Ratchayothin)
553 soup_propertyhub_Ratchayothin =
BeautifulSoup(page_propertyhub_Ratchayothin.content,'html.parser')
554 data_propertyhub_Ratchayothin =
soup_propertyhub_Ratchayothin.find_all('div',class_="sc-152o12i-0 tLuGm sc-i5hg7z-1
hwrlNi")
555 with open('Propertyhub_Ratchayothin.csv', 'w',encoding='utf-8-sig',
newline='') as f:
556 thewriter = writer(f)
557 header = ['Link','Name','Info']
558 thewriter.writerow(header)
559
560 propertyhub_Ratchayothin_list = []
561 for propertyhub_Ratchayothin in data_propertyhub_Ratchayothin :
562 propertyhub_Ratchayothin_list.append(propertyhub_Ratchayothin.text)
563 name_propertyhub_Ratchayothin = propertyhub_Ratchayothin.find('a',
class_="sc-152o12i-9 fhmSYQ").text.replace('\n', '')
564 link_propertyhub_Ratchayothin = propertyhub_Ratchayothin.find('a',
class_="sc-152o12i-9 fhmSYQ")['href']
565 urlinfo_propertyhub_Ratchayothin=
(url_propertyhub_first+link_propertyhub_Ratchayothin)
566
567 urlinfo_propertyhub_Ratchayothin=
(url_propertyhub_first+link_propertyhub_Ratchayothin)
568 pageinfo_propertyhub_Ratchayothin =
requests.get(urlinfo_propertyhub_Ratchayothin)
569 soupinfo_propertyhub_Ratchayothin =
BeautifulSoup(pageinfo_propertyhub_Ratchayothin.content,'html.parser')
570 datainfo_propertyhub_Ratchayothin =
soupinfo_propertyhub_Ratchayothin.find_all('div',class_="row sc-ves8oa-17 dkHvsM")
571 propertyhubinfo_Ratchayothin_list = []
572 for propertyhubinfo_Ratchayothin in
datainfo_propertyhub_Ratchayothin :
573
propertyhubinfo_Ratchayothin_list.append(propertyhubinfo_Ratchayothin.text.replace(
'\xa0',''))
574
575 info_Ratchayothin =
[urlinfo_propertyhub_Ratchayothin,name_propertyhub_Ratchayothin,propertyhubinfo_Ratc
hayothin_list]
576 thewriter.writerow(info_Ratchayothin)
577 print("Ratchayothin : success...")
#ui section
class App:
    """Tkinter front-end for the Propertyhub scraper.

    One button per area; clicking a button runs the matching
    ``ScrapingAll`` scraper synchronously, so the UI freezes until the
    scrape finishes (progress is printed to stdout).
    """

    def __init__(self, root):
        """Build the fixed-size, screen-centered window and all widgets."""
        #setting title
        root.title("Webscrapping Propertyhub")
        #setting window size, centered on the screen
        width = 730
        height = 490
        screenwidth = root.winfo_screenwidth()
        screenheight = root.winfo_screenheight()
        root.geometry('%dx%d+%d+%d' % (width, height,
                                       (screenwidth - width) / 2,
                                       (screenheight - height) / 2))
        root.resizable(width=False, height=False)

        # Plain grey panel filling the window behind the buttons.
        background = tk.Label(root)
        background["activebackground"] = "#000000"
        background["activeforeground"] = "#000000"
        background["bg"] = "#e3e3e3"
        background["font"] = tkFont.Font(family='Times', size=28)
        background["fg"] = "#ffffff"
        background["justify"] = "center"
        background["text"] = " "
        background.place(x=0, y=0, width=730, height=490)

        # One spec per area button: (text, x, y, bg, fg, command).
        # Columns sit at x = 70 / 230 / 390 / 550 with colors fading down
        # each column.  (A stray one-off "disabledforeground" the original
        # set only on the Chula button was dropped for consistency.)
        button_specs = [
            ("Bangkok",       70, 130, "#1f93ff", "#ffffff", self.btn_Bangkok_command),
            ("Ratchada",      70, 200, "#43a4ff", "#ffffff", self.btn_Ratchada_command),
            ("Muangthong",    70, 270, "#61b3ff", "#ffffff", self.btn_Muangthong_command),
            ("Sukumvit",      70, 340, "#8dc8ff", "#000000", self.btn_Sukumvit_command),
            ("Nonthaburi",    70, 410, "#cde7ff", "#000000", self.btn_Nonthaburi_command),
            ("Chula",        230, 130, "#5de65d", "#000000", self.btn_Chula_command),
            ("Ladprao",      230, 200, "#87de87", "#000000", self.btn_Ladprao_command),
            ("Rama 9",       230, 270, "#a9e7a9", "#000000", self.btn_Rama9_command),
            ("Ramkhamhaeng", 230, 340, "#c2edc2", "#000000", self.btn_Ramkhamhaeng_command),
            ("Onnuch",       230, 410, "#dff4df", "#000000", self.btn_Onnuch_command),
            ("Chaengwattana", 390, 130, "#ff0606", "#ffffff", self.btn_Chaengwattana_command),
            ("SaphanKwai",   390, 200, "#f83434", "#ffffff", self.btn_Saphankwai_command),
            ("Sathon",       390, 270, "#ff5a5a", "#ffffff", self.btn_Sathon_command),
            ("Phayathai",    390, 340, "#ff9a9a", "#000000", self.btn_Phayathai_command),
            ("Pinklao",      390, 410, "#ffd2d2", "#000000", self.btn_Pinklao_command),
            ("TaladPlu",     550, 130, "#ff5722", "#ffffff", self.btn_Taladplu_command),
            ("Vibhavadi",    550, 200, "#ff6d3f", "#ffffff", self.btn_Vibhavadi_command),
            ("PhraKhanong",  550, 270, "#fe9474", "#ffffff", self.btn_Phrakhanong_command),
            ("Ratchayothin", 550, 340, "#ffb29a", "#000000", self.btn_Ratchayothin_command),
            ("ALL_AREA",     550, 410, "#ffd2c4", "#000000", self.btn_Allaera_command),
        ]
        for text, x, y, bg, fg, command in button_specs:
            self._make_button(root, text, x, y, bg, fg, command)

        # Title banner at the top of the window.
        label_propertyhub = tk.Label(root)
        label_propertyhub["bg"] = "#3fb7e7"
        label_propertyhub["disabledforeground"] = "#ffffff"
        label_propertyhub["font"] = tkFont.Font(family='Times', size=22)
        label_propertyhub["fg"] = "#ffffff"
        label_propertyhub["justify"] = "center"
        label_propertyhub["text"] = "PropertyHub"
        label_propertyhub.place(x=270, y=40, width=208, height=48)

    @staticmethod
    def _make_button(root, text, x, y, bg, fg, command):
        """Create one fixed-size (120x40) area button at (x, y)."""
        btn = tk.Button(root)
        btn["bg"] = bg
        btn["font"] = tkFont.Font(family='Times', size=12)
        btn["fg"] = fg
        btn["justify"] = "center"
        btn["text"] = text
        btn.place(x=x, y=y, width=120, height=40)
        btn["command"] = command
        return btn

    #Btn Section
    @staticmethod
    def _run(label, scraper):
        """Announce the job on stdout, then run the blocking scraper."""
        print(f"{label} : Please..wait..")
        scraper()

    def btn_Bangkok_command(self):
        self._run("Bangkok", ScrapingAll.Bangkok)

    def btn_Ratchada_command(self):
        self._run("Ratchada", ScrapingAll.Ratchada)

    def btn_Muangthong_command(self):
        self._run("Muangthong", ScrapingAll.Muangthong)

    def btn_Sukumvit_command(self):
        self._run("Sukumvit", ScrapingAll.Sukumvit)

    def btn_Nonthaburi_command(self):
        self._run("Nonthaburi", ScrapingAll.Nonthaburi)

    def btn_Chula_command(self):
        self._run("Chula", ScrapingAll.Chula)

    def btn_Ladprao_command(self):
        self._run("Ladprao", ScrapingAll.Ladprao)

    def btn_Rama9_command(self):
        self._run("Rama9", ScrapingAll.Rama9)

    def btn_Ramkhamhaeng_command(self):
        self._run("Ramkhamhaeng", ScrapingAll.Ramkhamhaeng)

    def btn_Onnuch_command(self):
        self._run("Onnuch", ScrapingAll.Onnuch)

    def btn_Chaengwattana_command(self):
        self._run("Chaengwattana", ScrapingAll.Chaengwattana)

    def btn_Saphankwai_command(self):
        self._run("Saphankwai", ScrapingAll.SaphanKwai)

    def btn_Sathon_command(self):
        self._run("Sathon", ScrapingAll.Sathon)

    def btn_Phayathai_command(self):
        self._run("Phayathai", ScrapingAll.Phayathai)

    def btn_Pinklao_command(self):
        self._run("Pinklao", ScrapingAll.Pinklao)

    def btn_Taladplu_command(self):
        self._run("Taladplu", ScrapingAll.TaladPlu)

    def btn_Vibhavadi_command(self):
        self._run("Vibhavadi", ScrapingAll.Vibhavadi)

    def btn_Phrakhanong_command(self):
        self._run("Phrakhanong", ScrapingAll.PhraKhanong)

    def btn_Ratchayothin_command(self):
        self._run("Ratchayothin", ScrapingAll.Ratchayothin)

    def btn_Allaera_command(self):
        """Run every area scraper back to back (each prints its own status)."""
        print("ALL AREA : Please..wait..")
        scrapers = (
            ScrapingAll.Bangkok, ScrapingAll.Ratchada, ScrapingAll.Muangthong,
            ScrapingAll.Sukumvit, ScrapingAll.Nonthaburi, ScrapingAll.Chula,
            ScrapingAll.Ladprao, ScrapingAll.Rama9, ScrapingAll.Ramkhamhaeng,
            ScrapingAll.Onnuch, ScrapingAll.Chaengwattana, ScrapingAll.SaphanKwai,
            ScrapingAll.Sathon, ScrapingAll.Phayathai, ScrapingAll.Pinklao,
            ScrapingAll.TaladPlu, ScrapingAll.Vibhavadi, ScrapingAll.PhraKhanong,
            ScrapingAll.Ratchayothin,
        )
        for scraper in scrapers:
            scraper()
        # Typo fix: the original printed "Seccess".
        print("All AREA : Success..")
if __name__ == "__main__":
    # Build the Tk root window, attach the UI, and enter the event loop.
    main_window = tk.Tk()
    ui = App(main_window)
    main_window.mainloop()