Starchik committed on
Commit
1165dfe
1 Parent(s): 2c6e647

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +306 -0
app.py ADDED
@@ -0,0 +1,306 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import requests
3
+ from requests.exceptions import RequestException
4
+ from bs4 import BeautifulSoup
5
+ import concurrent.futures
6
+ from io import BytesIO
7
+ import streamlit as st
8
+
9
+
10
+
11
def extract_part_numbers_and_manufacturers(url):
    """Scrape a dok.ua article page for part numbers and manufacturers.

    Parameters:
        url: Absolute URL of a dok.ua article page.

    Returns:
        A pair ``(chunks, part_urls)`` where ``chunks`` is the list of
        ``'<part_number> - <manufacturer>'`` strings split into sublists of
        at most 100 entries, and ``part_urls`` is a flat list of
        alfacars.com.ua search URLs (one per part). Returns ``(None, None)``
        on a network error, a non-200 response, or when no parts were found.
    """
    try:
        # timeout guards against hanging indefinitely on an unresponsive host
        response = requests.get(url, timeout=10)
    except requests.RequestException:
        return None, None
    if response.status_code != 200:
        return None, None

    soup = BeautifulSoup(response.text, 'html.parser')
    # part links on the page look like '/art-<number>-<manufacturer>-...'
    art_links = [
        link.get('href')
        for link in soup.find_all('a')
        if link.get('href') and link.get('href').startswith('/art')
    ]
    if not art_links:
        return None, None

    parts_info = []
    part_urls = []
    # set() drops duplicate hrefs; downstream output does not rely on order
    for link in set(art_links):
        parts = link.split('-')
        if len(parts) >= 4:
            part_number = parts[1]
            # manufacturer names may themselves contain '-', so re-join the middle
            manufacturer = '-'.join(parts[2:-1])
            parts_info.append(f'{part_number} - {manufacturer}')
            part_urls.append(
                f'https://alfacars.com.ua/index.php?route=product/search&search={part_number}'
            )

    if not parts_info:
        return None, None
    # chunk the output in groups of 100 so callers can paginate it
    chunks = [parts_info[i:i + 100] for i in range(0, len(parts_info), 100)]
    return chunks, part_urls
44
+
45
+
46
def get_manufacturer_info(manufacturer):
    """Fetch a manufacturer's description and star rating from avto.pro and render it.

    Parameters:
        manufacturer: Brand name used as the path segment of the avto.pro makers URL.

    Side effects:
        Writes the description and a ★/☆ rating line to the Streamlit page,
        or a fallback message when the data cannot be retrieved.
    """
    try:
        url = f'https://avto.pro/makers/{manufacturer}'
        response = requests.get(url, timeout=10)
        if response.status_code == 200:
            soup = BeautifulSoup(response.text, 'html.parser')
            descr_node = soup.find('div', {'class': 'maker-descr'})
            if descr_node is None:
                # Original code called .text on None here and crashed with an
                # AttributeError that the RequestException handler did not catch.
                st.write('Информация о фирме не найдена. Попробуйте ввести VIN-код.')
                return
            description = descr_node.text.strip()
            stars = soup.find('span', {'class': 'star-line'})
            # render filled stars for 'star-line__star--full' icons, hollow otherwise
            stars_html = ''.join(
                '★' if star.get('class') and 'star-line__star--full' in star.get('class') else '☆'
                for star in stars.find_all('i')
            ) if stars else ''
            st.write(f'Информация о фирме {manufacturer}:\n{description}\n{stars_html}')
        else:
            st.write('Ошибка при получении информации о фирме.')
    except requests.RequestException:
        st.write('Информация о фирме не найдена. Попробуйте ввести VIN-код.')
60
+
61
+
62
+
63
def check_part_availability(part_urls):
    """Return the subset of *part_urls* whose product pages show a buy button.

    URLs are probed concurrently via a thread pool; a URL whose check raises
    is logged and treated as unavailable.
    """
    hits = []
    with concurrent.futures.ThreadPoolExecutor() as pool:
        pending = {pool.submit(check_part_url, part_url): part_url for part_url in part_urls}
        for done in concurrent.futures.as_completed(pending):
            part_url = pending[done]
            try:
                if done.result():
                    hits.append(part_url)
            except Exception as exc:
                print(f"Ошибка при проверке {part_url}: {exc}")
    return hits
76
+
77
def check_part_url(url):
    """Return True if the alfacars search page at *url* offers an add-to-cart button.

    Any request failure (or a non-200 status) yields False; network errors are
    logged to stdout rather than raised so callers can treat them as 'unavailable'.
    """
    try:
        # timeout prevents a single dead host from stalling the thread pool
        response = requests.get(url, timeout=10)
        if response.status_code == 200:
            soup = BeautifulSoup(response.text, 'html.parser')
            # the buy button is identified by its onclick handler calling cart.add
            buy_button = soup.find('button', {'onclick': lambda x: x and 'cart.add' in x})
            if buy_button:
                return True
    except requests.RequestException as e:
        print(f"Ошибка при запросе {url}: {e}")
    return False
88
+
89
def get_clickable_links(part_numbers_and_manufacturers):
    """Render '<part> - <manufacturer>' entries as HTML, linking available parts.

    Parameters:
        part_numbers_and_manufacturers: strings of the form
            '<part_number> - <manufacturer>' as produced by
            extract_part_numbers_and_manufacturers.

    Returns:
        A newline-terminated string where parts whose alfacars search page has
        a buy button become <a> links and the rest stay plain text.
    """
    clickable_links = ''
    for part_info in part_numbers_and_manufacturers:
        # maxsplit=1 keeps manufacturer names containing ' - ' from raising ValueError
        part_number, manufacturer = part_info.split(' - ', 1)
        part_number = part_number.split(' ')[0]
        url = f'https://alfacars.com.ua/index.php?route=product/search&search={part_number}'
        # reuse check_part_url so the availability check lives in one place
        # (the original duplicated the request + buy-button parse inline)
        if check_part_url(url):
            clickable_links += f'<a href="{url}">{part_number}</a> - {manufacturer}\n'
        else:
            clickable_links += f'{part_number} - {manufacturer}\n'
    return clickable_links
107
+
108
def get_vin_info(vin_code):
    """Decode *vin_code* via ilcats.ru and render the vehicle data table.

    Parameters:
        vin_code: VIN string to look up.

    Side effects:
        Writes each 'label: value' table row plus a follow-up link to the
        Streamlit page; writes an error message on a non-200 response.
        Network exceptions propagate to the caller (callers wrap this in
        their own try/except).
    """
    url = f"https://www.ilcats.ru/?vin={vin_code}&VinAction=Search"
    headers = {
        # the site rejects requests without a browser-like User-Agent
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/96.0.4664.110 Safari/537.36'
    }
    response = requests.get(url, headers=headers, timeout=10)

    if response.status_code == 200:
        soup = BeautifulSoup(response.content, "html.parser")
        table = soup.find("table")
        if table:
            info = ""
            for row in table.find_all("tr"):
                cols = row.find_all("td")
                # data rows have exactly two cells: label and value
                if len(cols) == 2:
                    label = cols[0].text.strip()
                    value = cols[1].text.strip()
                    info += f"{label}: {value}\n"
            st.text(info)
            st.write(f"Показать информацию: https://starchik-vinco.hf.space?vin_code={vin_code}")
        else:
            # no table means the VIN was not decoded; deliberately render nothing
            st.write("")
    else:
        st.write("Ошибка при запросе информации о VIN-коде. Попробуйте еще раз.")
133
+
134
+
135
+
136
def get_auto_lot_info(query):
    """Search ru.autoauctionspot.com for a salvage-auction lot matching *query*.

    Parameters:
        query: Free-text search term (e.g. a VIN or lot number).

    Returns:
        ``(lot_info, images, final_bid_info)`` where ``lot_info`` is a
        multi-line description string, ``images`` is a list of BytesIO
        objects with the lot photos, and ``final_bid_info`` is the final-bid
        line. Returns ``(None, None, None)`` on any network or parse failure
        (missing page elements raise inside the try and are caught below).
    """
    url = "https://ru.autoauctionspot.com/salvage-cars-auction/"
    payload = {
        'lot-type-available-to-bid': '1',
        'search-term': query,
    }
    images = []  # defined up front: referenced in the image-download loop below

    try:
        # timeouts keep the Streamlit handler from hanging on a slow auction site
        response = requests.post(url, data=payload, timeout=15)
        response.raise_for_status()

        soup = BeautifulSoup(response.content, 'html.parser')
        lot_title = soup.find('h1', class_='slider-name').text.strip()
        # the first char-line holds two damage descriptors (primary/secondary)
        damage_info = soup.find('div', class_='char-line')
        damage_type_primary = damage_info.find('span', class_='char-info-envelope').text.strip()
        damage_type_secondary = damage_info.find_all('span', class_='char-info-envelope')[1].text.strip()

        lot_info = f"Лот: {lot_title}\nУщерб: {damage_type_primary}, {damage_type_secondary}\n"

        # collect every 'name: value' characteristic from all char boxes
        for char_box in soup.find_all('div', class_='char-wrap'):
            for char_line in char_box.find_all('div', class_='char-line'):
                char_name = char_line.find('span', class_='char-name').text.strip()
                char_info = char_line.find('span', class_='char-info').text.strip()
                lot_info += f"{char_name}: {char_info}\n"

        # download every slider photo into memory for st.image
        car_slider = soup.find('div', class_='car-slider')
        for figure in car_slider.find_all('figure'):
            image_response = requests.get(figure.a['href'], timeout=15)
            images.append(BytesIO(image_response.content))

        final_bid_block = soup.find('div', class_='copart-bid_final')
        final_bid_title = final_bid_block.find('span', class_='copart-bid-title').text.strip()
        final_bid_amount = final_bid_block.find('span', class_='copart-price').text.strip()
        final_bid_info = f"{final_bid_title}: {final_bid_amount}"

        return lot_info, images, final_bid_info

    except requests.RequestException as e:
        print(f"Ошибка при запросе {url}: {e}")
        return None, None, None
    except Exception as e:
        # AttributeError/IndexError from missing page elements land here
        print(f"Необработанная ошибка в get_auto_lot_info: {e}")
        return None, None, None
185
+
186
+
187
+
188
def search_part_info(part_code):
    """Top-level search: try dok.ua parts, then auction lots, then VIN decode.

    Parameters:
        part_code: User input — a part code, VIN, or manufacturer name.

    Side effects:
        Renders all results and error messages to the Streamlit page.
    """
    found_results = False
    # Pre-bind found_link: the original referenced it after the non-200 branch
    # where it was never assigned, raising NameError (masked by the broad except).
    found_link = None

    try:
        query = part_code.replace(' ', '+')
        url = f'https://dok.ua/result2/query?q={query}'

        response = requests.get(url, timeout=10)
        if response.status_code == 200:
            soup = BeautifulSoup(response.text, 'html.parser')
            # first article link on the results page, if any
            for link in soup.find_all('a'):
                href = link.get('href')
                if href and '/result2/art?id=' in href:
                    found_link = href
                    break

            if found_link:
                found_url = f'https://dok.ua{found_link}'
                part_info_chunks, _ = extract_part_numbers_and_manufacturers(found_url)
                if part_info_chunks:
                    # flatten the 100-entry chunks back into one list
                    part_numbers_and_manufacturers = [
                        part_info
                        for chunk in part_info_chunks
                        for part_info in chunk
                    ]
                    st.text('\n'.join(part_numbers_and_manufacturers) + '\n')

                    clickable_links = get_clickable_links(part_numbers_and_manufacturers)
                    st.markdown(clickable_links, unsafe_allow_html=True)
                    found_results = True
                else:
                    # no parts on the article page — treat input as a brand name
                    get_manufacturer_info(part_code)
            else:
                # no dok.ua article — try the salvage-auction site instead
                lot_info, images, final_bid_info = get_auto_lot_info(query)

                if lot_info is not None and images is not None and final_bid_info is not None:
                    st.text((lot_info or '') + "\n" + (final_bid_info or ''))
                    if images:
                        st.image(images)
                    found_results = True
                    get_vin_info(part_code)
                else:
                    st.text("Нет результатов...")
        else:
            st.text(f"Ошибка при запросе {url}. Статус код: {response.status_code}")
        if not found_link and not found_results:
            # original literal was mojibake ('Ин??ормация'); restored proper text
            st.text('Информация не найдена')

    except Exception as e:
        st.text(f"Ошибка в обработчике сообщений: {e}")
        if not found_results:
            st.text('Что-то пошло не так... Пожалуйста, попробуйте позже.')
242
+
243
+
244
+
245
if __name__ == '__main__':
    # Streamlit entry point: one text input + search button driving the same
    # dok.ua -> auction -> VIN pipeline as search_part_info.
    st.title("Alfabot Streamlit")

    part_code = st.text_input("Введите код запчасти, VIN-код или название фирмы")
    if st.button("Поиск информации"):
        st.text("Идет поиск информации...")

        found_results = False
        # Pre-bind found_link: the original referenced it after the non-200
        # branch where it was never assigned, raising a NameError that the
        # broad except turned into a confusing error message.
        found_link = None

        try:
            query = part_code.replace(' ', '+')
            url = f'https://dok.ua/result2/query?q={query}'

            response = requests.get(url, timeout=10)
            if response.status_code == 200:
                soup = BeautifulSoup(response.text, 'html.parser')
                # first article link on the results page, if any
                for link in soup.find_all('a'):
                    href = link.get('href')
                    if href and '/result2/art?id=' in href:
                        found_link = href
                        break

                if found_link:
                    found_url = f'https://dok.ua{found_link}'
                    part_info_chunks, _ = extract_part_numbers_and_manufacturers(found_url)
                    if part_info_chunks:
                        # flatten the 100-entry chunks back into one list
                        part_numbers_and_manufacturers = [
                            part_info
                            for chunk in part_info_chunks
                            for part_info in chunk
                        ]
                        st.text('\n'.join(part_numbers_and_manufacturers) + '\n')

                        clickable_links = get_clickable_links(part_numbers_and_manufacturers)
                        st.markdown(clickable_links, unsafe_allow_html=True)
                        found_results = True
                    else:
                        # no parts on the article page — treat input as a brand name
                        get_manufacturer_info(part_code)
                else:
                    # no dok.ua article — try the salvage-auction site instead
                    lot_info, images, final_bid_info = get_auto_lot_info(query)

                    if lot_info is not None and images is not None and final_bid_info is not None:
                        st.text((lot_info or '') + "\n" + (final_bid_info or ''))
                        if images:
                            st.image(images)
                        found_results = True
                    else:
                        # deliberate no-op placeholder in the original UI flow
                        st.text("")
            else:
                st.text(f"Ошибка при запросе {url}. Статус код: {response.status_code}")
            if not found_link and not found_results:
                st.text('Информация не найдена')

            # VIN lookup runs regardless of earlier results in this flow
            get_vin_info(part_code)

        except Exception as e:
            st.text(f"Ошибка в обработчике сообщений: {e}")
            if not found_results:
                st.text('Что-то пошло не так... Пожалуйста, попробуйте позже.')
306
+