# alfacrossbot / app.py
import requests
from bs4 import BeautifulSoup
import concurrent.futures
from io import BytesIO
import streamlit as st
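
# A Streamlit app that looks up auto-part, manufacturer, salvage-lot, and VIN
# information by scraping dok.ua, avto.pro, autoauctionspot.com, and ilcats.ru.
# Assuming the usual Space setup with requests, beautifulsoup4, and streamlit
# installed, it can be run locally with:
#   streamlit run app.py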

def extract_part_numbers_and_manufacturers(url):
    """Scrape a dok.ua result page for /art links; return (chunks, part_urls) or (None, None)."""
    try:
        response = requests.get(url)
        if response.status_code == 200:
            soup = BeautifulSoup(response.text, 'html.parser')
            links = soup.find_all('a')
            art_links = [link.get('href') for link in links
                         if link.get('href') and link.get('href').startswith('/art')]
            if art_links:
                unique_art_links = list(set(art_links))
                parts_info = []
                part_urls = []
                for link in unique_art_links:
                    parts = link.split('-')
                    if len(parts) >= 4:
                        part_number = parts[1]
                        manufacturer = '-'.join(parts[2:-1])
                        part_link = f'https://alfacars.com.ua/index.php?route=product/search&search={part_number}'
                        parts_info.append(f'{part_number} - {manufacturer}')
                        part_urls.append(part_link)
                if parts_info:
                    # Split the results into chunks of 100 entries for display.
                    chunks = [parts_info[i:i + 100] for i in range(0, len(parts_info), 100)]
                    return chunks, part_urls
        # Any non-200 response, missing links, or unparseable links falls through here.
        return None, None
    except requests.RequestException:
        return None, None
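
# A minimal sketch of the href shape the parser above assumes (hypothetical values):
#   '/art-0986452041-BOSCH-123456'.split('-') -> ['/art', '0986452041', 'BOSCH', '123456']
#   part_number = '0986452041', manufacturer = 'BOSCH'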

def get_manufacturer_info(manufacturer):
    """Fetch and display a manufacturer description and star rating from avto.pro."""
    try:
        url = f'https://avto.pro/makers/{manufacturer}'
        response = requests.get(url)
        if response.status_code == 200:
            soup = BeautifulSoup(response.text, 'html.parser')
            # Guard against pages without a maker-descr block to avoid an AttributeError.
            descr_block = soup.find('div', {'class': 'maker-descr'})
            if descr_block is None:
                st.write('Manufacturer information not found. Try entering a VIN code.')
                return
            description = descr_block.text.strip()
            stars = soup.find('span', {'class': 'star-line'})
            stars_html = ''.join(
                '★' if star.get('class') and 'star-line__star--full' in star.get('class') else '☆'
                for star in stars.find_all('i')
            ) if stars else ''
            st.write(f'About {manufacturer}:\n{description}\n{stars_html}')
        else:
            st.write('Error while fetching manufacturer information.')
    except requests.RequestException:
        st.write('Manufacturer information not found. Try entering a VIN code.')

def check_part_availability(part_urls):
    """Check part URLs in parallel; return those whose page has an add-to-cart button."""
    available_parts = []
    with concurrent.futures.ThreadPoolExecutor() as executor:
        futures = {executor.submit(check_part_url, url): url for url in part_urls}
        for future in concurrent.futures.as_completed(futures):
            url = futures[future]
            try:
                if future.result():
                    available_parts.append(url)
            except Exception as e:
                print(f"Error while checking {url}: {e}")
    return available_parts

def check_part_url(url):
    """Return True if the page at url contains a cart.add (buy) button."""
    try:
        response = requests.get(url)
        if response.status_code == 200:
            soup = BeautifulSoup(response.text, 'html.parser')
            buy_button = soup.find('button', {'onclick': lambda x: x and 'cart.add' in x})
            if buy_button:
                return True
    except requests.RequestException as e:
        print(f"Error while requesting {url}: {e}")
    return False
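
# check_part_availability / check_part_url are currently unused in this file;
# a minimal usage sketch (the URL below is a hypothetical example):
#   urls = ['https://alfacars.com.ua/index.php?route=product/search&search=0986452041']
#   available = check_part_availability(urls)  # subset of urls that look purchasable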

def get_clickable_links(part_numbers_and_manufacturers):
    """Build an HTML string where purchasable parts link to the alfacars search page."""
    clickable_links = ''
    for part_info in part_numbers_and_manufacturers:
        part_number, manufacturer = part_info.split(' - ')
        part_number = part_number.split(' ')[0]
        url = f'https://alfacars.com.ua/index.php?route=product/search&search={part_number}'
        try:
            response = requests.get(url)
            if response.status_code == 200:
                soup = BeautifulSoup(response.text, 'html.parser')
                buy_button = soup.find('button', {'onclick': lambda x: x and 'cart.add' in x})
                # Use <br> so the line breaks survive st.markdown(..., unsafe_allow_html=True).
                if buy_button:
                    clickable_links += f'<a href="{url}">{part_number}</a> - {manufacturer}<br>'
                else:
                    clickable_links += f'{part_number} - {manufacturer}<br>'
        except requests.RequestException:
            clickable_links += f'{part_number} - {manufacturer}<br>'
    return clickable_links

def get_vin_info(vin_code):
    """Look up a VIN on ilcats.ru and display the decoded vehicle attributes."""
    url = f"https://www.ilcats.ru/?vin={vin_code}&VinAction=Search"
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/96.0.4664.110 Safari/537.36'
    }
    response = requests.get(url, headers=headers)
    if response.status_code == 200:
        soup = BeautifulSoup(response.content, "html.parser")
        table = soup.find("table")
        if table:
            rows = table.find_all("tr")
            info = ""
            for row in rows:
                cols = row.find_all("td")
                if len(cols) == 2:
                    label = cols[0].text.strip()
                    value = cols[1].text.strip()
                    info += f"{label}: {value}\n"
            st.text(info)
            st.write(f"Show details: https://starchik-vinco.hf.space?vin_code={vin_code}")
        # If no table is found, the VIN yielded nothing to display.
    else:
        st.write("Error while requesting VIN information. Please try again.")

def get_auto_lot_info(query):
    """Search autoauctionspot.com for a salvage lot; return (lot_info, images, final_bid_info)."""
    url = "https://ru.autoauctionspot.com/salvage-cars-auction/"
    payload = {
        'lot-type-available-to-bid': '1',
        'search-term': query,
    }
    images = []  # Collected lot photos; defined before use in the loop below.
    try:
        response = requests.post(url, data=payload)
        response.raise_for_status()
        soup = BeautifulSoup(response.content, 'html.parser')
        lot_title = soup.find('h1', class_='slider-name').text.strip()
        damage_info = soup.find('div', class_='char-line')
        damage_type_primary = damage_info.find('span', class_='char-info-envelope').text.strip()
        damage_type_secondary = damage_info.find_all('span', class_='char-info-envelope')[1].text.strip()
        lot_info = f"Lot: {lot_title}\nDamage: {damage_type_primary}, {damage_type_secondary}\n"
        char_wrap = soup.find_all('div', class_='char-wrap')
        for char_box in char_wrap:
            char_lines = char_box.find_all('div', class_='char-line')
            for char_line in char_lines:
                char_name = char_line.find('span', class_='char-name').text.strip()
                char_info = char_line.find('span', class_='char-info').text.strip()
                lot_info += f"{char_name}: {char_info}\n"
        # Download every photo in the lot slider into memory.
        car_slider = soup.find('div', class_='car-slider')
        image_links = [figure.a['href'] for figure in car_slider.find_all('figure')]
        for link in image_links:
            image_response = requests.get(link)
            images.append(BytesIO(image_response.content))
        final_bid_info = soup.find('div', class_='copart-bid_final')
        final_bid_title = final_bid_info.find('span', class_='copart-bid-title').text.strip()
        final_bid_amount = final_bid_info.find('span', class_='copart-price').text.strip()
        final_bid_info = f"{final_bid_title}: {final_bid_amount}"
        return lot_info, images, final_bid_info
    except requests.RequestException as e:
        print(f"Request error for {url}: {e}")
        return None, None, None
    except Exception as e:
        print(f"Unhandled error in get_auto_lot_info: {e}")
        return None, None, None

def search_part_info(part_code):
    """Route a query: part search on dok.ua, then manufacturer, auction-lot, and VIN lookups."""
    found_results = False
    found_link = None  # Initialized up front so the final check cannot hit an undefined name.
    try:
        query = part_code.replace(' ', '+')
        url = f'https://dok.ua/result2/query?q={query}'
        response = requests.get(url)
        if response.status_code == 200:
            soup = BeautifulSoup(response.text, 'html.parser')
            links = soup.find_all('a')
            for link in links:
                href = link.get('href')
                if href and '/result2/art?id=' in href:
                    found_link = href
                    break
            if found_link:
                found_url = f'https://dok.ua{found_link}'
                part_info_chunks, _ = extract_part_numbers_and_manufacturers(found_url)
                if part_info_chunks:
                    part_numbers_and_manufacturers = [part_info for chunk in part_info_chunks for part_info in chunk]
                    st.text('\n'.join(part_numbers_and_manufacturers) + '\n')
                    clickable_links = get_clickable_links(part_numbers_and_manufacturers)
                    st.markdown(clickable_links, unsafe_allow_html=True)
                    found_results = True
                else:
                    get_manufacturer_info(part_code)
            else:
                lot_info, images, final_bid_info = get_auto_lot_info(query)
                if lot_info is not None and images is not None and final_bid_info is not None:
                    st.text((lot_info or '') + "\n" + (final_bid_info or ''))
                    if images:
                        st.image(images)
                    found_results = True
                    get_vin_info(part_code)
                else:
                    st.text("No results...")
        else:
            st.text(f"Error requesting {url}. Status code: {response.status_code}")
        if not found_link and not found_results:
            st.text('Information not found')
            get_vin_info(part_code)
    except Exception as e:
        st.text(f"Error in the message handler: {e}")
        if not found_results:
            st.text('Something went wrong... Please try again later.')

if __name__ == '__main__':
    st.set_page_config(page_title="Alfabot", page_icon="🚗")
    part_code = st.text_input("Enter a part code, VIN code, or manufacturer name")
    if st.button("Search"):
        st.text("Searching for information...")
        search_part_info(part_code)