| import streamlit as st |
| import base64 |
| import json |
| import re |
| import html |
| import pandas as pd |
|
|
| |
# Page-level Streamlit configuration: browser tab title/icon and wide layout.
# NOTE(review): Streamlit requires this to be the first st.* call in the script.
st.set_page_config(
    page_title="Proxy Server Decoder",
    page_icon="π",
    layout="wide"
)
|
|
class ProxyServerDecoder:
    """Decode proxy-server entries that are double-encoded (base64 -> hex -> JSON)
    and embedded in an HTML ``data-ss`` attribute."""

    def decode_proxy_entry(self, encoded_str):
        """Decode a single proxy entry from base64 -> hex -> JSON.

        Returns a dict with:
          - "status": "success" or "error"
          - "decoded_data" (on success): the parsed JSON object
          - "error" (on failure): a human-readable message
          - "original_length": length of the input string as received,
            before any '=' padding is appended
        """
        # Capture the caller's input length up front: the padding step below
        # mutates `encoded_str`, so measuring later would misreport it.
        original_length = len(encoded_str)
        try:
            # base64 input must be a multiple of 4 chars; restore stripped '=' padding.
            padding = len(encoded_str) % 4
            if padding:
                encoded_str += '=' * (4 - padding)

            decoded_bytes = base64.b64decode(encoded_str)

            # Inner layer: the base64 payload is an ASCII hex dump of the JSON text.
            hex_string = decoded_bytes.decode('ascii')
            json_string = bytes.fromhex(hex_string).decode('ascii')

            return {
                "status": "success",
                "decoded_data": json.loads(json_string),
                "original_length": original_length
            }
        except Exception as e:
            # Any failure in the b64/hex/JSON pipeline is reported, not raised,
            # so a single bad entry does not abort batch processing.
            return {
                "status": "error",
                "error": f"Decoding failed: {str(e)}",
                "original_length": original_length
            }

    def extract_entries_from_html(self, html_content):
        """Extract base64 entry strings from an HTML ``data-ss`` attribute.

        Returns a (possibly empty) list of base64 strings. On unexpected
        errors the problem is surfaced via st.error and [] is returned.
        """
        try:
            # Attribute values may arrive HTML-entity-escaped (&quot; etc.).
            unescaped_html = html.unescape(html_content)

            # Accept several quoting styles for the data-ss JSON-array attribute.
            patterns = [
                r'data-ss="\[(.*?)\]"',
                r"data-ss='\[(.*?)\]'",
                r'data-ss=\s*\[\s*(.*?)\s*\]'
            ]

            data_ss_content = None
            for pattern in patterns:
                match = re.search(pattern, unescaped_html, re.DOTALL)
                if match:
                    data_ss_content = match.group(1)
                    break

            if not data_ss_content:
                return []

            # Pull each quoted base64 token out of the array body.
            base64_pattern = r'["\']([A-Za-z0-9+/=]+)["\']'
            entries = re.findall(base64_pattern, data_ss_content)

            return entries
        except Exception as e:
            # NOTE(review): reporting through Streamlit couples parsing to the UI;
            # kept for interface compatibility.
            st.error(f"Error extracting from HTML: {e}")
            return []

    def process_html(self, html_content):
        """Extract and decode every entry found in `html_content`.

        Returns {"status": "error", "message": ...} when nothing is found,
        otherwise {"status": "success", "statistics": {...}, "results": [...]}.
        """
        entries = self.extract_entries_from_html(html_content)

        if not entries:
            return {
                "status": "error",
                "message": "No entries found in HTML"
            }

        results = []
        success_count = 0

        for i, entry in enumerate(entries):
            result = self.decode_proxy_entry(entry)
            result_data = {
                "entry_number": i + 1,
                "encoded_string": entry,
                # Short preview for UI labels; long strings are elided.
                "encoded_preview": f"{entry[:20]}...{entry[-10:]}" if len(entry) > 40 else entry,
                **result
            }
            results.append(result_data)

            if result["status"] == "success":
                success_count += 1

        # `entries` is non-empty here, so the rate division is safe.
        return {
            "status": "success",
            "statistics": {
                "total_entries": len(entries),
                "successful_decodes": success_count,
                "failed_decodes": len(entries) - success_count,
                "success_rate": f"{(success_count/len(entries))*100:.1f}%"
            },
            "results": results
        }
|
|
def main():
    """Render the Streamlit UI: an HTML-parser tab and a single-entry decoder tab."""
    decoder = ProxyServerDecoder()

    st.title("π Proxy Server Decoder")
    st.markdown("Decode base64-encoded proxy server information from HTML script tags")

    # Pre-filled example so the parser tab works out of the box.
    sample_html = '''<script
id="serverSelectorScript"
data-u=""https://adnade.net/ptp/?user=platformsincome&subid=131370""
data-d="0"
data-ss="["N2IyMjY5NjQyMjNhMzIzMDMxMmMyMjc1NzI2YzIyM2EyMjY4NzQ3NDcwNzMzYTVjMmY1YzJmMzEzMDM4MmUzMTM4MzEyZTMxMzEyZTMxMzczMTVjMmY1ZjVmNjM3MDMyMmU3MDY4NzAyMjJjMjI2ZTYxNmQ2NTIyM2EyMjcwNzM3OTYzNjg3YTRjNmY3MzQxNmU2NzY1NmM2NTczMzI0ZDIyN2Q=","N2IyMjY5NjQyMjNhMzEzODM1MmMyMjc1NzI2YzIyM2EyMjY4NzQ3NDcwNzMzYTVjMmY1YzJmMzkzNTJlMzIzMTM0MmUzNTMzMmUzNDM4NWMyZjVmNWY2MzcwMzIyZTcwNjg3MDIyMmMyMjZlNjE2ZDY1MjIzYTIyNmQ2NTc2NTM3MDYxNjM2NTU3NjE3MjczNjE3NzUwMjI3ZA=="]"
></script>'''

    tab1, tab2 = st.tabs(["π HTML Parser", "π€ Single Entry"])

    with tab1:
        st.header("HTML Content Parser")

        html_input = st.text_area(
            "Paste your HTML content with data-ss attribute:",
            value=sample_html,
            height=300,
            help="Paste the entire HTML script tag containing data-ss attribute"
        )

        if st.button("π Parse HTML", type="primary"):
            if html_input.strip():
                with st.spinner("Processing HTML content..."):
                    result = decoder.process_html(html_input)

                if result["status"] == "success":
                    # Rejoined broken literal: the message must be a single f-string.
                    st.success(f"β Processed {result['statistics']['total_entries']} entries!")

                    # Summary metrics row.
                    stats = result["statistics"]
                    col1, col2, col3, col4 = st.columns(4)
                    with col1:
                        st.metric("Total Entries", stats["total_entries"])
                    with col2:
                        st.metric("Successful", stats["successful_decodes"])
                    with col3:
                        st.metric("Failed", stats["failed_decodes"])
                    with col4:
                        st.metric("Success Rate", stats["success_rate"])

                    # One expander per decoded entry.
                    st.subheader("Decoded Results")
                    for entry in result["results"]:
                        with st.expander(f"Entry #{entry['entry_number']} - {entry['encoded_preview']}"):
                            if entry["status"] == "success":
                                st.json(entry["decoded_data"])
                            else:
                                st.error(f"Error: {entry['error']}")

                    # Offer the full result set as a JSON download.
                    json_str = json.dumps(result, indent=2)
                    st.download_button(
                        label="π₯ Download Results as JSON",
                        data=json_str,
                        file_name="decoded_results.json",
                        mime="application/json"
                    )
                else:
                    st.error(f"β {result.get('message', 'Processing failed')}")
            else:
                st.warning("Please enter HTML content")

    with tab2:
        st.header("Single Entry Decoder")

        single_input = st.text_area(
            "Enter base64 encoded string:",
            height=150,
            placeholder="Paste your base64 string here...",
            help="Enter a single base64 encoded proxy entry"
        )

        if st.button("π Decode Single Entry", type="primary"):
            if single_input.strip():
                with st.spinner("Decoding entry..."):
                    result = decoder.decode_proxy_entry(single_input.strip())

                if result["status"] == "success":
                    # Rejoined broken literal: the message must be a single string.
                    st.success(f"β Entry decoded successfully!")
                    st.json(result["decoded_data"])

                    # Offer just the decoded payload as a JSON download.
                    json_str = json.dumps(result["decoded_data"], indent=2)
                    st.download_button(
                        label="π₯ Download Decoded Data",
                        data=json_str,
                        file_name="single_decoded.json",
                        mime="application/json"
                    )
                else:
                    st.error(f"β {result.get('error', 'Decoding failed')}")
            else:
                st.warning("Please enter a base64 string")
|
|
# Script entry point: launch the UI only when executed directly, not on import.
if __name__ == "__main__":
    main()