Now CPaste is publishing to multiple PrivateBin instances by default.
This commit is contained in:
parent
9d950f6a3f
commit
06c01c23c4
@ -1,4 +1,4 @@
|
||||
from pastebins.cpaste import publish_to_cpaste
|
||||
from pastebins.cpaste import publish_to_multiple_cpastes
|
||||
from pastebins.paste_python import publish_to_pinnwand
|
||||
from pastebins.termbin import publish_to_termbin
|
||||
from pastebins.paste_mozilla import publish_to_mozilla
|
||||
@ -29,9 +29,9 @@ def publish_to_multiple_pastebins(file_path):
|
||||
results['Dpaste'] = dpaste_url
|
||||
|
||||
# Publish to CPaste
|
||||
cpaste_url = publish_to_cpaste(file_path)
|
||||
if cpaste_url:
|
||||
results['CPaste'] = cpaste_url
|
||||
cpaste_results = publish_to_multiple_cpastes(file_path)
|
||||
if cpaste_results:
|
||||
results['CPaste'] = cpaste_results
|
||||
|
||||
# Publish to paste python org
|
||||
pbin_url = publish_to_pinnwand(file_content)
|
||||
|
@ -34,6 +34,10 @@ import json
|
||||
import base64
|
||||
import zlib
|
||||
import requests
|
||||
import html
|
||||
import random
|
||||
from bs4 import BeautifulSoup
|
||||
from packaging import version as pkg_version
|
||||
from cryptography.hazmat.primitives.ciphers.aead import AESGCM
|
||||
from cryptography.hazmat.primitives import hashes
|
||||
from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC
|
||||
@ -385,9 +389,9 @@ def main():
|
||||
|
||||
sys.exit(0)
|
||||
|
||||
def publish_to_cpaste(file):
|
||||
def publish_to_cpaste(file, paste_url='https://cpaste.org'):
|
||||
#paste_url = 'https://cpaste.org'
|
||||
paste_url = 'https://paste.devsite.pl'
|
||||
#paste_url = 'https://paste.devsite.pl'
|
||||
paste_formatter = 'plaintext'
|
||||
paste_compress = True
|
||||
paste_expire = '1day'
|
||||
@ -418,6 +422,78 @@ def publish_to_cpaste(file):
|
||||
output = '%s%s#%s DELETE TOKEN: %s' % (paste_url, paste_id, paste_decrypt, paste_deletetoken)
|
||||
return output
|
||||
|
||||
def publish_to_multiple_cpastes(file, num_instances=3):
    """Publish *file* to several randomly chosen PrivateBin instances.

    Picks up to ``num_instances`` instances from the public directory
    returned by ``get_list_of_instances`` and publishes the file to each
    one via ``publish_to_cpaste``.  A failure on one instance is reported
    on stdout and does not abort the remaining publishes.

    Returns a dict mapping each instance base URL to its publish result.
    """
    candidates = get_list_of_instances()

    # Never ask random.sample for more entries than the directory lists.
    pick_count = min(num_instances, len(candidates))
    chosen = random.sample(candidates, pick_count)

    results = {}
    for link in chosen:
        try:
            # Directory links may carry query strings; keep only the base URL.
            base = link.split('?')[0]
            results[base] = publish_to_cpaste(file, base)
        except Exception as e:
            print(f"Failed to publish to {link}: {str(e)}")

    return results
|
||||
|
||||
def get_list_of_instances():
    """Return links to PrivateBin instances running the latest version.

    Scrapes the public PrivateBin directory, determines the highest
    version number listed there, and returns the (HTML-unescaped) links
    of every instance running that version.

    Returns:
        list[str]: decoded hrefs of the latest-version instances.

    Raises:
        requests.HTTPError: if the directory page cannot be fetched.
        ValueError: if no version information can be parsed from the page.
    """

    # Fetch HTML content from a URL; fail loudly on HTTP errors.
    def fetch_html_from_url(url):
        response = requests.get(url)
        response.raise_for_status()  # Raise an HTTPError for bad responses
        return response.text

    # The second <td> of each directory row holds the version string.
    # Returns None for malformed rows instead of raising IndexError.
    def row_version(row):
        cells = row.find_all('td')
        if len(cells) < 2:
            return None
        return cells[1].text.strip()

    # Find the highest version number among the directory rows.
    def find_highest_version(soup):
        versions = set()
        for row in soup.find_all('tr', class_='opacity4'):
            version = row_version(row)
            if version:
                versions.add(version)

        if not versions:
            # Explicit error instead of max()'s opaque "empty sequence".
            raise ValueError('No PrivateBin versions found in directory page')

        # packaging orders versions semantically ('1.10' > '1.9').
        return max(versions, key=pkg_version.parse)

    # Extract and decode the links of every row at the latest version.
    def extract_latest_version_links(soup, highest_version):
        latest_version_links = []
        for row in soup.find_all('tr', class_='opacity4'):
            if row_version(row) != highest_version:
                continue
            anchor = row.find('a')
            # Skip rows without a usable link instead of raising TypeError.
            if anchor is None or not anchor.has_attr('href'):
                continue
            latest_version_links.append(html.unescape(anchor['href']))
        return latest_version_links

    # The canonical public directory of PrivateBin instances.
    url = 'https://privatebin.info/directory/'

    html_content = fetch_html_from_url(url)
    soup = BeautifulSoup(html_content, 'lxml')

    highest_version = find_highest_version(soup)
    print(f'Highest version found: {highest_version}')

    return extract_latest_version_links(soup, highest_version)
|
||||
|
||||
|
||||
|
||||
# Script entry point — run main() only when executed directly, not on import.
if __name__ == "__main__":
|
||||
main()
|
||||
|
Loading…
Reference in New Issue
Block a user