lots of improvements to pastedb01
This commit is contained in:
parent 5ffd0f0c03
commit 582844bacf
30 pastedb/old_upload.py Normal file
@@ -0,0 +1,30 @@
import json
import os
import time
import hashlib
from services import pastie, dpaste, rentry, defau, sprunge, opendev
from data import data


def save(data):
    # Upload to the available services
    paste_dict = {'name': name}
    successes = []
    failures = []
    for service in [defau]:
        try:
            result = service.upload(data)
            add_data(result["service"], result["key"], result["md5sum"])
            successes.append(result['name'])
        except Exception as e:
            failures.append(f"{service.__name__}: {str(e)}")

    # Print upload results
    print(f"Upload successful to {len(successes)}/{len(successes)+len(failures)} services:")
    for name in successes:
        print(f"- {name}")
    if failures:
        print("Upload failed to the following services:")
        for error in failures:
            print(f"- {error}")

    print(f"Your paste trace is: {name}")
@@ -1 +1 @@
-{"name": "Joe Chuck", "age": 222, "city": "New Jersey"}
+ed566cf8f839c936bfd5582f1af6aa7d0d8457fcdd3c421d4424eedb41d5868155a6e04a291c4add0a2ec88a287d0371605d8ff9be6cbf2ba91d67097e39c0333cbf57515d9072d998d1dee110f3ecab
0 pastedb/pastedb01/data/__init__.py Normal file
BIN pastedb/pastedb01/data/__pycache__/__init__.cpython-310.pyc Normal file (Binary file not shown.)
BIN pastedb/pastedb01/data/__pycache__/data.cpython-310.pyc Normal file (Binary file not shown.)
122 pastedb/pastedb01/data/data.py Normal file
@@ -0,0 +1,122 @@
"""
This module provides functions for managing and saving data in a dictionary object. It also supports
encryption and decryption of the data when saving and loading it to/from disk. The functions in this
module include:

set_encryption_key(key: bytes): sets the encryption key to be used for encrypting the data.
add_data(service_tag: str, key: str, md5sum: str) -> str: adds data for a service to the data dictionary.
save_data(filename: str, data: dict, key=None): writes the data dictionary to disk as a JSON object.
    If a key is provided, it encrypts the data using the AES symmetric encryption algorithm before
    writing to disk.
encrypt_data(data: bytes) -> bytes: encrypts data using the AES symmetric encryption algorithm.
load_data(file_path: str) -> Dict: loads the data from a file and returns it as a dictionary. If the
    file is encrypted, it uses the provided key to decrypt it before returning the data.

This module depends on the following packages: hashlib, Crypto, and collections.
"""
import os
import json
import hashlib
import time
from collections import defaultdict
from Cryptodome.Cipher import AES
from Cryptodome.Util import Padding
from cryptography.fernet import Fernet

data = defaultdict(lambda: {"timestamp": 0, "services": {}})

_ENCRYPTION_KEY = None


def set_encryption_key(key: bytes):
    global _ENCRYPTION_KEY
    _ENCRYPTION_KEY = key


def add_data(service_tag: str, key: str, md5sum: str) -> str:
    """
    Adds data for a service to the `data` dictionary.

    Parameters:
        service_tag (str): A string representing the service being added.
        key (str): A string representing the key for the service being added.
        md5sum (str): A string representing the MD5 checksum for the service being added.

    Returns:
        str: A string representing the unique ID of the run that the data was added to.
    """
    # Generate a unique ID for the run
    run_id = f"run-{hashlib.sha256(str(data).encode()).hexdigest()[:6]}"
    timestamp = int(time.time())

    # Add the service data to the run
    data[run_id]["timestamp"] = timestamp
    data[run_id]["services"][service_tag] = {"key": key, "md5sum": md5sum}

    return run_id


def save_data(filename: str, data: dict, key=None):
    """
    Writes the data dictionary to disk as a JSON object.

    Parameters:
        filename (str): A string representing the filename to write the data to.
        data (dict): A dictionary representing the data to be written to disk.
        key (bytes): Optional bytes representing a key to use for encryption.
    """
    with open(filename, "w") as f:
        # Serialize the data dictionary as a JSON object
        json_data = json.dumps(data)

        # If a key is provided, encrypt the JSON data
        if _ENCRYPTION_KEY:
            # Encrypt the data using the key
            encrypted_data = encrypt_data(json_data.encode())

            # Write the encrypted data to the file
            f.write(encrypted_data.hex())
        else:
            # Write the unencrypted JSON data to the file
            print("you need to set the encryption key first.")


def encrypt_data(data: bytes) -> bytes:
    """
    Encrypts data using the AES symmetric encryption algorithm.

    Parameters:
        data (bytes): A bytes object representing the data to be encrypted.

    Returns:
        bytes: A bytes object representing the encrypted data.
    """
    # Generate a random initialization vector (IV)
    iv = os.urandom(AES.block_size)

    # Pad the data to a multiple of the block size
    padded_data = Padding.pad(data, AES.block_size)

    # Create an AES cipher object
    cipher = AES.new(_ENCRYPTION_KEY, AES.MODE_CBC, iv)

    # Encrypt the data using CBC mode
    encrypted_data = cipher.encrypt(padded_data)

    # Prepend the IV to the encrypted data
    return iv + encrypted_data


def load_data(file_path: str, key=None) -> dict:
    """
    Load the data from a file and return it as a dictionary.

    :param file_path: The path to the file to load.
    :param key: The key to use to decrypt the file.
    :return: A dictionary representing the data from the file.
    """
    if _ENCRYPTION_KEY:
        with open(file_path, "rb") as f:
            ciphertext = f.read()

        fernet = Fernet(_ENCRYPTION_KEY.encode())
        plaintext = fernet.decrypt(ciphertext)
        return json.loads(plaintext.decode())
    else:
        print("you need to set the encryption key first.")
        return '{"you need to set the encryption key first."}'
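A minimal usage sketch of the functions data.py defines, assuming the module is importable as data.data (the import path used by the new upload.py). The 16-byte key and the example key/md5sum values are illustrative, and decrypt_data is a hypothetical helper, not part of this commit. Note that load_data() decrypts with Fernet while save_data()/encrypt_data() write AES-CBC, so the two do not round-trip as committed; the sketch therefore reads the file back with an AES-CBC inverse instead.

# Sketch only: the key and the example paste URL/md5sum are made-up values.
from Cryptodome.Cipher import AES
from Cryptodome.Util import Padding

from data.data import add_data, data, save_data, set_encryption_key

KEY = b'0123456789abcdef'                          # illustrative 16-byte AES key
set_encryption_key(KEY)

run_id = add_data('p.defau.lt',
                  'https://p.defau.lt/?example',
                  'd41d8cd98f00b204e9800998ecf8427e')
save_data('data.json', data)                       # writes hex(IV + AES-CBC ciphertext)

# Hypothetical inverse of encrypt_data(); load_data() as committed uses Fernet
# and cannot read what save_data() just wrote.
def decrypt_data(blob: bytes, key: bytes) -> bytes:
    iv, body = blob[:AES.block_size], blob[AES.block_size:]
    return Padding.unpad(AES.new(key, AES.MODE_CBC, iv).decrypt(body), AES.block_size)

with open('data.json') as f:
    print(run_id, decrypt_data(bytes.fromhex(f.read()), KEY).decode())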
@@ -1,34 +1,9 @@
 {
-  "name": "1680551932-b74d72",
+  "name": "1680593168-456462",
-  "pastie_blzoeg": {
-    "service": "pastie",
-    "key": "https://pastie.io/blzoeg",
-    "md5sum": "9fd3002da661e6ca38a2e8a49daafb1b"
-  },
-  "rentry_QDmcP3gr": {
-    "service": "Rentry",
-    "key": "https://rentry.co/hqcohp",
-    "md5sum": "9fd3002da661e6ca38a2e8a49daafb1b"
-  },
-  "dpaste_9W5AB4Y2V": {
-    "service": "dpaste",
-    "key": "https://dpaste.com/9W5AB4Y2V",
-    "md5sum": "9fd3002da661e6ca38a2e8a49daafb1b"
-  },
-  "sprunge_j9uXbd_9fd30": {
-    "service": "sprunge",
-    "key": "http://sprunge.us/j9uXbd",
-    "md5sum": "9fd3002da661e6ca38a2e8a49daafb1b"
-  },
-  "defau_9fd30": {
+  "defau_4e062": {
     "service": "p.defau.lt",
-    "key": "https://p.defau.lt/?ExQfLnJ_aDEKYIqkpwv6cQ",
-    "md5sum": "9fd3002da661e6ca38a2e8a49daafb1b"
+    "key": "https://p.defau.lt/?QtZaVCSgOsVMmI1xS_ofqw",
+    "md5sum": "4e062894f660d3c69640129f9fd0a09e"
-  },
-  "opendev_819477_9fd30": {
-    "service": "opendev",
-    "key": "https://paste.opendev.org/json/",
-    "md5sum": "9fd3002da661e6ca38a2e8a49daafb1b"
-  }
+  }
 }
Binary files not shown.
@@ -1,22 +1,34 @@
+"""
+This module defines functions to interact with the p.defau.lt service for uploading and retrieving code snippets.
+
+Functions:
+- get_service_tag(): Returns a string representing the service tag for p.defau.lt.
+- upload(data): Uploads a code snippet to p.defau.lt and returns a dictionary containing metadata about the upload.
+- get(trace): Retrieves the code snippet associated with the provided trace from p.defau.lt.
+"""
+
 import requests
 import json
 import hashlib
 
+URL = 'https://p.defau.lt/submit.php'
+NAME_PREFIX = 'defau_'
+
-def serviceTag():
+def get_service_tag():
     return 'p.defau.lt'
 
 def upload(data):
     json_data = json.dumps(data)
     md5sum = hashlib.md5(json_data.encode('utf-8')).hexdigest()
 
-    url = 'https://p.defau.lt/submit.php'
-    response = requests.post(url, data={'code': json_data})
-    if response.status_code == 200:
+    try:
+        response = requests.post(URL, data={'code': json_data})
+        response.raise_for_status()
         key = response.url
-        name = 'defau_' + md5sum[:5]
-        return {'name': name, 'service': 'p.defau.lt', 'key': key, 'md5sum': md5sum}
-    else:
+        name = f"{NAME_PREFIX}{md5sum[:5]}"
+        return {'name': name, 'service': get_service_tag(), 'key': key, 'md5sum': md5sum}
+    except requests.exceptions.RequestException as e:
+        print(e)
         return None
 
 def get(trace):
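For reference, a minimal sketch of how the rewritten p.defau.lt module is meant to be called (it is imported as services.defau elsewhere in this commit); the payload is an illustrative example.

from services import defau

payload = {"hello": "world"}      # illustrative payload
result = defau.upload(payload)    # dict with name/service/key/md5sum, or None on request errors
if result:
    print(result['name'], result['key'])
else:
    print('upload to', defau.get_service_tag(), 'failed')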
@@ -2,6 +2,11 @@ import hashlib
 import json
 import requests
 
+NAME = 'dpaste'
+
+def get_service_tag():
+    return NAME
+
 def upload(data):
     try:
         content = json.dumps(data)
@@ -20,10 +25,10 @@ def upload(data):
             md5sum = hashlib.md5(content.encode('utf-8')).hexdigest()
 
             return {
-                'service': 'dpaste',
+                'service': NAME,
                 'key': dpaste_url,
                 'md5sum': md5sum,
-                'name': 'dpaste_' + dpaste_url.rsplit('/', 1)[-1]
+                'name': NAME + '_' + dpaste_url.rsplit('/', 1)[-1]
             }
         else:
             return None
@@ -2,7 +2,11 @@ import requests
 import json
 import hashlib
 
-url = 'https://paste.opendev.org/json/'
+URL = 'https://paste.opendev.org/json/'
+NAME = 'opendev'
+
+def get_service_tag():
+    return NAME
 
 def upload(data):
     try:
@@ -17,7 +21,7 @@ def upload(data):
         }
 
         # send request to create new paste
-        response = requests.post(url + '?method=pastes.newPaste', json=payload)
+        response = requests.post(URL + '?method=pastes.newPaste', json=payload)
 
         status = response.status_code
         paste_id = response.json()['data']
@@ -27,10 +31,10 @@ def upload(data):
             md5sum = hashlib.md5(content.encode('utf-8')).hexdigest()
 
             return {
-                'service': "opendev",
-                'key': url,
+                'service': NAME,
+                'key': URL[:-6],
                 'md5sum': md5sum,
-                'name': 'opendev_' + paste_id + '_' + md5sum[:5]
+                'name': NAME + '_' + paste_id + '_' + md5sum[:5]
             }
         else:
             return None
@@ -2,6 +2,13 @@ import requests
 import json
 import hashlib
 
+
+URL = 'https://pastie.io/documents'
+NAME = 'pastie'
+
+def get_service_tag():
+    return NAME
+
 def upload(data):
     try:
         json_data = json.dumps(data)
@@ -13,8 +20,8 @@ def upload(data):
             # print(f'JSON object uploaded to Pastie: {pastie_url}')
 
             return {
-                "service": "pastie",
-                "name": 'pastie_' + key,
+                "service": NAME,
+                "name": NAME + '_' + key,
                 "key": pastie_url,
                 "md5sum": md5sum
             }
@@ -4,9 +4,15 @@ import hashlib
 import re
 
 
+COMMAND = './external/rentry'
+NAME = 'rentry'
+
+def get_service_tag():
+    return NAME
+
 def upload(data):
     md5sum = hashlib.md5(json.dumps(data).encode('utf-8')).hexdigest()
-    command = ['./external/rentry', 'new', json.dumps(data)]
+    command = [COMMAND, 'new', json.dumps(data)]
     output = subprocess.check_output(command, universal_newlines=True)
     #print(output)
     lines = output.strip().split('\n')
@@ -3,7 +3,12 @@ import json
 import hashlib
 import re
 
-url = 'http://sprunge.us'
+URL = 'http://sprunge.us'
+NAME = 'sprunge'
+
+def get_service_tag():
+    return NAME
+
 
 def upload(data):
     try:
@@ -11,7 +16,7 @@ def upload(data):
         md5sum = hashlib.md5(json_data.encode('utf-8')).hexdigest()
 
         # Send the HTTP POST request to the Sprunge API
-        response = requests.post(url, data={'sprunge': json_data})
+        response = requests.post(URL, data={'sprunge': json_data})
         if response.status_code == 200:
 
             # Get the URL of the uploaded text from the response body
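The service modules now share one shape: module-level URL/NAME constants, a get_service_tag() helper, and upload(data) returning {'name', 'service', 'key', 'md5sum'} or None. A hypothetical extra backend could follow the same template; the endpoint, the form field and the NAME below are invented for illustration and are not part of this commit.

import hashlib
import json

import requests

URL = 'https://paste.example.org/api'   # hypothetical endpoint, illustration only
NAME = 'example'


def get_service_tag():
    return NAME


def upload(data):
    # Serialize the payload and fingerprint it, like the other service modules do
    json_data = json.dumps(data)
    md5sum = hashlib.md5(json_data.encode('utf-8')).hexdigest()
    try:
        response = requests.post(URL, data={'content': json_data})   # 'content' field is assumed
        response.raise_for_status()
        key = response.text.strip()       # assume the service replies with the paste URL
        return {'name': f'{NAME}_{md5sum[:5]}', 'service': NAME, 'key': key, 'md5sum': md5sum}
    except requests.exceptions.RequestException as e:
        print(e)
        return None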
@@ -1,46 +1,24 @@
-import json
-import time
-import hashlib
+from typing import Dict
+
 from services import pastie, dpaste, rentry, defau, sprunge, opendev
-
-# Load the JSON object from file
-with open('data.json') as f:
-    data = json.load(f)
-
-# Generate a unique name for the upload
-name = f"{time.time():.0f}-{hashlib.sha256(str(data).encode()).hexdigest()[:6]}"
-
-# Upload to the available services
-paste_dict = {'name': name}
-successes = []
-failures = []
-for service in [pastie, rentry, dpaste, sprunge, defau, opendev]:
-    try:
-        result = service.upload(data)
-        paste_dict[result['name']] = {
-            'service': result['service'],
-            'key': result['key'],
-            'md5sum': result['md5sum'],
-        }
-        successes.append(result['name'])
-    except Exception as e:
-        failures.append(f"{service.__name__}: {str(e)}")
-
-# Update the paste_dict file
-with open('paste_dict.json', 'r+') as f:
-    try:
-        paste_dict.update(json.load(f))
-        f.seek(0)
-    except json.decoder.JSONDecodeError:
-        pass  # ignore error if file is empty
-    json.dump(paste_dict, f, indent=2)
-
-# Print upload results
-print(f"Upload successful to {len(successes)}/{len(successes)+len(failures)} services:")
-for name in successes:
-    print(f"- {name}")
-if failures:
-    print("Upload failed to the following services:")
-    for error in failures:
-        print(f"- {error}")
+from data.data import add_data, save_data, set_encryption_key
+
+
+def upload_and_store(data: Dict):
+    # set encryption key if necessary
+    set_encryption_key(b'upload_id=upload_and_store(my_da')
+
+    traces = {}
+    for service in [pastie, dpaste, rentry, defau, sprunge, opendev]:
+        result = service.upload(data)
+        if result:
+            traces[result['name']] = result['key']
+            add_data(service.get_service_tag(), result['name'], result['md5sum'])
+    save_data('data.json', data, key=b'my_key123')
+    return traces
+
+
+my_data = {"name": "Sashenka", "age": 26, "country": "Anguilla"}
+
+upload_trace = upload_and_store(my_data)
+print('trace: ', upload_trace)