Compare commits
23 Commits
30b8c4b274
...
master
Author | SHA1 | Date | |
---|---|---|---|
dc393eecf5 | |||
6528b2401e | |||
8ae23ccf56 | |||
1f30abb6ba | |||
b866ac06b4 | |||
5361afec07 | |||
f5c27bb5da | |||
ec4104b2d1 | |||
582844bacf | |||
5ffd0f0c03 | |||
32d3e2dfe1 | |||
c02ea2849a | |||
8800275ed5 | |||
c5e08f3a76 | |||
a60d269535 | |||
52d380a4ee | |||
21f0d0c229 | |||
cb33802e87 | |||
ae14376b13 | |||
0454d945c1 | |||
ab3327464c | |||
dd86cecec2 | |||
1751a7f763 |
22
README.md
Normal file
22
README.md
Normal file
@ -0,0 +1,22 @@
|
||||
# Coding with ChatGTP
|
||||
|
||||
On this repository is my random coding done typically late at night when I'm too tired to do anything else... :)
|
||||
|
||||
## some pointters:
|
||||
|
||||
- btc_tracker
|
||||
1. Server/fetcher: fetching bitcoin ohlc data from multiple sources and offer it forward with simple api.
|
||||
2. Client: Fetch data from the server and show it via gnuplot chart.
|
||||
|
||||
- btc_wallet
|
||||
Simple python3 script's to create bitcoin wallet and storing it on KeePassXC database.
|
||||
Still planning to create ondemand key generation and transaction signing.
|
||||
|
||||
- pastedb
|
||||
Trying to make simple database on top of pastebin like services.
|
||||
|
||||
- encryption-on-apis & webapp & webpage-stuff
|
||||
Some random testing about creating simple and secure web services.
|
||||
|
||||
- letters
|
||||
Saved ChatGPT chats and messages
|
26
TryUTF8Read.py
Executable file
26
TryUTF8Read.py
Executable file
@ -0,0 +1,26 @@
|
||||
#!/usr/bin/python3
|
||||
import sys
|
||||
import codecs
|
||||
|
||||
def test_utf8_encoding(filepath):
|
||||
try:
|
||||
with codecs.open(filepath, 'r', 'utf-8') as testfile:
|
||||
testfile.read()
|
||||
#print(f"{filepath} is openable with UTF-8 encoding.")
|
||||
except UnicodeDecodeError:
|
||||
print(f"{filepath} is not openable with UTF-8 encoding.")
|
||||
print(f"Converting {filepath} from ISO-8859-1 to UTF-8...")
|
||||
with codecs.open(filepath, 'r', 'iso-8859-1') as f:
|
||||
content = f.read()
|
||||
utf8_content = content.decode('iso-8859-1').encode('utf-8')
|
||||
with codecs.open(filepath, 'w', 'utf-8') as f:
|
||||
f.write(utf8_content)
|
||||
print(f"{filepath} has been converted to UTF-8.")
|
||||
|
||||
if len(sys.argv) < 2:
|
||||
print("Please provide a file name as a command line argument.")
|
||||
sys.exit(1)
|
||||
|
||||
filepath = sys.argv[1]
|
||||
test_utf8_encoding(filepath)
|
||||
|
Binary file not shown.
20
btc_tracker/01042023/TheClient/bin/TheClient.py
Normal file
20
btc_tracker/01042023/TheClient/bin/TheClient.py
Normal file
@ -0,0 +1,20 @@
|
||||
|
||||
import os
|
||||
import configparser
|
||||
import requests
|
||||
from datetime import datetime
|
||||
from TheClient.database.db_utils import Database
|
||||
from TheClient.graphing.graph_utils import Graph
|
||||
|
||||
def load_config(config_file):
|
||||
config = configparser.ConfigParser()
|
||||
config.read(config_file)
|
||||
return config
|
||||
|
||||
def get_btc_price():
|
||||
response = requests.get('https://api.binance.com/api/v3/avgPrice', params={'symbol': 'BTCUSDT'})
|
||||
json_data = response.json()
|
||||
return float(json_data['price'])
|
||||
|
||||
def main():
|
||||
config = load_config(os.path.join(os.path.dirname(__file__), '..', 'config', 'The
|
0
btc_tracker/01042023/TheClient/database/__init__.py
Normal file
0
btc_tracker/01042023/TheClient/database/__init__.py
Normal file
27
btc_tracker/01042023/TheClient/database/db_utils.py
Normal file
27
btc_tracker/01042023/TheClient/database/db_utils.py
Normal file
@ -0,0 +1,27 @@
|
||||
import sqlite3
|
||||
|
||||
class Database():
|
||||
def __init__(self, db_file):
|
||||
self.db_file = db_file
|
||||
self._create_table()
|
||||
|
||||
def _create_table(self):
|
||||
with sqlite3.connect(self.db_file) as conn:
|
||||
cursor = conn.cursor()
|
||||
cursor.execute('''CREATE TABLE IF NOT EXISTS timeseries
|
||||
(timestamp INTEGER PRIMARY KEY, value REAL)''')
|
||||
conn.commit()
|
||||
|
||||
def insert_data(self, timestamp, value):
|
||||
with sqlite3.connect(self.db_file) as conn:
|
||||
cursor = conn.cursor()
|
||||
cursor.execute('''INSERT INTO timeseries (timestamp, value)
|
||||
VALUES (?, ?)''', (timestamp, value))
|
||||
conn.commit()
|
||||
|
||||
def fetch_data(self, limit):
|
||||
with sqlite3.connect(self.db_file) as conn:
|
||||
cursor = conn.cursor()
|
||||
cursor.execute('''SELECT timestamp, value FROM timeseries
|
||||
ORDER BY timestamp DESC LIMIT ?''', (limit,))
|
||||
return cursor.fetchall()
|
0
btc_tracker/01042023/TheClient/fetch/__init__.py
Normal file
0
btc_tracker/01042023/TheClient/fetch/__init__.py
Normal file
0
btc_tracker/01042023/TheClient/graphing/__init__.py
Normal file
0
btc_tracker/01042023/TheClient/graphing/__init__.py
Normal file
12
btc_tracker/01042023/TheClient/graphing/graph_utils.py
Normal file
12
btc_tracker/01042023/TheClient/graphing/graph_utils.py
Normal file
@ -0,0 +1,12 @@
|
||||
import matplotlib.pyplot as plt
|
||||
|
||||
class Graph():
|
||||
def __init__(self, xdata, ydata):
|
||||
self.fig, self.ax = plt.subplots()
|
||||
self.line, = self.ax.plot(xdata, ydata)
|
||||
|
||||
def update_graph(self, xdata, ydata):
|
||||
self.line.set_data(xdata, ydata)
|
||||
self.ax.relim()
|
||||
self.ax.autoscale_view()
|
||||
self.fig.canvas.draw()
|
2
btc_tracker/01042023/TheClient/requirements.txt
Normal file
2
btc_tracker/01042023/TheClient/requirements.txt
Normal file
@ -0,0 +1,2 @@
|
||||
requests==2.25.1
|
||||
matplotlib==3.6.2
|
0
btc_tracker/01042023/TheClient/tests/__init__.py
Normal file
0
btc_tracker/01042023/TheClient/tests/__init__.py
Normal file
@ -1,53 +1,94 @@
|
||||
#!/usr/bin/python3
|
||||
"""
|
||||
Fetch BTCUSD OHLC data from few market places and serve it forward with simple json api.
|
||||
|
||||
import krakenex
|
||||
import json, sqlite3
|
||||
import requests, os, time
|
||||
Creates: ./btc_ohlc.db
|
||||
serves: localhost:5000/[t] and /serverkey
|
||||
Authentication via auth header with signatures
|
||||
"""
|
||||
|
||||
import math
|
||||
import json
|
||||
import os
|
||||
import time
|
||||
import sys
|
||||
import sqlite3
|
||||
import binascii
|
||||
import threading
|
||||
from flask import Flask, request
|
||||
from hashlib import sha256
|
||||
import requests
|
||||
import ecdsa
|
||||
import krakenex
|
||||
from flask import Flask, jsonify, request
|
||||
|
||||
database = "btc_ohlc.db"
|
||||
# from Cryptodome.Cipher import AES
|
||||
|
||||
DATABASE = "btc_ohlc.db"
|
||||
KEYSFILE = "userkeys.json"
|
||||
app = Flask(__name__)
|
||||
|
||||
database_lock = threading.Lock()
|
||||
## Generate the ECDSA keys for this instance
|
||||
print("Generating ECDSA keys for this instance... just wait a bit...")
|
||||
server_private_key = ecdsa.SigningKey.generate(curve=ecdsa.SECP256k1)
|
||||
server_public_key = server_private_key.get_verifying_key()
|
||||
# We need the hexadecimal form for sharing over http/json
|
||||
server_public_key_hex = binascii.hexlify(server_public_key.to_string()).decode("utf-8")
|
||||
|
||||
|
||||
# Empty response json
|
||||
empty_dict = {"exchange": "", "timestamp": 0, "open": 0, "high": 0, "low": 0, "close": 0, "volume_quote": 0, "volume_base": 0, "trades": 0}
|
||||
empty_dict = {
|
||||
"exchange": "",
|
||||
"timestamp": 0,
|
||||
"open": 0,
|
||||
"high": 0,
|
||||
"low": 0,
|
||||
"close": 0,
|
||||
"volume_quote": 0,
|
||||
"volume_base": 0,
|
||||
"trades": 0,
|
||||
}
|
||||
empty_json = json.dumps(empty_dict)
|
||||
|
||||
def Checkthedatabase():
|
||||
## Some sanity for the database
|
||||
# check if btc_timeseries.db database file exists
|
||||
if not os.path.exists(database):
|
||||
db = sqlite3.connect(database)
|
||||
|
||||
db.execute("""\
|
||||
CREATE TABLE ohlc (
|
||||
id INTEGER PRIMARY KEY,
|
||||
exchange TEXT NOT NULL,
|
||||
timestamp INTEGER NOT NULL,
|
||||
open REAL NOT NULL,
|
||||
high REAL NOT NULL,
|
||||
low REAL NOT NULL,
|
||||
close REAL NOT NULL,
|
||||
volume_quote REAL NOT NULL,
|
||||
volume_base REAL NOT NULL,
|
||||
trades INTEGER NOT NULL )""")
|
||||
|
||||
db.commit()
|
||||
db.close()
|
||||
|
||||
db = sqlite3.connect(database)
|
||||
|
||||
# Check if the table exists
|
||||
table_exists = False
|
||||
cursor = db.execute("PRAGMA table_info(ohlc)")
|
||||
for row in cursor:
|
||||
table_exists = True
|
||||
|
||||
# Create the table if it doesn't exist
|
||||
if not table_exists:
|
||||
db.execute("""\
|
||||
def read_keys():
|
||||
"""
|
||||
Reads the declared KEYSFILE and returns "user_publickeys" from it.
|
||||
Returns: json object
|
||||
"""
|
||||
empty_userkeys = {
|
||||
"user_publickeys": {
|
||||
"user_name": "user_ecdsa_public key"
|
||||
}
|
||||
}
|
||||
# test if the file is there, we could make one if there is non
|
||||
try:
|
||||
# ascii is subset of UTF-8, this should be quite safe...
|
||||
with open(KEYSFILE, "r", encoding='utf-8') as cfile:
|
||||
user_keys = json.load(cfile)
|
||||
|
||||
if 'user_publickeys' not in user_keys:
|
||||
print('Missing required property "user_publickeys" in config file')
|
||||
sys.exit(1)
|
||||
if user_keys == empty_userkeys:
|
||||
print('Your userkeys.json file seems to not filled. Please insert your key there.')
|
||||
sys.exit(1)
|
||||
|
||||
return user_keys["user_publickeys"]
|
||||
except FileNotFoundError:
|
||||
with open(KEYSFILE, "w", encoding='utf-8') as nfile:
|
||||
print('You seem be lacking the users key file, created example for you. Exiting')
|
||||
json.dump(empty_userkeys, nfile, indent=2)
|
||||
sys.exit(1)
|
||||
|
||||
def check_database():
|
||||
"""
|
||||
Check the database for the 'ohlc' table.
|
||||
If the database file or the table does not exist, create them.
|
||||
"""
|
||||
if not os.path.exists(DATABASE):
|
||||
new_db = sqlite3.connect(DATABASE)
|
||||
new_db.execute(
|
||||
"""\
|
||||
CREATE TABLE ohlc (
|
||||
id INTEGER PRIMARY KEY,
|
||||
exchange TEXT NOT NULL,
|
||||
@ -58,178 +99,341 @@ def Checkthedatabase():
|
||||
close REAL NOT NULL,
|
||||
volume_quote REAL NOT NULL,
|
||||
volume_base REAL NOT NULL,
|
||||
trades INTEGER NOT NULL )""")
|
||||
db.commit()
|
||||
trades INTEGER NOT NULL )"""
|
||||
)
|
||||
|
||||
new_db.commit()
|
||||
new_db.close()
|
||||
|
||||
new_db = sqlite3.connect(DATABASE)
|
||||
# Check if the table exists
|
||||
|
||||
table_exists = False
|
||||
cursor = new_db.execute("PRAGMA table_info(ohlc)")
|
||||
for row in cursor:
|
||||
table_exists = True
|
||||
|
||||
# Create the table if it doesn't exist
|
||||
if not table_exists:
|
||||
new_db.execute(
|
||||
"""\
|
||||
CREATE TABLE ohlc (
|
||||
id INTEGER PRIMARY KEY,
|
||||
exchange TEXT NOT NULL,
|
||||
timestamp INTEGER NOT NULL,
|
||||
open REAL NOT NULL,
|
||||
high REAL NOT NULL,
|
||||
low REAL NOT NULL,
|
||||
close REAL NOT NULL,
|
||||
volume_quote REAL NOT NULL,
|
||||
volume_base REAL NOT NULL,
|
||||
trades INTEGER NOT NULL )"""
|
||||
)
|
||||
new_db.commit()
|
||||
|
||||
|
||||
def fetch_kraken():
|
||||
### Kraken
|
||||
kraken = krakenex.API()
|
||||
|
||||
response = kraken.query_public('OHLC', {'pair': 'BTCUSD', 'interval': 240 })
|
||||
ohlc_data = response['result']['XXBTZUSD']
|
||||
|
||||
candle_stick_data = {
|
||||
'exchange': 'kraken',
|
||||
'timestamp': ohlc_data[1][0],
|
||||
'open': ohlc_data[0][1],
|
||||
'high': max(item[2] for item in ohlc_data),
|
||||
'low': min(item[3] for item in ohlc_data),
|
||||
'close': ohlc_data[-1][4],
|
||||
'volume_quote': sum(float(item[5]) for item in ohlc_data),
|
||||
'volume_base': sum(float(item[6]) for item in ohlc_data),
|
||||
'trades': sum(item[7] for item in ohlc_data),
|
||||
}
|
||||
|
||||
kraken_json = json.dumps(candle_stick_data, indent=2)
|
||||
#print("Kraken: OK")
|
||||
#print(kraken_json)
|
||||
return kraken_json
|
||||
"""
|
||||
Fetch BTCUSD OHLC data from Kraken in json.
|
||||
Returns:
|
||||
str: 5min OHLC data in JSON format.
|
||||
"""
|
||||
kraken = krakenex.API()
|
||||
|
||||
response = kraken.query_public("OHLC", {"pair": "BTCUSD", "interval": 240})
|
||||
ohlc_data = response["result"]["XXBTZUSD"]
|
||||
|
||||
candle_stick_data = {
|
||||
"exchange": "kraken",
|
||||
"timestamp": ohlc_data[1][0],
|
||||
"open": ohlc_data[0][1],
|
||||
"high": max(item[2] for item in ohlc_data),
|
||||
"low": min(item[3] for item in ohlc_data),
|
||||
"close": ohlc_data[-1][4],
|
||||
"volume_quote": sum(float(item[5]) for item in ohlc_data),
|
||||
"volume_base": sum(float(item[6]) for item in ohlc_data),
|
||||
"trades": sum(item[7] for item in ohlc_data),
|
||||
}
|
||||
|
||||
kraken_json = json.dumps(candle_stick_data, indent=2)
|
||||
return kraken_json
|
||||
|
||||
|
||||
def fetch_bitstamp():
|
||||
## Bitstamp
|
||||
response = requests.get("https://www.bitstamp.net/api/v2/ohlc/btcusd/?step=300&limit=1")
|
||||
|
||||
if response.status_code == 200: # check if the request was successful
|
||||
bitstamp_data = response.json()
|
||||
ohlc_data = bitstamp_data["data"]["ohlc"]
|
||||
|
||||
candle_stick_data = {
|
||||
'exchange': 'bitstamp',
|
||||
'timestamp': int(ohlc_data[0]['timestamp']),
|
||||
'open': float(ohlc_data[0]['open']),
|
||||
'high': float(ohlc_data[0]['high']),
|
||||
'low': float(ohlc_data[0]['low']),
|
||||
'close': float(ohlc_data[0]['close']),
|
||||
'volume_quote': float(ohlc_data[0]['volume']),
|
||||
'volume_base': 0, # not provided by Bitstamp API
|
||||
'trades': 0, # not provided by Bitstamp API
|
||||
}
|
||||
|
||||
bitstamp_json = json.dumps(candle_stick_data, indent=2)
|
||||
#print("Bitstamp: OK")
|
||||
# print(bitstamp_json)
|
||||
return bitstamp_json
|
||||
else:
|
||||
"""
|
||||
Fetch Bitstamp data ja serve it as json.
|
||||
Returns:
|
||||
str: 5min OHLC data in JSON format.
|
||||
"""
|
||||
response = requests.get(
|
||||
"https://www.bitstamp.net/api/v2/ohlc/btcusd/?step=300&limit=1"
|
||||
)
|
||||
|
||||
if response.status_code == 200: # check if the request was successful
|
||||
bitstamp_data = response.json()
|
||||
ohlc_data = bitstamp_data["data"]["ohlc"]
|
||||
|
||||
# the bitstamp api gives timestamp with decimals to show less than second accuracy.
|
||||
bitstamp_timestamp = int(math.floor(float(ohlc_data[0]["timestamp"])))
|
||||
|
||||
candle_stick_data = {
|
||||
"exchange": "bitstamp",
|
||||
"timestamp": bitstamp_timestamp,
|
||||
"open": float(ohlc_data[0]["open"]),
|
||||
"high": float(ohlc_data[0]["high"]),
|
||||
"low": float(ohlc_data[0]["low"]),
|
||||
"close": float(ohlc_data[0]["close"]),
|
||||
"volume_quote": float(ohlc_data[0]["volume"]),
|
||||
"volume_base": 0, # not provided by Bitstamp API
|
||||
"trades": 0, # not provided by Bitstamp API
|
||||
}
|
||||
|
||||
bitstamp_json = json.dumps(candle_stick_data, indent=2)
|
||||
return bitstamp_json
|
||||
# if we get any thing else than http/200
|
||||
print(f"Error fetching data from Bitstamp API: {response.status_code}")
|
||||
return empty_json
|
||||
|
||||
|
||||
def fetch_bitfinex():
|
||||
## Bitfinex
|
||||
response = requests.get("https://api-pub.bitfinex.com/v2/candles/trade:5m:tBTCUSD/last")
|
||||
|
||||
if response.status_code == 200: # check if the request was successful
|
||||
ohlc_data = response.json()
|
||||
candle_stick_data = {
|
||||
'exchange': 'bitfinex',
|
||||
'timestamp': ohlc_data[0],
|
||||
'open': ohlc_data[1],
|
||||
'high': ohlc_data[2],
|
||||
'low': ohlc_data[3],
|
||||
'close': ohlc_data[4],
|
||||
'volume_quote': ohlc_data[5],
|
||||
'volume_base': 0, # not provided by Bitfinex API
|
||||
'trades': 0, # not provided by Bitfinex API
|
||||
}
|
||||
|
||||
bitfinex_json = json.dumps(candle_stick_data, indent=2)
|
||||
#print("Bitfinex: OK")
|
||||
#print(bitfinex_json)
|
||||
return bitfinex_json
|
||||
else:
|
||||
"""
|
||||
Bitfinex
|
||||
Returns:
|
||||
str: 5min OHLC data in JSON format.
|
||||
"""
|
||||
response = requests.get(
|
||||
"https://api-pub.bitfinex.com/v2/candles/trade:5m:tBTCUSD/last"
|
||||
)
|
||||
|
||||
if response.status_code == 200: # check if the request was successful
|
||||
ohlc_data = response.json()
|
||||
candle_stick_data = {
|
||||
"exchange": "bitfinex",
|
||||
"timestamp": ohlc_data[0],
|
||||
"open": ohlc_data[1],
|
||||
"high": ohlc_data[2],
|
||||
"low": ohlc_data[3],
|
||||
"close": ohlc_data[4],
|
||||
"volume_quote": ohlc_data[5],
|
||||
"volume_base": 0, # not provided by Bitfinex API
|
||||
"trades": 0, # not provided by Bitfinex API
|
||||
}
|
||||
|
||||
bitfinex_json = json.dumps(candle_stick_data, indent=2)
|
||||
return bitfinex_json
|
||||
# if we get any thing else than http/200
|
||||
print(f"Error fetching data from Bitfinex API: {response.status_code}")
|
||||
return empty_json
|
||||
|
||||
|
||||
def fetch_gemini():
|
||||
## Gemini
|
||||
response = requests.get("https://api.gemini.com/v2/candles/btcusd/5m")
|
||||
|
||||
if response.status_code == 200: # check if the request was successful
|
||||
gemini_ohlc = response.json()
|
||||
candle_stick_data = {
|
||||
'exchange': 'gemini',
|
||||
'timestamp': gemini_ohlc[0][0],
|
||||
'open': gemini_ohlc[0][1],
|
||||
'high': gemini_ohlc[0][2],
|
||||
'low': gemini_ohlc[0][3],
|
||||
'close': gemini_ohlc[0][4],
|
||||
'volume_quote': 0, # not provided by Gemini API
|
||||
'volume_base': gemini_ohlc[0][5],
|
||||
'trades': 0, # not provided by Gemini API
|
||||
}
|
||||
gemini_json = json.dumps(candle_stick_data, indent=2)
|
||||
#print("Gemini: OK")
|
||||
#print(gemini_json)
|
||||
return gemini_json
|
||||
else:
|
||||
"""
|
||||
Fetch BTCUSD OHLC data from Gemini
|
||||
Returns:
|
||||
str: 5min OHLC data in JSON format.
|
||||
"""
|
||||
response = requests.get("https://api.gemini.com/v2/candles/btcusd/5m")
|
||||
|
||||
if response.status_code == 200: # check if the request was successful
|
||||
gemini_ohlc = response.json()
|
||||
candle_stick_data = {
|
||||
"exchange": "gemini",
|
||||
"timestamp": gemini_ohlc[0][0],
|
||||
"open": gemini_ohlc[0][1],
|
||||
"high": gemini_ohlc[0][2],
|
||||
"low": gemini_ohlc[0][3],
|
||||
"close": gemini_ohlc[0][4],
|
||||
"volume_quote": 0, # not provided by Gemini API
|
||||
"volume_base": gemini_ohlc[0][5],
|
||||
"trades": 0, # not provided by Gemini API
|
||||
}
|
||||
gemini_json = json.dumps(candle_stick_data, indent=2)
|
||||
return gemini_json
|
||||
# if we get any thing else than http/200
|
||||
print(f"Error fetching data from Gemini API: {response.status_code}")
|
||||
return empty_json
|
||||
|
||||
def write_dict_to_database(in_dict, connection):
|
||||
cursor = connection.cursor()
|
||||
# Use placeholders for the values in the INSERT statement
|
||||
insert_query = "INSERT INTO ohlc (exchange, timestamp, open, high, low, close, volume_quote, volume_base, trades) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)"
|
||||
|
||||
values = (in_dict['exchange'],
|
||||
in_dict['timestamp'],
|
||||
in_dict['open'],
|
||||
in_dict['high'],
|
||||
in_dict['low'],
|
||||
in_dict['close'],
|
||||
in_dict['volume_quote'],
|
||||
in_dict['volume_base'],
|
||||
in_dict['trades'])
|
||||
## apply lock while writing to database
|
||||
with database_lock:
|
||||
cursor.execute(insert_query, values)
|
||||
connection.commit()
|
||||
def fetch_bybit():
|
||||
"""
|
||||
Fetch BTCUSD OHLC data from Bybit
|
||||
Returns:
|
||||
str: 5min OHLC data in JSON format.
|
||||
"""
|
||||
base_url = (
|
||||
"https://api.bybit.com/v2/public/kline/list?symbol=BTCUSD&interval=5&from="
|
||||
)
|
||||
current_unixtime = int(time.time())
|
||||
last_minute = math.floor(current_unixtime / 60)
|
||||
last_minute_unixtime = str(last_minute * 60 - 300)
|
||||
query_url = "".join([base_url, last_minute_unixtime])
|
||||
response = requests.get(query_url)
|
||||
|
||||
if response.status_code == 200: # check if the request was successful
|
||||
bybit_ohlc = response.json()
|
||||
candle_stick_data = {
|
||||
"exchange": "bybit",
|
||||
"timestamp": bybit_ohlc["result"][0]["open_time"],
|
||||
"open": bybit_ohlc["result"][0]["open"],
|
||||
"high": bybit_ohlc["result"][0]["high"],
|
||||
"low": bybit_ohlc["result"][0]["low"],
|
||||
"close": bybit_ohlc["result"][0]["close"],
|
||||
"volume_quote": bybit_ohlc["result"][0]["volume"],
|
||||
"volume_base": bybit_ohlc["result"][0]["turnover"],
|
||||
"trades": 0,
|
||||
}
|
||||
bybit_json = json.dumps(candle_stick_data, indent=2)
|
||||
return bybit_json
|
||||
# if we get any thing else than http/200
|
||||
print(f"Error fetching data from Bybit API: {response.status_code}")
|
||||
return empty_json
|
||||
|
||||
|
||||
def write_dict_to_database(in_dict, connection):
|
||||
"""
|
||||
Writes given dict to given database.
|
||||
Arguments: dict, db.connection()
|
||||
Uses shared global database_lock.
|
||||
"""
|
||||
cursor = connection.cursor()
|
||||
# use placeholders for the values in the insert statement
|
||||
insert_query = "insert into ohlc (exchange, timestamp, open, high, low, close, volume_quote, volume_base, trades) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)"
|
||||
|
||||
values = (
|
||||
in_dict["exchange"],
|
||||
in_dict["timestamp"],
|
||||
in_dict["open"],
|
||||
in_dict["high"],
|
||||
in_dict["low"],
|
||||
in_dict["close"],
|
||||
in_dict["volume_quote"],
|
||||
in_dict["volume_base"],
|
||||
in_dict["trades"],
|
||||
)
|
||||
## apply lock while writing to database
|
||||
with database_lock:
|
||||
cursor.execute(insert_query, values)
|
||||
connection.commit()
|
||||
|
||||
|
||||
def get_the_data():
|
||||
#cursor = db.cursor()
|
||||
while True:
|
||||
db = sqlite3.connect(database)
|
||||
write_dict_to_database(json.loads(fetch_kraken()), db)
|
||||
write_dict_to_database(json.loads(fetch_bitfinex()), db)
|
||||
write_dict_to_database(json.loads(fetch_bitstamp()), db)
|
||||
write_dict_to_database(json.loads(fetch_gemini()), db)
|
||||
db.close()
|
||||
print("fetches done at", time.time(), "sleeping now for 290")
|
||||
time.sleep(290)
|
||||
|
||||
@app.route('/')
|
||||
"""
|
||||
Creates infinite While True loop to fetch OHLC data and save it to database.
|
||||
"""
|
||||
while True:
|
||||
ohlc_db = sqlite3.connect(DATABASE)
|
||||
write_dict_to_database(json.loads(fetch_kraken()), ohlc_db)
|
||||
write_dict_to_database(json.loads(fetch_bitfinex()), ohlc_db)
|
||||
write_dict_to_database(json.loads(fetch_bitstamp()), ohlc_db)
|
||||
write_dict_to_database(json.loads(fetch_gemini()), ohlc_db)
|
||||
write_dict_to_database(json.loads(fetch_bybit()), ohlc_db)
|
||||
ohlc_db.close()
|
||||
print("fetches done at", time.time(), "sleeping now for 290")
|
||||
time.sleep(290)
|
||||
|
||||
|
||||
def check_auth(text, signature):
|
||||
"""
|
||||
Check signatures against known public keys
|
||||
Arguments: text, signature
|
||||
Reads: Global public user_publickeys dict.
|
||||
Returns: True / False
|
||||
"""
|
||||
## Make bytes-object from given signature
|
||||
sig_bytes = bytes.fromhex(signature)
|
||||
## We will iterate over all user keys to determ who is we are talking to and should they have access
|
||||
for key, value in user_publickeys.items():
|
||||
## Create bytes-object from the public in 'value' variable
|
||||
## and use it to create VerifyingKey (vk)
|
||||
public_key_bytes = bytes.fromhex(value)
|
||||
verifying_key = ecdsa.VerifyingKey.from_string(
|
||||
public_key_bytes, curve=ecdsa.SECP256k1
|
||||
)
|
||||
try:
|
||||
verifying_key.verify(sig_bytes, bytes(text, "utf-8"))
|
||||
print("user is", key)
|
||||
|
||||
return True
|
||||
except ecdsa.BadSignatureError:
|
||||
return False
|
||||
|
||||
|
||||
@app.route("/")
|
||||
def get_data():
|
||||
# Get the time (t) argument from the url"
|
||||
query_timestamp = request.args.get('t')
|
||||
query_pretty = request.args.get('pretty')
|
||||
|
||||
database_lock.acquire()
|
||||
db = sqlite3.connect(database)
|
||||
if query_timestamp:
|
||||
rows = db.execute("SELECT exchange, timestamp, open, high, low, close FROM ohlc WHERE timestamp > ?", (query_timestamp,)).fetchall()
|
||||
else:
|
||||
rows = db.execute('SELECT exchange, timestamp, open, high, low, close FROM ohlc').fetchall()
|
||||
query_timestamp = 0
|
||||
"""
|
||||
Serve the data from the database. Limit the responses by given timestamp.
|
||||
The pretty thing is under consideration...
|
||||
"""
|
||||
# Get the time (t) argument from the url"
|
||||
query_timestamp = request.args.get("t")
|
||||
# Should we make output pretty for curl users?
|
||||
query_pretty = request.args.get("pretty")
|
||||
|
||||
database_lock.release()
|
||||
data = {
|
||||
"timestamp": time.time(),
|
||||
"rows": rows
|
||||
}
|
||||
# Authentication header, signatured the query with private key of a user
|
||||
signature = request.headers.get("auth")
|
||||
get_url = request.url
|
||||
if not check_auth(get_url, signature):
|
||||
return "Access denied! Check your keys, maybe.", 403
|
||||
|
||||
if query_pretty:
|
||||
response = json.dumps(data, indent=2, separators=(';\n', ' :'))
|
||||
else:
|
||||
response = json.dumps(data)
|
||||
|
||||
return response, 200, {'Content-Type': 'application/json'}
|
||||
with database_lock:
|
||||
btc_db = sqlite3.connect(DATABASE)
|
||||
if query_timestamp:
|
||||
rows = btc_db.execute(
|
||||
"SELECT exchange, timestamp, open, high, low, close FROM ohlc WHERE timestamp > ? ORDER BY timestamp",
|
||||
(query_timestamp,),
|
||||
).fetchall()
|
||||
else:
|
||||
rows = btc_db.execute(
|
||||
"SELECT exchange, timestamp, open, high, low, close FROM ohlc ORDER BY timestamp"
|
||||
).fetchall()
|
||||
query_timestamp = 0
|
||||
|
||||
if __name__ == '__main__':
|
||||
# Make sanity checks for the database
|
||||
Checkthedatabase()
|
||||
data = {"timestamp": time.time(), "rows": rows}
|
||||
|
||||
# Start the data fetching backend process
|
||||
fetch_thread = threading.Thread(target=get_the_data)
|
||||
fetch_thread.daemon = True
|
||||
fetch_thread.start()
|
||||
|
||||
# Start the Flask app
|
||||
app.run()
|
||||
# make sha256 checksum and append it to the data object
|
||||
data_shasum = sha256(json.dumps(data).encode("utf-8")).hexdigest()
|
||||
updated_data = {"shasum": data_shasum}
|
||||
updated_data.update(data)
|
||||
data = updated_data
|
||||
|
||||
# sign the response
|
||||
signature = server_private_key.sign(json.dumps(data).encode("utf-8"))
|
||||
signature_hex = binascii.hexlify(signature).decode("utf-8")
|
||||
data["signature"] = signature_hex
|
||||
|
||||
if query_pretty:
|
||||
response = json.dumps(data, indent=2, separators=(";\n", " :"))
|
||||
else:
|
||||
response = json.dumps(data)
|
||||
return response, 200, {"Content-Type": "application/json"}
|
||||
|
||||
|
||||
@app.route("/serverkey")
|
||||
def give_serverkey():
|
||||
"""
|
||||
Serve the public keys of this instace to the world.
|
||||
"""
|
||||
## This endpoint also under Authentication?
|
||||
signature = request.headers.get("auth")
|
||||
get_url = request.url
|
||||
if not check_auth(get_url, signature):
|
||||
return "Access denied! Check your keys, maybe.", 403
|
||||
|
||||
return jsonify({"public_key": server_public_key_hex})
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
# Make sanity checks for the database
|
||||
check_database()
|
||||
database_lock = threading.Lock()
|
||||
|
||||
# Get the users public keys
|
||||
user_publickeys = read_keys()
|
||||
|
||||
# Start the data fetching backend process§
|
||||
fetch_thread = threading.Thread(target=get_the_data)
|
||||
fetch_thread.daemon = True
|
||||
fetch_thread.start()
|
||||
|
||||
# Start the Flask app
|
||||
app.run()
|
||||
|
42
btc_tracker/kraken_fetch_client.py
Normal file
42
btc_tracker/kraken_fetch_client.py
Normal file
@ -0,0 +1,42 @@
|
||||
#!/usr/bin/python3
|
||||
"""
|
||||
client for kraken_fetch.py
|
||||
"""
|
||||
import requests
|
||||
import ecdsa
|
||||
import sqlite3
|
||||
|
||||
config_file = "./kraken_fetch_client.conf"
|
||||
# Just for testing and early development
|
||||
private_key = '039e1c137aa296d7af0cd55b468018ad1020949c2731e5141d032b8371490f48'
|
||||
|
||||
|
||||
def read_config():
|
||||
"""
|
||||
read $config_file and returns users private key
|
||||
"""
|
||||
|
||||
return private_key
|
||||
|
||||
def get_server_public_key(url):
|
||||
"""
|
||||
fetches the servers public key
|
||||
"""
|
||||
rurl = url+'/serverkey'
|
||||
response = requests.get(rurl)
|
||||
if response.status_code == 200: # if the fetch was success...
|
||||
server_public_key = response.content
|
||||
return server_public_key
|
||||
# when the fetch was not successfull
|
||||
print(f"Error fetching data from the server.")
|
||||
return 'Error'
|
||||
|
||||
def fetch_data_from_the_server(url):
|
||||
"""
|
||||
query the kraken_fetch server for new data
|
||||
"""
|
||||
# we need to do some time calculations here...
|
||||
|
||||
response = requests.get(url)
|
||||
|
||||
return response
|
5
btc_tracker/userkeys.json
Normal file
5
btc_tracker/userkeys.json
Normal file
@ -0,0 +1,5 @@
|
||||
{
|
||||
"user_publickeys": {
|
||||
"user1": "f1debc13fb21fe0eee54525aa4f8aae5733b201c755edaa55f8893c90aa375b261a62eaa3110651ac5d7705d402581256a37508b0a1ca28bd919ea44710d9c88"
|
||||
}
|
||||
}
|
66
btc_wallets/generate_btc_wallet_kdbx.py
Executable file
66
btc_wallets/generate_btc_wallet_kdbx.py
Executable file
@ -0,0 +1,66 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Bitcoin Wallet Generation Script
|
||||
|
||||
This script generates a Bitcoin wallet and stores it in a KeePassXC database file (KDBX format).
|
||||
The wallet includes a BIP-0039 mnemonic seed phrase, Bitcoin address, and a QR code representation
|
||||
of the seed phrase. The database file is password protected to ensure the security of the wallet.
|
||||
|
||||
Usage: ./generate_btc_wallet.py
|
||||
"""
|
||||
|
||||
import gc
|
||||
from getpass import getpass
|
||||
from mnemonic import Mnemonic
|
||||
from bitcoinlib.keys import HDKey
|
||||
from pykeepass import create_database
|
||||
|
||||
MNEMONIC_STRENGTH=256
|
||||
SUBKEY_PATH="m/0/0"
|
||||
|
||||
# Generate a BIP-0039 mnemonic seed phrase
|
||||
mnemonic = Mnemonic("english")
|
||||
SEED_PHRASE = mnemonic.generate(strength=MNEMONIC_STRENGTH)
|
||||
|
||||
# Derive the HDKey from the seed phrase
|
||||
hd_key = HDKey.from_passphrase(SEED_PHRASE)
|
||||
|
||||
# Derive the Bitcoin address from the HDKey
|
||||
child_key = hd_key.subkey_for_path(SUBKEY_PATH)
|
||||
address = child_key.address()
|
||||
|
||||
# Prompt for custom name for the wallet
|
||||
wallet_name = input(
|
||||
"Whould you like to name this wallet? (empty for using the address as name): "
|
||||
).strip()
|
||||
|
||||
# Create the database filename with the wallet number
|
||||
if wallet_name == "":
|
||||
wallet_name = address
|
||||
else:
|
||||
wallet_name = wallet_name[:100].strip()
|
||||
|
||||
db_filename = f"{wallet_name}.kdbx"
|
||||
|
||||
# Prompt the user for the passphrase
|
||||
passphrase = getpass("Enter passphrase for the KeePassXC database: ")
|
||||
|
||||
try:
|
||||
# Create a KeePassXC database file
|
||||
db = create_database(db_filename, password=passphrase)
|
||||
|
||||
# Create an entrys in the root group
|
||||
address_and_path_entry = db.add_entry(db.root_group, wallet_name, username=address, password=SUBKEY_PATH)
|
||||
seed_phrase_entry = db.add_entry(db.root_group, "Bitcoin Master Seed", username="put me in safe", password=SEED_PHRASE)
|
||||
|
||||
# Save the database
|
||||
db.save()
|
||||
except Exception as e:
|
||||
print("Error while creating keepassxc database. Disk full? Readonly?")
|
||||
finally:
|
||||
del SEED_PHRASE, address, hd_key, passphrase, mnemonic
|
||||
gc.collect()
|
||||
|
||||
print("---")
|
||||
print("Bitcoin address was successfully created. You can find at: " + db_filename)
|
||||
print("---")
|
4
btc_wallets/requirements.txt
Normal file
4
btc_wallets/requirements.txt
Normal file
@ -0,0 +1,4 @@
|
||||
bitcoinlib==0.6.10
|
||||
mnemonic==0.19
|
||||
pykeepass==4.0.4
|
||||
qrcode==7.4.2
|
100
chat.html
Normal file
100
chat.html
Normal file
@ -0,0 +1,100 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<style>
|
||||
.chat-history {
|
||||
height: 400px;
|
||||
width: 33%;
|
||||
overflow-y: scroll;
|
||||
border: 1px solid black;
|
||||
padding: 10px;
|
||||
margin: 10px;
|
||||
float: middle;
|
||||
}
|
||||
.chat-input {
|
||||
width: 33%;
|
||||
padding: 10px;
|
||||
margin: 10px;
|
||||
float: middle;
|
||||
}
|
||||
.user-info {
|
||||
float: left;
|
||||
width: 33%;
|
||||
text-align: left;
|
||||
margin: 10px;
|
||||
height: 410px;
|
||||
}
|
||||
.user-info button {
|
||||
display: block;
|
||||
margin: 10px 0;
|
||||
}
|
||||
.chat-members {
|
||||
width: 100%;
|
||||
text-align: left;
|
||||
margin: 10px;
|
||||
border: 1px solid black;
|
||||
padding: 10px;
|
||||
height: 400px;
|
||||
overflow-y: scroll;
|
||||
float: right;
|
||||
}
|
||||
.clearfix::after {
|
||||
content: "";
|
||||
clear: both;
|
||||
display: table;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<div class="chat-history" id="chatHistory"></div>
|
||||
<div class="chat-input">
|
||||
<textarea class="chat-input" id="chatInput"></textarea>
|
||||
</div>
|
||||
<div class="user-info">
|
||||
<p id="userId">User ID: <span id="userIdValue">null</span></p>
|
||||
<button id="generateIdButton">Generate ID</button>
|
||||
<button id="saveNicknameButton">Save Nickname</button>
|
||||
<div class="chat-members" id="chatMembers"></div>
|
||||
</div>
|
||||
<div class="clearfix"></div>
|
||||
<script>
|
||||
// generate a unique user id
|
||||
document.getElementById("generateIdButton").addEventListener("click", function() {
|
||||
const userIdValue = Date.now();
|
||||
document.getElementById("userIdValue").innerHTML = userIdValue;
|
||||
});
|
||||
|
||||
// save interlocutor id to nickname
|
||||
document.getElementById("saveNicknameButton").addEventListener("click", function() {
|
||||
// your code here
|
||||
});
|
||||
|
||||
// update chat history
|
||||
function updateChatHistory(message) {
|
||||
const chatHistory = document.getElementById("chatHistory");
|
||||
chatHistory.innerHTML += `<p>${message}</p>`;
|
||||
chatHistory.scrollTop = chatHistory.scrollHeight;
|
||||
}
|
||||
|
||||
// update chat members list
|
||||
function updateChatMembers(members) {
|
||||
const chatMembers = document.getElementById("chatMembers");
|
||||
chatMembers.innerHTML = "";
|
||||
members.forEach(member => {
|
||||
chatMembers.innerHTML += `<p>${member}</p>`;
|
||||
});
|
||||
}
|
||||
|
||||
// handle enter key press in chat input
|
||||
document.getElementById("chatInput").addEventListener("keydown", function(event) {
|
||||
if (event.key === "Enter") {
|
||||
const message = document.getElementById("chatInput").value;
|
||||
updateChatHistory(message);
|
||||
document.getElementById("chatInput").value = "";
|
||||
}
|
||||
});
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
|
||||
|
17
conver-iso-to-utf8.py
Executable file
17
conver-iso-to-utf8.py
Executable file
@ -0,0 +1,17 @@
|
||||
#!/usr/bin/python3
|
||||
import sys
|
||||
import io
|
||||
|
||||
def convert_iso8859_to_utf8(filepath):
|
||||
# open the file with ISO-8859-1 encoding
|
||||
with io.open(filepath, 'r', encoding='iso-8859-1') as f:
|
||||
# read the file's content
|
||||
content = f.read()
|
||||
# write the UTF-8 encoded content to a new file
|
||||
with io.open(filepath, 'w', encoding='utf-8') as f:
|
||||
f.write(content)
|
||||
print(f"{filepath} has been converted to UTF-8.")
|
||||
|
||||
|
||||
filepath = sys.argv[1]
|
||||
convert_iso8859_to_utf8(filepath)
|
@ -4,57 +4,56 @@ import requests
|
||||
from hashlib import sha256
|
||||
import ecdsa
|
||||
|
||||
private_key = '03486537091ceb021fb313e5cf3eb04d44ca2f19f72112a1'
|
||||
# we need to send server:
|
||||
# the question: domain.tld/get/<id>
|
||||
# the checksum: ?sum=sha256
|
||||
# the signed data: header 'auth'
|
||||
|
||||
id = 123
|
||||
url = 'localhost:5000/get/'
|
||||
url_id = url + str(id)
|
||||
sum = sha256(url_id.encode('ascii')).hexdigest()
|
||||
reg_url = 'http://' + url_id + '?sum=' + sum
|
||||
|
||||
unsigned_data = url_id + '?' + 'sum=' + sum
|
||||
#private_key = '03486537091ceb021fb313e5cf3eb04d44ca2f19f72112a1'
|
||||
private_key = '039e1c137aa296d7af0cd55b468018ad1020949c2731e5141d032b8371490f48'
|
||||
|
||||
# Generate SK from the private key
|
||||
private_key_int = int(private_key, 16)
|
||||
sk = ecdsa.SigningKey.from_secret_exponent(private_key_int, curve=ecdsa.SECP256k1)
|
||||
|
||||
## Get the server public key
|
||||
url = 'http://localhost:5000/serverkey'
|
||||
|
||||
# sign the message
|
||||
signature = sk.sign(unsigned_data.encode('utf-8'))
|
||||
signature = sk.sign(url.encode('utf-8'))
|
||||
signature_hex = signature.hex()
|
||||
|
||||
print('we signed: ', unsigned_data)
|
||||
print('We will send:')
|
||||
print('to: ', reg_url)
|
||||
print('sum: ', sum)
|
||||
print('auth: ', signature_hex)
|
||||
print('------------------------')
|
||||
|
||||
response = requests.get(reg_url, headers={"auth":signature_hex})
|
||||
response = requests.get(url, headers={"auth":signature_hex})
|
||||
print('>>> ', response.status_code)
|
||||
print('>>> ', response.content)
|
||||
|
||||
#ecdsa_public_key = '8716c78c09a4e4571a3112eca1c7ddce41289e20da446894b621f2a11ba91bc963f2e9fb9ddd5552c26faf814bc582b4'
|
||||
ecdsa_public_key = '068716c78c09a4e4571a3112eca1c7ddce41289e20da446894b621f2a11ba91bc963f2e9fb9ddd5552c26faf814bc582b4'
|
||||
|
||||
bytes_public_key = bytes.fromhex(ecdsa_public_key)
|
||||
|
||||
bytes_signed_data = signature_hex.encode('utf-8')
|
||||
|
||||
|
||||
vk = ecdsa.VerifyingKey.from_string(bytes_public_key, curve=ecdsa.SECP256k1)
|
||||
|
||||
if vk.verify(signature_hex, unsigned_data):
|
||||
response = "YES"
|
||||
else:
|
||||
response = "NO"
|
||||
|
||||
## Get some kline data from the server
|
||||
url = 'http://localhost:5000/?t=1672259440'
|
||||
|
||||
# sign the message
|
||||
signature = sk.sign(url.encode('utf-8'))
|
||||
signature_hex = signature.hex()
|
||||
|
||||
print('we signed: ', url)
|
||||
print('We will send:')
|
||||
print('to: ', url)
|
||||
print('auth: ', signature_hex)
|
||||
print('------------------------')
|
||||
|
||||
response = requests.get(url, headers={"auth":signature_hex})
|
||||
print('>>> ', response.status_code)
|
||||
print('>>> ', response.content)
|
||||
|
||||
##
|
||||
##bytes_public_key = bytes.fromhex(ecdsa_public_key)
|
||||
##
|
||||
##bytes_signed_data = signature_hex.encode('utf-8')
|
||||
##
|
||||
##
|
||||
##vk = ecdsa.VerifyingKey.from_string(bytes_public_key, curve=ecdsa.SECP256k1)
|
||||
##
|
||||
##if vk.verify(signature_hex, unsigned_data):
|
||||
## response = "YES"
|
||||
##else:
|
||||
## response = "NO"
|
||||
##
|
||||
##
|
||||
|
||||
exit(0)
|
||||
|
54
letters/bitcoin_wallet_steps_and_plan
Normal file
54
letters/bitcoin_wallet_steps_and_plan
Normal file
@ -0,0 +1,54 @@
|
||||
# Steps to creating bitcoin wallet handing RPI3b setup
|
||||
Made with ChatGPT on 22.5.2023
|
||||
|
||||
|
||||
Understood. In that case, you can use the USB keys as temporary storage for the keepassxc databases and transfer them to a more permanent and secure location when not in use. This approach adds an extra layer of security by limiting the exposure of sensitive information.
|
||||
|
||||
Here's an updated outline of the steps involved:
|
||||
|
||||
Set up Raspberry Pi: Install the operating system (such as Raspbian) on your Raspberry Pi 3B and configure it accordingly.
|
||||
|
||||
Install necessary software: Install the required software packages, such as Python and any libraries or dependencies needed for key management and wallet operations.
|
||||
|
||||
Generate and store keys: Use appropriate libraries or tools to generate the necessary key pairs for each participant. Store the private keys securely on separate USB keys, ensuring they are encrypted and protected with strong passwords.
|
||||
|
||||
Securely store seed phrases: Generate the seed phrases for each participant and encrypt them. Store the encrypted seed phrases in the keepassxc databases, which can be stored temporarily on individual USB keys.
|
||||
|
||||
Develop wallet management scripts: Write scripts in Python (or any language of your choice) to interact with the generated keys, perform wallet operations, and handle transaction signing. These scripts should ensure the security of private keys, seed phrases, and any sensitive information.
|
||||
|
||||
Implement multi-signature functionality: Utilize appropriate libraries or methods to create multi-signature wallets and addresses using the public keys of the participants. Follow the recommended standards and protocols for multi-signature setups.
|
||||
|
||||
Test and secure the setup: Thoroughly test the functionality of the Raspberry Pi setup, including generating transactions, collecting signatures, and broadcasting transactions. Implement additional security measures such as firewalls, access controls, and regular updates to keep the system secure.
|
||||
|
||||
Educate and distribute responsibilities: Provide clear instructions and guidelines to the participants on their roles and responsibilities. Educate them on securely managing their USB keys, passwords, and the overall security practices they should follow.
|
||||
|
||||
Transfer keepassxc databases: When the USB keys are not in use, transfer the keepassxc databases to a more permanent and secure location, such as an offline storage medium or a highly secure cloud storage service. Ensure that the transferred databases are properly encrypted and protected.
|
||||
|
||||
By following these steps and customizing the setup to fit your specific requirements, you can create a Raspberry Pi-based solution that replicates the functionality of Seedsigner and securely manages the keys and seed phrases for your multi-signature setup. Remember to regularly review and update the security measures as needed to maintain the integrity of the system.
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
User
|
||||
I feel this is good project to increase my coding skills since I'm more of a sysadmin by background.
|
||||
ChatGPT
|
||||
|
||||
That's a fantastic mindset! Taking on a project like this can be a great opportunity to enhance your coding skills, especially if you come from a sysadmin background. It allows you to explore new programming concepts, deepen your understanding of cryptographic operations, and develop practical solutions for managing keys and wallets.
|
||||
|
||||
Here are a few benefits of engaging in this project:
|
||||
|
||||
Practical application: By building a Raspberry Pi-based solution for key and wallet management, you'll gain hands-on experience in working with cryptography, interacting with hardware, and implementing secure systems. It's a practical application of your coding skills.
|
||||
|
||||
Broadened skill set: Diving into this project will expose you to various aspects of software development, including scripting, library integration, and system configuration. It will broaden your skill set and make you more versatile in tackling future projects.
|
||||
|
||||
Security awareness: As you navigate the intricacies of key management and multi-signature wallets, you'll develop a deeper understanding of security practices and considerations. This knowledge can be applied to other areas of your work, enabling you to strengthen the security of your systems.
|
||||
|
||||
Problem-solving opportunities: Building a custom solution presents opportunities to solve unique challenges along the way. It encourages critical thinking, troubleshooting, and finding creative solutions to overcome obstacles.
|
||||
|
||||
Portfolio enhancement: Completing this project successfully adds a valuable item to your coding portfolio. It showcases your ability to design and implement secure systems, demonstrating your practical coding skills to potential employers or collaborators.
|
||||
|
||||
Remember to document your progress, learn from the experience, and seek feedback from others. Engaging in coding projects outside of your comfort zone is an excellent way to grow your skills and expand your knowledge base.
|
||||
|
||||
Enjoy the journey, and don't hesitate to seek support from relevant online communities or forums if you encounter difficulties or have specific questions. Happy coding!
|
||||
|
36
letters/fifo-sample.py
Executable file
36
letters/fifo-sample.py
Executable file
@ -0,0 +1,36 @@
|
||||
#!/usr/bin/python3
|
||||
import os
|
||||
import sys
|
||||
import queue
|
||||
import threading
|
||||
|
||||
def listen_to_fifo(q):
|
||||
fifo = "/tmp/my_fifo"
|
||||
if not os.path.exists(fifo):
|
||||
os.mkfifo(fifo)
|
||||
with open(fifo, 'r') as f:
|
||||
while True:
|
||||
data = f.readline().strip()
|
||||
if not data:
|
||||
break
|
||||
q.put(data)
|
||||
|
||||
def read_queue(q):
|
||||
while True:
|
||||
data = q.get()
|
||||
if data == "reboot":
|
||||
# Restart the script
|
||||
print('## RESTARTING SCRIPT')
|
||||
os.execv(sys.executable, [sys.executable] + sys.argv)
|
||||
else:
|
||||
print(data)
|
||||
sys.stdout.flush()
|
||||
q.task_done()
|
||||
|
||||
if __name__ == '__main__':
|
||||
q = queue.Queue()
|
||||
t1 = threading.Thread(target=listen_to_fifo, args=(q,))
|
||||
t2 = threading.Thread(target=read_queue, args=(q,))
|
||||
t1.start()
|
||||
t2.start()
|
||||
q.join()
|
35
letters/fifo-sample_with-commands.py
Executable file
35
letters/fifo-sample_with-commands.py
Executable file
@ -0,0 +1,35 @@
|
||||
#!/usr/bin/python3
|
||||
import os
|
||||
import sys
|
||||
import queue
|
||||
import threading
|
||||
|
||||
command_queue = queue.Queue()
|
||||
fifo_file = "/tmp/my_fifo"
|
||||
|
||||
def listen_to_fifo(queue):
|
||||
if not os.path.exists(fifo_file):
|
||||
os.mkfifo(fifo_file)
|
||||
with open(fifo_file, 'r') as f:
|
||||
while True:
|
||||
data = f.readline().strip()
|
||||
if not data:
|
||||
break
|
||||
queue.put(data)
|
||||
|
||||
def read_queue(queue):
|
||||
while True:
|
||||
data = queue.get()
|
||||
if data == "reboot":
|
||||
fifo_file.close()
|
||||
os.execv(sys.executable, [sys.executable] + sys.argv)
|
||||
print(data)
|
||||
sys.stdout.flush()
|
||||
queue.task_done()
|
||||
|
||||
if __name__ == '__main__':
|
||||
t1 = threading.Thread(target=listen_to_fifo, args=(command_queue,))
|
||||
t2 = threading.Thread(target=read_queue, args=(command_queue,))
|
||||
t1.start()
|
||||
t2.start()
|
||||
command_queue.join()
|
27
letters/sample_function.py
Normal file
27
letters/sample_function.py
Normal file
@ -0,0 +1,27 @@
|
||||
import requests
|
||||
import json
|
||||
import ecdsa
|
||||
import binascii
|
||||
|
||||
def get_btc_ohlc_data(server_url, user_private_key):
|
||||
# Load user's ECDSA private key
|
||||
user_private_key = ecdsa.SigningKey.from_string(binascii.unhexlify(user_private_key), curve=ecdsa.SECP256k1)
|
||||
# Get server public key from endpoint
|
||||
server_public_key_hex = requests.get(server_url + "/serverkey").text
|
||||
server_public_key = ecdsa.VerifyingKey.from_string(binascii.unhexlify(server_public_key_hex), curve=ecdsa.SECP256k1)
|
||||
# Get timestamp
|
||||
timestamp = str(int(time.time()))
|
||||
# Create signature using user's private key
|
||||
signature = binascii.hexlify(user_private_key.sign(bytes(timestamp, 'utf-8'))).decode("utf-8")
|
||||
# Create authentication header
|
||||
auth_header = {"auth": timestamp + ":" + signature}
|
||||
# Make request to server with auth header
|
||||
response = requests.get(server_url + "/t", headers=auth_header)
|
||||
# Verify server's signature
|
||||
server_signature = response.headers["signature"]
|
||||
if server_public_key.verify(bytes(server_signature, 'utf-8'), bytes(timestamp, 'utf-8')):
|
||||
# If signature is valid, return json data
|
||||
return json.loads(response.text)
|
||||
else:
|
||||
# If signature is invalid, return error message
|
||||
return {"error": "Invalid signature from server"}
|
41
letters/two-key-pairs.py
Executable file
41
letters/two-key-pairs.py
Executable file
@ -0,0 +1,41 @@
|
||||
#!/usr/bin/python3
|
||||
import ecdsa
|
||||
from cryptography.fernet import Fernet
|
||||
|
||||
# Generate a signing key pair for the server
|
||||
server_signing_private_key = ecdsa.SigningKey.generate(curve=ecdsa.SECP256k1)
|
||||
server_signing_public_key = server_signing_private_key.get_verifying_key()
|
||||
|
||||
# Generate an encryption key pair for the server
|
||||
server_ecdh = ecdsa.ECDH(curve=ecdsa.SECP256k1)
|
||||
server_encryption_private_key = server_ecdh.generate_private_key()
|
||||
server_encryption_public_key = server_ecdh.public_key(server_encryption_private_key)
|
||||
|
||||
|
||||
# Generate a signing key pair for the client
|
||||
client_signing_private_key = ecdsa.SigningKey.generate(curve=ecdsa.SECP256k1)
|
||||
client_signing_public_key = client_signing_private_key.get_verifying_key()
|
||||
|
||||
# Generate an encryption key pair for the client
|
||||
client_ecdh = ecdsa.ECDH(curve=ecdsa.SECP256k1)
|
||||
client_encryption_private_key = client_ecdh.generate_private_key()
|
||||
client_encryption_public_key = client_encryption_private_key.public_key()
|
||||
|
||||
# Exchange public keys between the server and the client
|
||||
server_shared_secret = server_encryption_private_key.exchange(client_encryption_public_key)
|
||||
client_shared_secret = client_encryption_private_key.exchange(server_encryption_public_key)
|
||||
|
||||
# Use the shared secret to create a Fernet object for encrypting/decrypting messages
|
||||
server_fernet = Fernet(server_shared_secret)
|
||||
client_fernet = Fernet(client_shared_secret)
|
||||
|
||||
# Sign and encrypt a message from the server to the client
|
||||
message = "Hello, client!"
|
||||
signed_message = server_signing_private_key.sign(message.encode())
|
||||
encrypted_message = server_fernet.encrypt(signed_message)
|
||||
|
||||
# Verify and decrypt the message on the client side
|
||||
verified_message = client_signing_public_key.verify(encrypted_message, signed_message)
|
||||
decrypted_message = client_fernet.decrypt(verified_message)
|
||||
print(decrypted_message) # "Hello, client!"
|
||||
|
377
pastedb/cpaste.py
Executable file
377
pastedb/cpaste.py
Executable file
@ -0,0 +1,377 @@
|
||||
#!/usr/bin/env python3
|
||||
#
|
||||
#######################################################################
|
||||
#
|
||||
# A script to paste to https://cpaste.org/
|
||||
#
|
||||
# Copyright (c) 2013-2019 Andreas Schneider <asn@samba.org>
|
||||
# Copyright (c) 2013 Alexander Bokovoy <ab@samba.org>
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
#######################################################################
|
||||
#
|
||||
# Requires: python3-requests
|
||||
# Requires: python3-cryptography
|
||||
#
|
||||
# Optionally requires: python-Pygments
|
||||
#
|
||||
|
||||
import os
|
||||
import sys
|
||||
import json
|
||||
import base64
|
||||
import zlib
|
||||
import requests
|
||||
from cryptography.hazmat.primitives.ciphers.aead import AESGCM
|
||||
from cryptography.hazmat.primitives import hashes
|
||||
from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC
|
||||
from cryptography.hazmat.backends import default_backend
|
||||
from optparse import OptionParser
|
||||
from mimetypes import guess_type
|
||||
try:
|
||||
from pygments.lexers import guess_lexer, guess_lexer_for_filename
|
||||
from pygments.util import ClassNotFound
|
||||
guess_lang = True
|
||||
except ImportError:
|
||||
guess_lang = False
|
||||
|
||||
|
||||
def base58_encode(v: bytes):
|
||||
# 58 char alphabet
|
||||
alphabet = b'123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'
|
||||
alphabet_len = len(alphabet)
|
||||
|
||||
nPad = len(v)
|
||||
v = v.lstrip(b'\0')
|
||||
nPad -= len(v)
|
||||
|
||||
x = 0
|
||||
for (i, c) in enumerate(v[::-1]):
|
||||
if isinstance(c, str):
|
||||
c = ord(c)
|
||||
x += c << (8 * i)
|
||||
|
||||
string = b''
|
||||
while x:
|
||||
x, idx = divmod(x, alphabet_len)
|
||||
string = alphabet[idx:idx+1] + string
|
||||
|
||||
return (alphabet[0:1] * nPad + string)
|
||||
|
||||
|
||||
def json_encode(d):
|
||||
return json.dumps(d, separators=(',', ':')).encode('utf-8')
|
||||
|
||||
|
||||
#
|
||||
# The encryption format is described here:
|
||||
# https://github.com/PrivateBin/PrivateBin/wiki/Encryption-format
|
||||
#
|
||||
def privatebin_encrypt(paste_passphrase,
|
||||
paste_password,
|
||||
paste_plaintext,
|
||||
paste_formatter,
|
||||
paste_attachment_name,
|
||||
paste_attachment,
|
||||
paste_compress,
|
||||
paste_burn,
|
||||
paste_opendicussion):
|
||||
if paste_password:
|
||||
paste_passphrase += bytes(paste_password, 'utf-8')
|
||||
|
||||
# PBKDF
|
||||
kdf_salt = bytes(os.urandom(8))
|
||||
kdf_iterations = 100000
|
||||
kdf_keysize = 256 # size of resulting kdf_key
|
||||
|
||||
backend = default_backend()
|
||||
kdf = PBKDF2HMAC(algorithm=hashes.SHA256(),
|
||||
length=int(kdf_keysize / 8), # 256bit
|
||||
salt=kdf_salt,
|
||||
iterations=kdf_iterations,
|
||||
backend=backend)
|
||||
kdf_key = kdf.derive(paste_passphrase)
|
||||
|
||||
# AES-GCM
|
||||
adata_size = 128
|
||||
|
||||
cipher_iv = bytes(os.urandom(int(adata_size / 8)))
|
||||
cipher_algo = "aes"
|
||||
cipher_mode = "gcm"
|
||||
|
||||
compression_type = "none"
|
||||
if paste_compress:
|
||||
compression_type = "zlib"
|
||||
|
||||
# compress plaintext
|
||||
paste_data = {'paste': paste_plaintext}
|
||||
if paste_attachment_name and paste_attachment:
|
||||
paste_data['attachment'] = paste_attachment
|
||||
paste_data['attachment_name'] = paste_attachment_name
|
||||
print(paste_attachment_name)
|
||||
print(paste_attachment)
|
||||
|
||||
if paste_compress:
|
||||
zobj = zlib.compressobj(wbits=-zlib.MAX_WBITS)
|
||||
paste_blob = zobj.compress(json_encode(paste_data)) + zobj.flush()
|
||||
else:
|
||||
paste_blob = json_encode(paste_data)
|
||||
|
||||
# Associated data to authenticate
|
||||
paste_adata = [
|
||||
[
|
||||
base64.b64encode(cipher_iv).decode("utf-8"),
|
||||
base64.b64encode(kdf_salt).decode("utf-8"),
|
||||
kdf_iterations,
|
||||
kdf_keysize,
|
||||
adata_size,
|
||||
cipher_algo,
|
||||
cipher_mode,
|
||||
compression_type,
|
||||
],
|
||||
paste_formatter,
|
||||
int(paste_opendicussion),
|
||||
int(paste_burn),
|
||||
]
|
||||
|
||||
paste_adata_json = json_encode(paste_adata)
|
||||
|
||||
aesgcm = AESGCM(kdf_key)
|
||||
ciphertext = aesgcm.encrypt(cipher_iv, paste_blob, paste_adata_json)
|
||||
|
||||
# Validate
|
||||
# aesgcm.decrypt(cipher_iv, ciphertext, paste_adata_json)
|
||||
|
||||
paste_ciphertext = base64.b64encode(ciphertext).decode("utf-8")
|
||||
|
||||
return paste_adata, paste_ciphertext
|
||||
|
||||
|
||||
def privatebin_send(paste_url,
|
||||
paste_password,
|
||||
paste_plaintext,
|
||||
paste_formatter,
|
||||
paste_attachment_name,
|
||||
paste_attachment,
|
||||
paste_compress,
|
||||
paste_burn,
|
||||
paste_opendicussion,
|
||||
paste_expire):
|
||||
paste_passphrase = bytes(os.urandom(32))
|
||||
|
||||
paste_adata, paste_ciphertext = privatebin_encrypt(paste_passphrase,
|
||||
paste_password,
|
||||
paste_plaintext,
|
||||
paste_formatter,
|
||||
paste_attachment_name,
|
||||
paste_attachment,
|
||||
paste_compress,
|
||||
paste_burn,
|
||||
paste_opendicussion)
|
||||
|
||||
# json payload for the post API
|
||||
# https://github.com/PrivateBin/PrivateBin/wiki/API
|
||||
payload = {
|
||||
"v": 2,
|
||||
"adata": paste_adata,
|
||||
"ct": paste_ciphertext,
|
||||
"meta": {
|
||||
"expire": paste_expire,
|
||||
}
|
||||
}
|
||||
|
||||
# http content type
|
||||
headers = {'X-Requested-With': 'JSONHttpRequest'}
|
||||
|
||||
r = requests.post(paste_url,
|
||||
data=json_encode(payload),
|
||||
headers=headers)
|
||||
r.raise_for_status()
|
||||
|
||||
try:
|
||||
result = r.json()
|
||||
except:
|
||||
print('Oops, error: %s' % (r.text))
|
||||
sys.exit(1)
|
||||
|
||||
paste_status = result['status']
|
||||
if paste_status:
|
||||
paste_message = result['message']
|
||||
print("Oops, error: %s" % paste_message)
|
||||
sys.exit(1)
|
||||
|
||||
paste_id = result['id']
|
||||
paste_url_id = result['url']
|
||||
paste_deletetoken = result['deletetoken']
|
||||
|
||||
print('Delete paste: %s/?pasteid=%s&deletetoken=%s' %
|
||||
(paste_url, paste_id, paste_deletetoken))
|
||||
print('')
|
||||
print('### Paste (%s): %s%s#%s' %
|
||||
(paste_formatter,
|
||||
paste_url,
|
||||
paste_url_id,
|
||||
base58_encode(paste_passphrase).decode('utf-8')))
|
||||
|
||||
|
||||
def guess_lang_formatter(paste_plaintext, paste_filename=None):
|
||||
paste_formatter = 'plaintext'
|
||||
|
||||
# Map numpy to python because the numpy lexer gives false positives
|
||||
# when guessing.
|
||||
lexer_lang_map = {'numpy': 'python'}
|
||||
|
||||
# If we have a filename, try guessing using the more reliable
|
||||
# guess_lexer_for_filename function.
|
||||
# If that fails, try the guess_lexer function on the code.
|
||||
lang = None
|
||||
if paste_filename:
|
||||
try:
|
||||
lang = guess_lexer_for_filename(paste_filename,
|
||||
paste_plaintext).name.lower()
|
||||
except ClassNotFound:
|
||||
print("No guess by filename")
|
||||
pass
|
||||
else:
|
||||
try:
|
||||
lang = guess_lexer(paste_plaintext).name.lower()
|
||||
except ClassNotFound:
|
||||
pass
|
||||
|
||||
if lang:
|
||||
if lang == 'markdown':
|
||||
paste_formatter = 'markdown'
|
||||
elif lang != 'text only':
|
||||
paste_formatter = 'syntaxhighlighting'
|
||||
|
||||
return paste_formatter
|
||||
|
||||
|
||||
def main():
|
||||
parser = OptionParser()
|
||||
|
||||
parser.add_option("-f", "--file", dest="filename",
|
||||
help="Read from a file instead of stdin",
|
||||
metavar="FILE")
|
||||
parser.add_option("-p", "--password", dest="password",
|
||||
help="Create a password protected paste",
|
||||
metavar="PASSWORD")
|
||||
parser.add_option("-e", "--expire",
|
||||
action="store", dest="expire", default="1day",
|
||||
choices=["5min",
|
||||
"10min",
|
||||
"1hour",
|
||||
"1day",
|
||||
"1week",
|
||||
"1month",
|
||||
"1year",
|
||||
"never"],
|
||||
help="Expiration time of the paste (default: 1day)")
|
||||
parser.add_option("-s", "--sourcecode",
|
||||
action="store_true", dest="source", default=False,
|
||||
help="Use source code highlighting")
|
||||
parser.add_option("-m", "--markdown",
|
||||
action="store_true", dest="markdown", default=False,
|
||||
help="Parse paste as markdown")
|
||||
parser.add_option("-b", "--burn",
|
||||
action="store_true", dest="burn", default=False,
|
||||
help="Burn paste after reading")
|
||||
parser.add_option("-o", "--opendiscussion",
|
||||
action="store_true", dest="opendiscussion",
|
||||
default=False,
|
||||
help="Allow discussion for the paste")
|
||||
parser.add_option("-a", "--attachment", dest="attachment",
|
||||
help="Specify path to a file to attachment to the paste",
|
||||
metavar="FILE")
|
||||
|
||||
(options, args) = parser.parse_args()
|
||||
|
||||
paste_url = 'https://cpaste.org'
|
||||
paste_formatter = 'plaintext'
|
||||
paste_compress = True
|
||||
paste_expire = '1day'
|
||||
paste_opendiscussion = 0
|
||||
paste_burn = 0
|
||||
paste_password = None
|
||||
paste_attachment_name = None
|
||||
paste_attachment = None
|
||||
|
||||
if options.filename:
|
||||
f = open(options.filename)
|
||||
if not f:
|
||||
print("Oops, could not open file!")
|
||||
|
||||
paste_plaintext = f.read()
|
||||
f.close()
|
||||
else:
|
||||
paste_plaintext = sys.stdin.read()
|
||||
|
||||
if not paste_plaintext:
|
||||
print("Oops, we have no data")
|
||||
sys.exit(1)
|
||||
|
||||
if options.burn:
|
||||
paste_burn = 1
|
||||
|
||||
if options.opendiscussion:
|
||||
paste_opendiscussion = 1
|
||||
|
||||
if options.source:
|
||||
paste_formatter = 'syntaxhighlighting'
|
||||
elif options.markdown:
|
||||
paste_formatter = 'markdown'
|
||||
elif guess_lang:
|
||||
paste_formatter = guess_lang_formatter(paste_plaintext,
|
||||
options.filename)
|
||||
|
||||
if options.expire:
|
||||
paste_expire = options.expire
|
||||
|
||||
if options.password:
|
||||
paste_password = options.password
|
||||
|
||||
if options.attachment:
|
||||
paste_attachment_name = os.path.basename(options.attachment)
|
||||
mime = guess_type(options.attachment, strict=False)[0]
|
||||
if not mime:
|
||||
mime = 'application/octet-stream'
|
||||
|
||||
f = open(options.attachment, mode='rb')
|
||||
if not f:
|
||||
print("Oops, could not open file for attachment!")
|
||||
|
||||
data = f.read()
|
||||
f.close()
|
||||
|
||||
paste_attachment = 'data:%s;base64,' % (mime)
|
||||
paste_attachment += base64.b64encode(data).decode('utf-8')
|
||||
|
||||
privatebin_send(paste_url,
|
||||
paste_password,
|
||||
paste_plaintext,
|
||||
paste_formatter,
|
||||
paste_attachment_name,
|
||||
paste_attachment,
|
||||
paste_compress,
|
||||
paste_burn,
|
||||
paste_opendiscussion,
|
||||
paste_expire)
|
||||
|
||||
sys.exit(0)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
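For reference, a hedged decryption sketch (not part of this repository): the inverse of privatebin_encrypt() above for a PrivateBin v2 paste fetched as JSON. It assumes `adata` and `ct` are taken verbatim from the server response, that `passphrase` is the same bytes value built in privatebin_send() (random passphrase plus optional password), and that the adata is re-serialized exactly as during encryption (compact JSON, like the script's json_encode()).

import base64
import json
import zlib

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.ciphers.aead import AESGCM
from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC


def privatebin_decrypt(passphrase, adata, ct):
    # adata[0] holds [iv, salt, iterations, keysize, adata_size, algo, mode, compression]
    cipher_iv, kdf_salt, kdf_iterations, kdf_keysize, _, _, _, compression = adata[0]
    kdf = PBKDF2HMAC(algorithm=hashes.SHA256(),
                     length=int(kdf_keysize / 8),
                     salt=base64.b64decode(kdf_salt),
                     iterations=kdf_iterations,
                     backend=default_backend())
    kdf_key = kdf.derive(passphrase)
    # the associated data must match the encryption-side serialization byte for byte
    adata_json = json.dumps(adata, separators=(',', ':')).encode('utf-8')
    blob = AESGCM(kdf_key).decrypt(base64.b64decode(cipher_iv),
                                   base64.b64decode(ct), adata_json)
    if compression == 'zlib':
        blob = zlib.decompress(blob, -zlib.MAX_WBITS)
    return json.loads(blob)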
49
pastedb/example_post.py
Executable file
@ -0,0 +1,49 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import requests
|
||||
import json
|
||||
|
||||
# Define your JSON object
|
||||
my_json = {'name': 'John', 'age': 30, 'city': 'New York'}
|
||||
|
||||
# Define the API endpoints for the services you want to use
|
||||
gist_url = 'https://api.github.com/gists'
|
||||
hastebin_url = 'https://hastebin.com/documents'
|
||||
pastie_url = 'https://pastie.io/documents'
|
||||
ghostbin_url = 'https://ghostbin.com/paste/new'
|
||||
codepad_url = 'https://codepad.co/snippet_api'
|
||||
termbin_url = 'https://termbin.com/documents'  # note: termbin.com accepts pastes over netcat (port 9999), not HTTP, so this endpoint will not work
|
||||
|
||||
# Define a function to upload the JSON object to each service
|
||||
def upload_to_service(url, data):
|
||||
response = requests.post(url, json=data)
|
||||
if response.status_code in (200, 201):  # GitHub's gist API answers 201 Created
|
||||
return response.json().get('key') or response.json().get('id')
|
||||
else:
|
||||
return None
|
||||
|
||||
# Upload the JSON object to each service and print the URLs
|
||||
gist_key = upload_to_service(gist_url, {'public': True, 'files': {'my_json.json': {'content': json.dumps(my_json)}}})
|
||||
if gist_key:
|
||||
print(f'Gist URL: https://gist.github.com/{gist_key}')
|
||||
|
||||
hastebin_key = upload_to_service(hastebin_url, json.dumps(my_json))
|
||||
if hastebin_key:
|
||||
print(f'Hastebin URL: https://hastebin.com/{hastebin_key}.json')
|
||||
|
||||
pastie_key = upload_to_service(pastie_url, json.dumps(my_json))
|
||||
if pastie_key:
|
||||
print(f'Pastie URL: https://pastie.io/{pastie_key}')
|
||||
|
||||
ghostbin_key = upload_to_service(ghostbin_url, {'text': json.dumps(my_json)})
|
||||
if ghostbin_key:
|
||||
print(f'Ghostbin URL: https://ghostbin.com/{ghostbin_key}')
|
||||
|
||||
codepad_key = upload_to_service(codepad_url, {'code': json.dumps(my_json)})
|
||||
if codepad_key:
|
||||
print(f'Codepad URL: https://codepad.co/{codepad_key}.json')
|
||||
|
||||
termbin_key = upload_to_service(termbin_url, json.dumps(my_json))
|
||||
if termbin_key:
|
||||
print(f'Termbin URL: https://termbin.com/{termbin_key}')
|
||||
|
15
pastedb/example_post_controlc.py
Normal file
@ -0,0 +1,15 @@
|
||||
import requests
|
||||
|
||||
url = "https://controlc.com/api.php?action=save"
|
||||
|
||||
# Text to be pasted
|
||||
text = "Hello, world!"
|
||||
|
||||
# Create a new paste
|
||||
response = requests.post(url, data={"c": text})
|
||||
|
||||
# Extract the URL of the newly created paste
|
||||
paste_url = response.text
|
||||
|
||||
print(f"Paste URL: {paste_url}")
|
||||
|
18
pastedb/example_post_hastebin.py
Normal file
@ -0,0 +1,18 @@
|
||||
import requests
|
||||
import json
|
||||
|
||||
# Define your JSON object
|
||||
my_json = {'name': 'John', 'age': 30, 'city': 'New York'}
|
||||
|
||||
# Define the API endpoint for Hastebin
|
||||
hastebin_url = 'https://hastebin.com/documents'
|
||||
|
||||
# Upload the JSON object to Hastebin and get the URL
|
||||
response = requests.post(hastebin_url, data=json.dumps(my_json))
|
||||
if response.status_code == 200:
|
||||
key = response.json()['key']
|
||||
hastebin_url = f'https://hastebin.com/{key}'
|
||||
print(f'JSON object uploaded to Hastebin: {hastebin_url}')
|
||||
else:
|
||||
print('Error uploading JSON object to Hastebin')
|
||||
|
52
pastedb/example_post_multi_backends.py
Normal file
@ -0,0 +1,52 @@
|
||||
import json
|
||||
import requests
|
||||
import hashlib
|
||||
import time
|
||||
import random
|
||||
|
||||
# generate random name for upload
|
||||
def generate_name():
|
||||
timestamp = str(int(time.time()))
|
||||
rand = ''.join(random.choices('abcdefghijklmnopqrstuvwxyz', k=3))
|
||||
return timestamp + '-' + rand
|
||||
|
||||
# define json object to upload
|
||||
data = {
|
||||
"name": "Alice",
|
||||
"age": 25,
|
||||
"city": "New York"
|
||||
}
|
||||
|
||||
# add timestamp and md5sum to the json object
|
||||
data['timestamp'] = int(time.time())
|
||||
json_str = json.dumps(data)
|
||||
hash_md5 = hashlib.md5(json_str.encode())
|
||||
data['md5sum'] = hash_md5.hexdigest()
|
||||
|
||||
# upload to pastie
|
||||
pastie_url = 'https://www.pastie.io/documents'
|
||||
pastie_resp = requests.post(pastie_url, data=json_str.encode(), headers={'Content-Type': 'application/json'})
|
||||
pastie_key = pastie_resp.json()['key']
|
||||
pastie_name = 'pastie-' + generate_name()
|
||||
|
||||
# store pastie info in dictionary
|
||||
paste_dict = {}
|
||||
paste_dict[pastie_name] = {'service': 'pastie', 'key': pastie_key, 'md5sum': data['md5sum']}
|
||||
|
||||
# upload to termbin
|
||||
termbin_url = 'https://termbin.com'
|
||||
termbin_resp = requests.post(termbin_url, data=json_str.encode(), headers={'Content-Type': 'text/plain'})
|
||||
termbin_key = termbin_resp.text.strip()
|
||||
termbin_name = 'termbin-' + generate_name()
|
||||
|
||||
# store termbin info in dictionary
|
||||
paste_dict[termbin_name] = {'service': 'termbin', 'key': termbin_key, 'md5sum': data['md5sum']}
|
||||
|
||||
# write paste dictionary to file
|
||||
with open('paste_dict.json', 'a') as f:
|
||||
f.write(json.dumps(paste_dict, indent=4))
|
||||
f.write('\n')
|
||||
|
||||
# print out paste dictionary
|
||||
print(json.dumps(paste_dict, indent=4))
|
||||
|
71
pastedb/example_post_multi_handling.py
Normal file
@ -0,0 +1,71 @@
|
||||
import json
|
||||
import hashlib
|
||||
import requests
|
||||
import time
|
||||
import random
|
||||
|
||||
|
||||
# Function to generate a unique name for the upload
|
||||
def generate_upload_name():
|
||||
timestamp = int(time.time())
|
||||
rand_str = ''.join(random.choices('abcdefghijklmnopqrstuvwxyz', k=3))
|
||||
return f"{timestamp}-{rand_str}"
|
||||
|
||||
|
||||
# Function to upload JSON data to termbin
|
||||
def upload_to_termbin(data):
|
||||
try:
|
||||
resp = requests.post('https://termbin.com', data=data.encode('utf-8'), timeout=5)
|
||||
if resp.status_code == 200:
|
||||
key = resp.text.strip()
|
||||
md5sum = hashlib.md5(data.encode('utf-8')).hexdigest()
|
||||
return {'service': 'termbin', 'key': key, 'md5sum': md5sum}
|
||||
else:
|
||||
print(f"Failed to upload to termbin.com. Response code: {resp.status_code}")
|
||||
return None
|
||||
except requests.exceptions.RequestException as e:
|
||||
print(f"Failed to upload to termbin.com. Error: {str(e)}")
|
||||
return None
|
||||
|
||||
|
||||
# Function to upload JSON data to pastie
|
||||
def upload_to_pastie(data):
|
||||
try:
|
||||
resp = requests.post('https://pastie.io/documents', data=data.encode('utf-8'), timeout=5)
|
||||
if resp.status_code == 200:
|
||||
key = resp.json()['key']
|
||||
md5sum = hashlib.md5(data.encode('utf-8')).hexdigest()
|
||||
return {'service': 'pastie', 'key': key, 'md5sum': md5sum}
|
||||
else:
|
||||
print(f"Failed to upload to pastie.io. Response code: {resp.status_code}")
|
||||
return None
|
||||
except requests.exceptions.RequestException as e:
|
||||
print(f"Failed to upload to pastie.io. Error: {str(e)}")
|
||||
return None
|
||||
|
||||
|
||||
# Upload data to both termbin and pastie
|
||||
def upload_data_to_services(data):
|
||||
upload_name = generate_upload_name()
|
||||
print(f"\nUploading data to services with name {upload_name}...\n")
|
||||
paste_dict = {'name': upload_name}
|
||||
services = {'termbin': upload_to_termbin, 'pastie': upload_to_pastie}
|
||||
for service, upload_function in services.items():
|
||||
result = upload_function(data)
|
||||
if result is not None:
|
||||
paste_dict[service] = result
|
||||
print(f"JSON object uploaded to {service}: https://{service}.com/{result['key']}")
|
||||
with open('paste_dict.json', 'a+') as f:
|
||||
f.write(json.dumps(paste_dict) + '\n')
|
||||
print(f"\nUploads completed successfully.")
|
||||
|
||||
|
||||
# Test function
|
||||
def test():
|
||||
data = '{"name": "John Doe", "age": 30, "city": "New York"}'
|
||||
upload_data_to_services(data)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
test()
|
||||
|
19
pastedb/example_post_paste2org.py
Normal file
@ -0,0 +1,19 @@
|
||||
import requests
|
||||
|
||||
# Set the URL of the Paste2.org API endpoint
|
||||
url = 'https://paste2.org/'
|
||||
|
||||
# Text to upload (a fixed example string rather than user input)
|
||||
text = 'Enter text to upload'
|
||||
|
||||
# Send the HTTP POST request to the Paste2.org API with the text as the request body
|
||||
response = requests.post(url, data=text.encode('utf-8'))
|
||||
|
||||
# Get the URL of the uploaded text from the response JSON
|
||||
if response.status_code == 200:
|
||||
paste_id = response.json().get('id')
|
||||
paste_url = f'https://paste2.org/{paste_id}'
|
||||
print('Uploaded to:', paste_url)
|
||||
else:
|
||||
print('Error uploading text:', response.text)
|
||||
|
46
pastedb/example_post_pastie.py
Normal file
@ -0,0 +1,46 @@
|
||||
import requests
|
||||
import json
|
||||
import os
|
||||
import time
|
||||
import hashlib
|
||||
|
||||
# Define your JSON object with a unique timestamp and MD5 hash
|
||||
my_json = {'timestamp': int(time.time()), 'name': 'John', 'age': 30, 'city': 'New York'}
|
||||
json_str = json.dumps(my_json, sort_keys=True)
|
||||
md5_hash = hashlib.md5(json_str.encode()).hexdigest()
|
||||
my_json['md5'] = md5_hash
|
||||
|
||||
# Define the API endpoint for Pastie
|
||||
pastie_url = 'https://pastie.io/documents'
|
||||
|
||||
# Upload the JSON object to Pastie and get the URL
|
||||
response = requests.post(pastie_url, data=json.dumps(my_json))
|
||||
if response.status_code == 200:
|
||||
key = response.json()['key']
|
||||
pastie_url = f'https://pastie.io/{key}'
|
||||
print(f'JSON object uploaded to Pastie: {pastie_url}')
|
||||
|
||||
# Add the URL and service name to the dictionary for later querying
|
||||
paste_dict = {}
|
||||
if os.path.isfile('paste_dict.json'):
|
||||
with open('paste_dict.json', 'r') as f:
|
||||
paste_dict = json.load(f)
|
||||
paste_dict[key] = {'url': pastie_url, 'service': 'Pastie'}
|
||||
|
||||
# Write the URL dictionary to a file on disk
|
||||
with open('paste_dict.json', 'w') as f:
|
||||
json.dump(paste_dict, f, indent=4)
|
||||
else:
|
||||
print('Error uploading JSON object to Pastie')
|
||||
|
||||
# Query the dictionary for the URL of a specific paste
|
||||
if os.path.isfile('paste_dict.json'):
|
||||
with open('paste_dict.json', 'r') as f:
|
||||
paste_dict = json.load(f)
|
||||
key_to_query = key if key in paste_dict else list(paste_dict.keys())[0]
|
||||
url = paste_dict[key_to_query]['url']
|
||||
service = paste_dict[key_to_query]['service']
|
||||
print(f'URL for paste with key {key_to_query} (stored on {service}): {url}')
|
||||
else:
|
||||
print('URL dictionary file not found')
|
||||
|
18
pastedb/example_post_snippet.py
Normal file
@ -0,0 +1,18 @@
|
||||
import requests
|
||||
|
||||
url = "https://snippet.host/api/documents"
|
||||
|
||||
# Text to be pasted
|
||||
text = "Hello, world!"
|
||||
|
||||
# Create a new paste
|
||||
response = requests.post(url, data=text)
|
||||
|
||||
if response.status_code == 200:
|
||||
# Extract the URL of the newly created paste
|
||||
paste_url = f"https://snippet.host/{response.json()['key']}"
|
||||
print(f"Paste URL: {paste_url}")
|
||||
else:
|
||||
# If the response code is not 200, print the response body for debugging
|
||||
print(f"Error: {response.text}")
|
||||
|
39
pastedb/example_post_sprunge.py
Normal file
@ -0,0 +1,39 @@
|
||||
import requests
|
||||
import json
|
||||
import hashlib
import re
|
||||
|
||||
url = 'http://sprunge.us'
|
||||
|
||||
def upload(data):
|
||||
try:
|
||||
json_data = json.dumps(data)
|
||||
md5sum = hashlib.md5(json_data.encode('utf-8')).hexdigest()
|
||||
|
||||
# Send the HTTP POST request to the Sprunge API
|
||||
response = requests.post(url, data={'sprunge': json_data})
|
||||
if response.status_code == 200:
|
||||
|
||||
# Get the URL of the uploaded text from the response body
|
||||
sprunge_url = response.text.strip()
|
||||
|
||||
print('Uploaded to:', sprunge_url)
|
||||
|
||||
# Use a regular expression to extract the random ID from the URL
|
||||
match = re.match(r'^http://sprunge\.us/(\w+)$', sprunge_url)
|
||||
if match:
|
||||
random_id = match.group(1)
|
||||
print('Random ID:', random_id)
|
||||
key = "sprunge_" + random_id + '_' + md5sum[:5]
|
||||
else:
|
||||
print('Invalid Sprunge URL:', sprunge_url)
return None
|
||||
|
||||
return {
|
||||
"service": "sprunge",
|
||||
"name": key,
|
||||
"key": sprunge_url,
|
||||
"md5sum": md5sum
|
||||
}
|
||||
else:
|
||||
return None
|
||||
except requests.exceptions.RequestException:
|
||||
return None
|
5
pastedb/example_post_zerobin.py
Normal file
@ -0,0 +1,5 @@
|
||||
import requests
|
||||
|
||||
data = {"data": 'print("Hello!")'}
|
||||
r = requests.post("https://zerobin.net/?paste", data=data)
|
||||
print(f"URL: {r.text}")
|
331
pastedb/lodgeit.py
Executable file
@ -0,0 +1,331 @@
|
||||
#!/usr/bin/env python
|
||||
"""
|
||||
LodgeIt!
|
||||
~~~~~~~~
|
||||
|
||||
A script that pastes stuff into the lodgeit pastebin.
|
||||
|
||||
.lodgeitrc / _lodgeitrc
|
||||
-----------------------
|
||||
|
||||
Under UNIX create a file called ``~/.lodgeitrc``, under Windows
|
||||
create a file ``%APPDATA%/_lodgeitrc`` to override defaults::
|
||||
|
||||
language=default_language
|
||||
clipboard=true/false
|
||||
open_browser=true/false
|
||||
encoding=fallback_charset
|
||||
|
||||
:authors: 2007-2010 Georg Brandl <georg@python.org>,
|
||||
2006 Armin Ronacher <armin.ronacher@active-4.com>,
|
||||
2006 Matt Good <matt@matt-good.net>,
|
||||
2005 Raphael Slinckx <raphael@slinckx.net>
|
||||
"""
|
||||
from __future__ import print_function
|
||||
import os
|
||||
import sys
|
||||
from six import text_type
|
||||
from optparse import OptionParser
|
||||
|
||||
|
||||
SCRIPT_NAME = os.path.basename(sys.argv[0])
|
||||
VERSION = '0.3'
|
||||
SETTING_KEYS = ['author', 'title', 'language', 'private', 'clipboard',
|
||||
'open_browser']
|
||||
|
||||
# global server proxy
|
||||
_xmlrpc_service = None
|
||||
_server_name = None
|
||||
|
||||
|
||||
def fail(msg, code):
|
||||
"""Bail out with an error message."""
|
||||
print('ERROR: %s' % msg, file=sys.stderr)
|
||||
sys.exit(code)
|
||||
|
||||
|
||||
def load_default_settings():
|
||||
"""Load the defaults from the lodgeitrc file."""
|
||||
settings = {
|
||||
'language': None,
|
||||
'clipboard': True,
|
||||
'open_browser': False,
|
||||
'encoding': 'iso-8859-15',
|
||||
'server_name': 'http://paste.openstack.org',
|
||||
}
|
||||
rcfile = None
|
||||
if os.name == 'posix':
|
||||
rcfile = os.path.expanduser('~/.lodgeitrc')
|
||||
elif os.name == 'nt' and 'APPDATA' in os.environ:
|
||||
rcfile = os.path.expandvars(r'$APPDATA\_lodgeitrc')
|
||||
if rcfile:
|
||||
try:
|
||||
f = open(rcfile)
|
||||
for line in f:
|
||||
if line.strip()[:1] in '#;':
|
||||
continue
|
||||
p = line.split('=', 1)
|
||||
if len(p) == 2:
|
||||
key = p[0].strip().lower()
|
||||
if key in settings:
|
||||
if key in ('clipboard', 'open_browser'):
|
||||
settings[key] = p[1].strip().lower() in \
|
||||
('true', '1', 'on', 'yes')
|
||||
else:
|
||||
settings[key] = p[1].strip()
|
||||
f.close()
|
||||
except IOError:
|
||||
pass
|
||||
settings['tags'] = []
|
||||
settings['title'] = None
|
||||
return settings
|
||||
|
||||
|
||||
def make_utf8(text, encoding):
|
||||
"""Convert a text to UTF-8, brute-force."""
|
||||
try:
|
||||
u = text_type(text, 'utf-8')
|
||||
uenc = 'utf-8'
|
||||
except UnicodeError:
|
||||
try:
|
||||
u = text_type(text, encoding)
|
||||
uenc = 'utf-8'
|
||||
except UnicodeError:
|
||||
u = text_type(text, 'iso-8859-15', 'ignore')
|
||||
uenc = 'iso-8859-15'
|
||||
try:
|
||||
import chardet
|
||||
except ImportError:
|
||||
return u.encode('utf-8')
|
||||
d = chardet.detect(text)
|
||||
if d['encoding'] == uenc:
|
||||
return u.encode('utf-8')
|
||||
return text_type(text, d['encoding'], 'ignore').encode('utf-8')
|
||||
|
||||
|
||||
def get_xmlrpc_service():
|
||||
"""Create the XMLRPC server proxy and cache it."""
|
||||
global _xmlrpc_service
|
||||
import xmlrpclib
|
||||
if _xmlrpc_service is None:
|
||||
try:
|
||||
_xmlrpc_service = xmlrpclib.ServerProxy(_server_name + 'xmlrpc/',
|
||||
allow_none=True)
|
||||
except Exception as err:
|
||||
fail('Could not connect to Pastebin: %s' % err, -1)
|
||||
return _xmlrpc_service
|
||||
|
||||
|
||||
def copy_url(url):
|
||||
"""Copy the url into the clipboard."""
|
||||
# try windows first
|
||||
try:
|
||||
import win32clipboard
|
||||
except ImportError:
|
||||
# then give pbcopy a try. do that before gtk because
|
||||
# gtk might be installed on os x but nobody is interested
|
||||
# in the X11 clipboard there.
|
||||
from subprocess import Popen, PIPE
|
||||
for prog in 'pbcopy', 'xclip':
|
||||
try:
|
||||
client = Popen([prog], stdin=PIPE)
|
||||
except OSError:
|
||||
continue
|
||||
else:
|
||||
client.stdin.write(url)
|
||||
client.stdin.close()
|
||||
client.wait()
|
||||
break
|
||||
else:
|
||||
try:
|
||||
import pygtk
|
||||
pygtk.require('2.0')
|
||||
import gtk
|
||||
import gobject
|
||||
except ImportError:
|
||||
return
|
||||
gtk.clipboard_get(gtk.gdk.SELECTION_CLIPBOARD).set_text(url)
|
||||
gobject.idle_add(gtk.main_quit)
|
||||
gtk.main()
|
||||
else:
|
||||
win32clipboard.OpenClipboard()
|
||||
win32clipboard.EmptyClipboard()
|
||||
win32clipboard.SetClipboardText(url)
|
||||
win32clipboard.CloseClipboard()
|
||||
|
||||
|
||||
def open_webbrowser(url):
|
||||
"""Open a new browser window."""
|
||||
import webbrowser
|
||||
webbrowser.open(url)
|
||||
|
||||
|
||||
def language_exists(language):
|
||||
"""Check if a language alias exists."""
|
||||
xmlrpc = get_xmlrpc_service()
|
||||
langs = xmlrpc.pastes.getLanguages()
|
||||
return language in langs
|
||||
|
||||
|
||||
def get_mimetype(data, filename):
|
||||
"""Try to get MIME type from data."""
|
||||
try:
|
||||
import gnomevfs
|
||||
except ImportError:
|
||||
from mimetypes import guess_type
|
||||
if filename:
|
||||
return guess_type(filename)[0]
|
||||
else:
|
||||
if filename:
|
||||
return gnomevfs.get_mime_type(os.path.abspath(filename))
|
||||
return gnomevfs.get_mime_type_for_data(data)
|
||||
|
||||
|
||||
def print_languages():
|
||||
"""Print a list of all supported languages, with description."""
|
||||
xmlrpc = get_xmlrpc_service()
|
||||
languages = xmlrpc.pastes.getLanguages().items()
|
||||
languages.sort(key=lambda a: a[1].lower())
|
||||
print('Supported Languages:')
|
||||
for alias, name in languages:
|
||||
print(' %-30s%s' % (alias, name))
|
||||
|
||||
|
||||
def download_paste(uid):
|
||||
"""Download a paste given by ID."""
|
||||
xmlrpc = get_xmlrpc_service()
|
||||
paste = xmlrpc.pastes.getPaste(uid)
|
||||
if not paste:
|
||||
fail('Paste "%s" does not exist.' % uid, 5)
|
||||
print(paste['code'].encode('utf-8'))
|
||||
|
||||
|
||||
def create_paste(code, language, filename, mimetype, private):
|
||||
"""Create a new paste."""
|
||||
xmlrpc = get_xmlrpc_service()
|
||||
rv = xmlrpc.pastes.newPaste(language, code, None, filename, mimetype,
|
||||
private)
|
||||
if not rv:
|
||||
fail('Could not create paste. Something went wrong '
|
||||
'on the server side.', 4)
|
||||
return rv
|
||||
|
||||
|
||||
def compile_paste(filenames, langopt):
|
||||
"""Create a single paste out of zero, one or multiple files."""
|
||||
def read_file(f):
|
||||
try:
|
||||
return f.read()
|
||||
finally:
|
||||
f.close()
|
||||
mime = ''
|
||||
lang = langopt or ''
|
||||
if not filenames:
|
||||
data = read_file(sys.stdin)
|
||||
print('Pasting...')
|
||||
if not langopt:
|
||||
mime = get_mimetype(data, '') or ''
|
||||
fname = ''
|
||||
elif len(filenames) == 1:
|
||||
fname = filenames[0]
|
||||
data = read_file(open(filenames[0], 'rb'))
|
||||
if not langopt:
|
||||
mime = get_mimetype(data, filenames[0]) or ''
|
||||
else:
|
||||
result = []
|
||||
for fname in filenames:
|
||||
data = read_file(open(fname, 'rb'))
|
||||
if langopt:
|
||||
result.append('### %s [%s]\n\n' % (fname, langopt))
|
||||
else:
|
||||
result.append('### %s\n\n' % fname)
|
||||
result.append(data)
|
||||
result.append('\n\n')
|
||||
data = ''.join(result)
|
||||
lang = 'multi'
|
||||
fname = ''
|
||||
return data, lang, fname, mime
|
||||
|
||||
|
||||
def main():
|
||||
"""Main script entry point."""
|
||||
global _server_name
|
||||
|
||||
usage = ('Usage: %%prog [options] [FILE ...]\n\n'
|
||||
'Read the files and paste their contents to LodgeIt pastebin.\n'
|
||||
'If no file is given, read from standard input.\n'
|
||||
'If multiple files are given, they are put into a single paste.')
|
||||
parser = OptionParser(usage=usage)
|
||||
|
||||
settings = load_default_settings()
|
||||
|
||||
parser.add_option('-v', '--version', action='store_true',
|
||||
help='Print script version')
|
||||
parser.add_option('-L', '--languages', action='store_true', default=False,
|
||||
help='Retrieve a list of supported languages')
|
||||
parser.add_option('-l', '--language', default=settings['language'],
|
||||
help='Used syntax highlighter for the file')
|
||||
parser.add_option('-e', '--encoding', default=settings['encoding'],
|
||||
help='Specify the encoding of a file (default is '
|
||||
'utf-8 or guessing if available)')
|
||||
parser.add_option('-b', '--open-browser', dest='open_browser',
|
||||
action='store_true',
|
||||
default=settings['open_browser'],
|
||||
help='Open the paste in a web browser')
|
||||
parser.add_option('-p', '--private', action='store_true', default=False,
|
||||
help='Paste as private')
|
||||
parser.add_option('--no-clipboard', dest='clipboard',
|
||||
action='store_false',
|
||||
default=settings['clipboard'],
|
||||
help="Don't copy the url into the clipboard")
|
||||
parser.add_option('--download', metavar='UID',
|
||||
help='Download a given paste')
|
||||
parser.add_option('-s', '--server', default=settings['server_name'],
|
||||
dest='server_name',
|
||||
help="Specify the pastebin to send data")
|
||||
opts, args = parser.parse_args()
|
||||
|
||||
# The global available server name
|
||||
_server_name = opts.server_name
|
||||
if not _server_name.endswith('/'):
|
||||
_server_name += '/'
|
||||
|
||||
# special modes of operation:
|
||||
# - paste script version
|
||||
if opts.version:
|
||||
print('%s: version %s' % (SCRIPT_NAME, VERSION))
|
||||
sys.exit()
|
||||
# - print list of languages
|
||||
elif opts.languages:
|
||||
print_languages()
|
||||
sys.exit()
|
||||
# - download Paste
|
||||
elif opts.download:
|
||||
download_paste(opts.download)
|
||||
sys.exit()
|
||||
|
||||
# check language if given
|
||||
if opts.language and not language_exists(opts.language):
|
||||
fail('Language %s is not supported.' % opts.language, 3)
|
||||
|
||||
# load file(s)
|
||||
try:
|
||||
data, language, filename, mimetype = compile_paste(args, opts.language)
|
||||
except Exception as err:
|
||||
fail('Error while reading the file(s): %s' % err, 2)
|
||||
if not data:
|
||||
fail('Aborted, no content to paste.', 4)
|
||||
|
||||
# create paste
|
||||
code = make_utf8(data, opts.encoding)
|
||||
pid = create_paste(code, language, filename, mimetype, opts.private)
|
||||
url = '%sshow/%s/' % (_server_name, pid)
|
||||
print(url)
|
||||
if opts.open_browser:
|
||||
open_webbrowser(url)
|
||||
if opts.clipboard:
|
||||
copy_url(url)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.exit(main())
|
69
pastedb/lodgeit_python3.py
Executable file
@ -0,0 +1,69 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
LodgeIt!
|
||||
~~~~~~~~
|
||||
|
||||
A script that pastes stuff into the lodgeit pastebin.
|
||||
"""
|
||||
import os
|
||||
import sys
|
||||
import argparse
|
||||
import urllib.parse
|
||||
import urllib.request
|
||||
|
||||
VERSION = '0.3'
|
||||
SERVER_NAME = 'http://paste.openstack.org/'
|
||||
|
||||
def upload_paste(paste_content, title=None, language=None, private=None):
|
||||
"""
|
||||
Uploads a paste to LodgeIt!
|
||||
|
||||
:param paste_content: the content of the paste to upload
|
||||
:param title: the title of the paste (optional)
|
||||
:param language: the language of the paste (optional)
|
||||
:param private: whether the paste should be private (optional)
|
||||
:return: the URL of the uploaded paste
|
||||
"""
|
||||
# build the POST data
|
||||
data = {
|
||||
'content': paste_content.encode('utf-8'),
|
||||
'format': 'text',
|
||||
}
|
||||
if title is not None:
|
||||
data['name'] = title
|
||||
if language is not None:
|
||||
data['language'] = language
|
||||
if private is not None:
|
||||
data['private'] = private
|
||||
|
||||
# make the request
|
||||
url = urllib.parse.urljoin(SERVER_NAME, '/pastes')
|
||||
request = urllib.request.Request(url, data=urllib.parse.urlencode(data).encode('utf-8'))
|
||||
response = urllib.request.urlopen(request)
|
||||
|
||||
# parse the response and return the URL of the new paste
|
||||
location = response.getheader('Location')
|
||||
if location is None:
|
||||
raise ValueError('Could not find the URL of the new paste')
|
||||
return location
|
||||
|
||||
def main():
|
||||
# parse the command-line arguments
|
||||
parser = argparse.ArgumentParser(description='Upload a paste to LodgeIt!')
|
||||
parser.add_argument('filename', help='the name of the file to upload')
|
||||
parser.add_argument('--title', help='the title of the paste')
|
||||
parser.add_argument('--language', help='the language of the paste')
|
||||
parser.add_argument('--private', action='store_true', help='make the paste private')
|
||||
args = parser.parse_args()
|
||||
|
||||
# read the content of the file to upload
|
||||
with open(args.filename, 'r') as f:
|
||||
paste_content = f.read()
|
||||
|
||||
# upload the paste and print the URL of the new paste
|
||||
url = upload_paste(paste_content, args.title, args.language, args.private)
|
||||
print(url)
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
|
30
pastedb/old_upload.py
Normal file
@ -0,0 +1,30 @@
|
||||
import json
|
||||
import os
|
||||
import time
|
||||
import hashlib
|
||||
from services import pastie, dpaste, rentry, defau, sprunge, opendev
|
||||
from data import data
|
||||
|
||||
def save(data):
|
||||
# Upload to the available services
|
||||
paste_dict = {'name': name}
|
||||
successes = []
|
||||
failures = []
|
||||
for service in [defau]:
|
||||
try:
|
||||
result = service.upload(data)
|
||||
add_data(result["service"], result["key"], result["md5sum"] )
|
||||
successes.append(result['name'])
|
||||
except Exception as e:
|
||||
failures.append(f"{service.__name__}: {str(e)}")
|
||||
|
||||
# Print upload results
|
||||
print(f"Upload successful to {len(successes)}/{len(successes)+len(failures)} services:")
|
||||
for name in successes:
|
||||
print(f"- {name}")
|
||||
if failures:
|
||||
print("Upload failed to the following services:")
|
||||
for error in failures:
|
||||
print(f"- {error}")
|
||||
|
||||
print(f"Your paste trace is: {name}")
|
26
pastedb/opendev.py
Executable file
@ -0,0 +1,26 @@
|
||||
#!/usr/bin/env python3
|
||||
import sys
|
||||
import requests
|
||||
|
||||
url = 'https://paste.opendev.org/json/'
|
||||
|
||||
# read input from stdin
|
||||
input_str = sys.stdin.read()
|
||||
|
||||
# create data for new paste
|
||||
data = {
|
||||
'language': '',
|
||||
'code': input_str,
|
||||
'private': False
|
||||
}
|
||||
|
||||
# send request to create new paste
|
||||
response = requests.post(url + '?method=pastes.newPaste', data=data)
|
||||
|
||||
# extract URL of newly created paste from response
|
||||
paste_id = response.text.strip()
|
||||
paste_url = f'https://paste.opendev.org/show/{paste_id}'
|
||||
|
||||
# print URL of newly created paste
|
||||
print(paste_url)
|
||||
|
45
pastedb/opendev_2nd.py
Executable file
@ -0,0 +1,45 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import requests
|
||||
import json
|
||||
import hashlib
|
||||
|
||||
url = 'https://paste.opendev.org/json/'
|
||||
|
||||
# read input from stdin
|
||||
data = '{ "name": "joe", "age": 55}'
|
||||
|
||||
def upload(data):
|
||||
try:
|
||||
content = json.dumps(data)
|
||||
|
||||
# create JSON payload for new paste
|
||||
payload = {
|
||||
'language': 'text',
|
||||
'code': content,
|
||||
'private': False
|
||||
#'expire': '1day'
|
||||
}
|
||||
|
||||
# send request to create new paste
|
||||
response = requests.post(url + '?method=pastes.newPaste', json=payload)
|
||||
|
||||
status = response.status_code
|
||||
paste_id = response.json()['data']
|
||||
|
||||
if status == 200:
|
||||
#print(f'JSON object uploaded to dpaste.com: {dpaste_url}')
|
||||
md5sum = hashlib.md5(content.encode('utf-8')).hexdigest()
|
||||
|
||||
return {
|
||||
'service': 'opendev',
|
||||
'key': paste_id,
|
||||
'md5sum': md5sum,
|
||||
'name': 'opendev_' + paste_id + '_' + md5sum[:5]
|
||||
}
|
||||
else:
|
||||
return None
|
||||
except requests.exceptions.RequestException:
|
||||
return None
|
||||
# we can get the paste back by:
|
||||
## $ > curl -d '{"paste_id":819463}' -H 'Content-Type: application/json' https://paste.opendev.org/json/?method=pastes.getPaste |jq .data.code
|
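The retrieval described in the curl comment above can also be done from Python; a small sketch, assuming the same endpoint and the .data.code response shape shown there:

import requests

def opendev_get(paste_id):
    # POST the paste_id to the pastes.getPaste method and return .data.code,
    # mirroring the curl + jq example above
    resp = requests.post('https://paste.opendev.org/json/?method=pastes.getPaste',
                         json={'paste_id': paste_id})
    resp.raise_for_status()
    return resp.json()['data']['code']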
12
pastedb/paste_dict.json
Normal file
@ -0,0 +1,12 @@
|
||||
{
|
||||
"name": "1680462198-dxx",
|
||||
"pastie": {
|
||||
"service": "pastie",
|
||||
"key": "jpllic",
|
||||
"md5sum": "5e87ba1e0d151a399c0418343842b94d"
|
||||
},
|
||||
"ogqfsh": {
|
||||
"url": "https://pastie.io/ogqfsh",
|
||||
"service": "Pastie"
|
||||
}
|
||||
}
|
1
pastedb/pastedb01/data.json
Normal file
@ -0,0 +1 @@
|
||||
e71ad7d7815eef99ea4545da4ea22e9977c10c62c8e57cc794228e553c44e22f80cf5733aa00a53b6794b1e2732ab093eb63914155137988e58f8906acc5b57d02a981a2320a5b72a99ceaa5b68e2401
|
0
pastedb/pastedb01/data/__init__.py
Normal file
BIN
pastedb/pastedb01/data/__pycache__/__init__.cpython-310.pyc
Normal file
Binary file not shown.
BIN
pastedb/pastedb01/data/__pycache__/data.cpython-310.pyc
Normal file
Binary file not shown.
122
pastedb/pastedb01/data/data.py
Normal file
@ -0,0 +1,122 @@
|
||||
"""
|
||||
This module provides functions for managing and saving data in a dictionary object. It also supports encryption and decryption of the data when saving and loading it to/from disk. The functions in this module include:
|
||||
|
||||
set_encryption_key(key: bytes): sets the encryption key to be used for encrypting the data.
|
||||
add_data(service_tag: str, key: str, md5sum: str) -> str: adds data for a service to the data dictionary.
|
||||
save_data(filename: str, data: dict, key=None): writes the data dictionary to disk as a JSON object. If a key is provided, it encrypts the data using the AES symmetric encryption algorithm before writing to disk.
|
||||
encrypt_data(data: bytes) -> bytes: encrypts data using the AES symmetric encryption algorithm.
|
||||
load_data(file_path: str) -> Dict: loads the data from a file and returns it as a dictionary. If the file is encrypted, it uses the provided key to decrypt it before returning the data.
|
||||
|
||||
This module depends on the following packages: hashlib, pycryptodomex (Cryptodome), cryptography, and collections.
|
||||
"""
|
||||
import os
|
||||
import json
|
||||
import hashlib
|
||||
import time
|
||||
from collections import defaultdict
|
||||
from Cryptodome.Cipher import AES
|
||||
from Cryptodome.Util import Padding
|
||||
from cryptography.fernet import Fernet
|
||||
|
||||
data = defaultdict(lambda: {"timestamp": 0, "services": {}})
|
||||
|
||||
_ENCRYPTION_KEY = None
|
||||
|
||||
def set_encryption_key(key: bytes):
|
||||
global _ENCRYPTION_KEY
|
||||
_ENCRYPTION_KEY = key
|
||||
|
||||
|
||||
def add_data(service_tag: str, key: str, md5sum: str) -> str:
|
||||
"""
|
||||
Adds data for a service to the `data` dictionary.
|
||||
|
||||
Parameters:
|
||||
service_tag (str): A string representing the service being added.
|
||||
key (str): A string representing the key for the service being added.
|
||||
md5sum (str): A string representing the MD5 checksum for the service being added.
|
||||
|
||||
Returns:
|
||||
str: A string representing the unique ID of the run that the data was added to.
|
||||
"""
|
||||
|
||||
# Generate a unique ID for the run
|
||||
run_id = f"run-{hashlib.sha256(str(data).encode()).hexdigest()[:6]}"
|
||||
timestamp = int(time.time())
|
||||
|
||||
# Add the service data to the run
|
||||
data[run_id]["timestamp"] = timestamp
|
||||
data[run_id]["services"][service_tag] = {"key": key, "md5sum": md5sum}
|
||||
|
||||
return run_id
|
||||
|
||||
|
||||
def save_data(filename: str, data: dict, key=None):
|
||||
"""
|
||||
Writes the data dictionary to disk as a JSON object.
|
||||
|
||||
Parameters:
|
||||
filename (str): A string representing the filename to write the data to.
|
||||
data (dict): A dictionary representing the data to be written to disk.
|
||||
key (bytes): unused; encryption is controlled by the module-level key set via set_encryption_key().
|
||||
"""
|
||||
with open(filename, "w") as f:
|
||||
# Serialize the data dictionary as a JSON object
|
||||
json_data = json.dumps(data)
|
||||
|
||||
# If a key is provided, encrypt the JSON data
|
||||
if _ENCRYPTION_KEY:
|
||||
# Encrypt the data using the key
|
||||
encrypted_data = encrypt_data(json_data.encode())
|
||||
|
||||
# Write the encrypted data to the file
|
||||
f.write(encrypted_data.hex())
|
||||
else:
|
||||
# Write the unencrypted JSON data to the file
|
||||
print("you need to set the encryption key first.")
|
||||
|
||||
|
||||
def encrypt_data(data: bytes) -> bytes:
|
||||
"""
|
||||
Encrypts data using the AES symmetric encryption algorithm.
|
||||
|
||||
Parameters:
|
||||
data (bytes): A bytes object representing the data to be encrypted.
|
||||
|
||||
Returns:
|
||||
bytes: A bytes object representing the encrypted data.
|
||||
"""
|
||||
# Generate a random initialization vector (IV)
|
||||
iv = os.urandom(AES.block_size)
|
||||
|
||||
# Pad the data to a multiple of the block size
|
||||
padded_data = Padding.pad(data, AES.block_size)
|
||||
|
||||
# Create an AES cipher object
|
||||
cipher = AES.new(_ENCRYPTION_KEY, AES.MODE_CBC, iv)
|
||||
|
||||
# Encrypt the data using CBC mode
|
||||
encrypted_data = cipher.encrypt(padded_data)
|
||||
|
||||
# Prepend the IV to the encrypted data
|
||||
return iv + encrypted_data
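# Hedged counterpart sketch (not present in this module): decryption would strip
# the prepended IV, undo the CBC encryption with the same module-level key, and
# remove the padding.
def decrypt_data(blob: bytes) -> bytes:
    iv, body = blob[:AES.block_size], blob[AES.block_size:]
    cipher = AES.new(_ENCRYPTION_KEY, AES.MODE_CBC, iv)
    return Padding.unpad(cipher.decrypt(body), AES.block_size)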
|
||||
|
||||
|
||||
def load_data(file_path: str, key=None) -> dict:
|
||||
"""
|
||||
Load the data from a file and return it as a dictionary.
|
||||
|
||||
:param file_path: The path to the file to load.
|
||||
:param key: The key to use to decrypt the file.
|
||||
:return: A dictionary representing the data from the file.
|
||||
"""
|
||||
if _ENCRYPTION_KEY:
|
||||
with open(file_path, "rb") as f:
|
||||
ciphertext = f.read()
|
||||
|
||||
fernet = Fernet(_ENCRYPTION_KEY.encode())
|
||||
plaintext = fernet.decrypt(ciphertext)
|
||||
return json.loads(plaintext.decode())
|
||||
else:
|
||||
print("you need to set the encryption key first.")
|
||||
return '{"you need to set the encryption key first."}'
|
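A minimal usage sketch for the data module above (assuming it is importable as data.data; the service/key/md5sum values are taken from paste_dict.json elsewhere in this repo, and the 32-byte key is only an example):

from data.data import add_data, data, save_data, set_encryption_key

set_encryption_key(b'0123456789abcdef0123456789abcdef')  # example 32-byte AES key
run_id = add_data('pastie', 'jpllic', '5e87ba1e0d151a399c0418343842b94d')
save_data('data.json', dict(data))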
BIN
pastedb/pastedb01/external/pastebinit_1.5.1-1_all.deb
vendored
Normal file
Binary file not shown.
161
pastedb/pastedb01/external/rentry
vendored
Executable file
@ -0,0 +1,161 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import getopt
|
||||
import http.cookiejar
|
||||
import sys
|
||||
import urllib.parse
|
||||
import urllib.request
|
||||
from http.cookies import SimpleCookie
|
||||
from json import loads as json_loads
|
||||
from os import environ
|
||||
|
||||
_headers = {"Referer": 'https://rentry.co'}
|
||||
|
||||
|
||||
class UrllibClient:
|
||||
"""Simple HTTP Session Client, keeps cookies."""
|
||||
|
||||
def __init__(self):
|
||||
self.cookie_jar = http.cookiejar.CookieJar()
|
||||
self.opener = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(self.cookie_jar))
|
||||
urllib.request.install_opener(self.opener)
|
||||
|
||||
def get(self, url, headers={}):
|
||||
request = urllib.request.Request(url, headers=headers)
|
||||
return self._request(request)
|
||||
|
||||
def post(self, url, data=None, headers={}):
|
||||
postdata = urllib.parse.urlencode(data).encode()
|
||||
request = urllib.request.Request(url, postdata, headers)
|
||||
return self._request(request)
|
||||
|
||||
def _request(self, request):
|
||||
response = self.opener.open(request)
|
||||
response.status_code = response.getcode()
|
||||
response.data = response.read().decode('utf-8')
|
||||
return response
|
||||
|
||||
|
||||
def raw(url):
|
||||
client = UrllibClient()
|
||||
return json_loads(client.get('https://rentry.co/api/raw/{}'.format(url)).data)
|
||||
|
||||
|
||||
def new(url, edit_code, text):
|
||||
client, cookie = UrllibClient(), SimpleCookie()
|
||||
|
||||
cookie.load(vars(client.get('https://rentry.co'))['headers']['Set-Cookie'])
|
||||
csrftoken = cookie['csrftoken'].value
|
||||
|
||||
payload = {
|
||||
'csrfmiddlewaretoken': csrftoken,
|
||||
'url': url,
|
||||
'edit_code': edit_code,
|
||||
'text': text
|
||||
}
|
||||
|
||||
return json_loads(client.post('https://rentry.co/api/new', payload, headers=_headers).data)
|
||||
|
||||
|
||||
def edit(url, edit_code, text):
|
||||
client, cookie = UrllibClient(), SimpleCookie()
|
||||
|
||||
cookie.load(vars(client.get('https://rentry.co'))['headers']['Set-Cookie'])
|
||||
csrftoken = cookie['csrftoken'].value
|
||||
|
||||
payload = {
|
||||
'csrfmiddlewaretoken': csrftoken,
|
||||
'edit_code': edit_code,
|
||||
'text': text
|
||||
}
|
||||
|
||||
return json_loads(client.post('https://rentry.co/api/edit/{}'.format(url), payload, headers=_headers).data)
|
||||
|
||||
|
||||
def usage():
|
||||
print('''
|
||||
Usage: rentry {new | edit | raw} {-h | --help} {-u | --url} {-p | --edit-code} text
|
||||
|
||||
Commands:
|
||||
new create a new entry
|
||||
edit edit an existing entry
|
||||
raw get raw markdown text of an existing entry
|
||||
|
||||
Options:
|
||||
-h, --help show this help message and exit
|
||||
-u, --url URL url for the entry, random if not specified
|
||||
-p, --edit-code EDIT-CODE edit code for the entry, random if not specified
|
||||
|
||||
Examples:
|
||||
rentry new 'markdown text' # new entry with random url and edit code
|
||||
rentry new -p pw -u example 'text' # with custom edit code and url
|
||||
rentry edit -p pw -u example 'text' # edit the example entry
|
||||
cat FILE | rentry new # read from FILE and paste it to rentry
|
||||
cat FILE | rentry edit -p pw -u example # read from FILE and edit the example entry
|
||||
rentry raw -u example # get raw markdown text
|
||||
rentry raw -u https://rentry.co/example # -u accepts absolute and relative urls
|
||||
''')
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
try:
|
||||
environ.pop('POSIXLY_CORRECT', None)
|
||||
opts, args = getopt.gnu_getopt(sys.argv[1:], "hu:p:", ["help", "url=", "edit-code="])
|
||||
except getopt.GetoptError as e:
|
||||
sys.exit("error: {}".format(e))
|
||||
|
||||
command, url, edit_code, text = None, '', '', None
|
||||
|
||||
for o, a in opts:
|
||||
if o in ("-h", "--help"):
|
||||
usage()
|
||||
sys.exit()
|
||||
elif o in ("-u", "--url"):
|
||||
url = urllib.parse.urlparse(a).path.strip('/')
|
||||
elif o in ("-p", "--edit-code"):
|
||||
edit_code = a
|
||||
|
||||
command = (args[0:1] or [None])[0]
|
||||
command or sys.exit(usage())
|
||||
command in ['new', 'edit', 'raw'] or sys.exit('error: command must be new, edit or raw')
|
||||
|
||||
text = (args[1:2] or [None])[0]
|
||||
if not text and command != 'raw':
|
||||
text = sys.stdin.read().strip()
|
||||
text or sys.exit('error: text is required')
|
||||
|
||||
if command == 'new':
|
||||
response = new(url, edit_code, text)
|
||||
if response['status'] != '200':
|
||||
print('error: {}'.format(response['content']))
|
||||
try:
|
||||
for i in response['errors'].split('.'):
|
||||
i and print(i)
|
||||
sys.exit(1)
|
||||
except:
|
||||
sys.exit(1)
|
||||
else:
|
||||
print('Url: {}\nEdit code: {}'.format(response['url'], response['edit_code']))
|
||||
|
||||
elif command == 'edit':
|
||||
url or sys.exit('error: url is required')
|
||||
edit_code or sys.exit('error: edit code is required')
|
||||
|
||||
response = edit(url, edit_code, text)
|
||||
if response['status'] != '200':
|
||||
print('error: {}'.format(response['content']))
|
||||
try:
|
||||
for i in response['errors'].split('.'):
|
||||
i and print(i)
|
||||
sys.exit(1)
|
||||
except:
|
||||
sys.exit(1)
|
||||
else:
|
||||
print('Ok')
|
||||
|
||||
elif command == 'raw':
|
||||
url or sys.exit('error: url is required')
|
||||
response = raw(url)
|
||||
if response['status'] != '200':
|
||||
sys.exit('error: {}'.format(response['content']))
|
||||
print(response['content'])
|
9
pastedb/pastedb01/paste_dict.json
Normal file
@ -0,0 +1,9 @@
|
||||
|
||||
{
|
||||
"name": "1680593168-456462",
|
||||
"defau_4e062": {
|
||||
"service": "p.defau.lt",
|
||||
"key": "https://p.defau.lt/?QtZaVCSgOsVMmI1xS_ofqw",
|
||||
"md5sum": "4e062894f660d3c69640129f9fd0a09e"
|
||||
}
|
||||
}
|
0
pastedb/pastedb01/services/__init__.py
Normal file
BIN
pastedb/pastedb01/services/__pycache__/__init__.cpython-310.pyc
Normal file
Binary file not shown.
BIN
pastedb/pastedb01/services/__pycache__/defau.cpython-310.pyc
Normal file
Binary file not shown.
BIN
pastedb/pastedb01/services/__pycache__/dpaste.cpython-310.pyc
Normal file
Binary file not shown.
BIN
pastedb/pastedb01/services/__pycache__/opendev.cpython-310.pyc
Normal file
Binary file not shown.
BIN
pastedb/pastedb01/services/__pycache__/paste2.cpython-310.pyc
Normal file
Binary file not shown.
BIN
pastedb/pastedb01/services/__pycache__/pastie.cpython-310.pyc
Normal file
Binary file not shown.
BIN
pastedb/pastedb01/services/__pycache__/rentry.cpython-310.pyc
Normal file
Binary file not shown.
BIN
pastedb/pastedb01/services/__pycache__/sprunge.cpython-310.pyc
Normal file
Binary file not shown.
BIN
pastedb/pastedb01/services/__pycache__/termbin.cpython-310.pyc
Normal file
Binary file not shown.
38
pastedb/pastedb01/services/defau.py
Normal file
@ -0,0 +1,38 @@
|
||||
"""
|
||||
This module defines functions to interact with the p.defau.lt service for uploading and retrieving code snippets.
|
||||
|
||||
Functions:
|
||||
- get_service_tag(): Returns a string representing the service tag for p.defau.lt.
|
||||
- upload(data): Uploads a code snippet to p.defau.lt and returns a dictionary containing metadata about the upload.
|
||||
- get(trace): Retrieves the code snippet associated with the provided trace from p.defau.lt.
|
||||
"""
|
||||
|
||||
import requests
|
||||
import json
|
||||
import hashlib
|
||||
|
||||
URL = 'https://p.defau.lt/submit.php'
|
||||
NAME_PREFIX = 'defau_'
|
||||
|
||||
def get_service_tag():
|
||||
return 'p.defau.lt'
|
||||
|
||||
def upload(data):
|
||||
json_data = json.dumps(data)
|
||||
md5sum = hashlib.md5(json_data.encode('utf-8')).hexdigest()
|
||||
|
||||
try:
|
||||
response = requests.post(URL, data={'code': json_data})
|
||||
response.raise_for_status()
|
||||
key = response.url
|
||||
name = f"{NAME_PREFIX}{md5sum[:5]}"
|
||||
return {'name': name, 'service': get_service_tag(), 'key': key, 'md5sum': md5sum}
|
||||
except requests.exceptions.RequestException as e:
|
||||
print(e)
|
||||
return None
|
||||
|
||||
def get(trace):
|
||||
url = trace['key']
|
||||
response = requests.get(url)
|
||||
return response.content
|
||||
|
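A quick, hedged usage sketch for the module above (assumes the services package layout used in upload.py and network access to p.defau.lt):

from services import defau

trace = defau.upload({'name': 'Alice', 'age': 25})
if trace:
    print(trace['name'], trace['key'])   # e.g. defau_xxxxx https://p.defau.lt/?...
    print(defau.get(trace)[:80])         # fetch the stored JSON back from the paste URL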
37
pastedb/pastedb01/services/dpaste.py
Normal file
@ -0,0 +1,37 @@
|
||||
import hashlib
|
||||
import json
|
||||
import requests
|
||||
|
||||
NAME = 'dpaste'
|
||||
|
||||
def get_service_tag():
|
||||
return NAME
|
||||
|
||||
def upload(data):
|
||||
try:
|
||||
content = json.dumps(data)
|
||||
syntax = 'json'
|
||||
expiry_days = ''
|
||||
|
||||
r = requests.post('https://dpaste.com/api/v2/',
|
||||
data={'content': content,
|
||||
'syntax': syntax,
|
||||
'expiry_days': expiry_days},
|
||||
headers={'User-Agent': 'My Python Project'})
|
||||
|
||||
if r.status_code == 201:
|
||||
dpaste_url = r.headers['Location']
|
||||
#print(f'JSON object uploaded to dpaste.com: {dpaste_url}')
|
||||
md5sum = hashlib.md5(content.encode('utf-8')).hexdigest()
|
||||
|
||||
return {
|
||||
'service': NAME,
|
||||
'key': dpaste_url,
|
||||
'md5sum': md5sum,
|
||||
'name': NAME + '_' + dpaste_url.rsplit('/', 1)[-1]
|
||||
}
|
||||
else:
|
||||
return None
|
||||
except requests.exceptions.RequestException:
|
||||
return None
|
||||
|
70
pastedb/pastedb01/services/old_rentry.py
Normal file
@ -0,0 +1,70 @@
|
||||
import json
|
||||
import hashlib
|
||||
import http.cookiejar
|
||||
import urllib.parse
|
||||
import urllib.request
|
||||
from http.cookies import SimpleCookie
|
||||
|
||||
_headers = {
|
||||
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.3'}
|
||||
|
||||
class UrllibClient:
|
||||
"""Simple HTTP Session Client, keeps cookies."""
|
||||
|
||||
def __init__(self):
|
||||
self.cookie_jar = http.cookiejar.CookieJar()
|
||||
self.opener = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(self.cookie_jar))
|
||||
urllib.request.install_opener(self.opener)
|
||||
|
||||
def get(self, url, headers={}):
|
||||
request = urllib.request.Request(url, headers=headers)
|
||||
return self._request(request)
|
||||
|
||||
def post(self, url, data=None, headers={}):
|
||||
postdata = urllib.parse.urlencode(data).encode()
|
||||
request = urllib.request.Request(url, postdata, headers)
|
||||
return self._request(request)
|
||||
|
||||
def _request(self, request):
|
||||
response = self.opener.open(request)
|
||||
response.status_code = response.getcode()
|
||||
response.data = response.read().decode('utf-8')
|
||||
return response
|
||||
|
||||
|
||||
def json_loads(string):
|
||||
try:
|
||||
return json.loads(string)
|
||||
except:
|
||||
return None
|
||||
|
||||
|
||||
def upload(data):
|
||||
client, cookie = UrllibClient(), SimpleCookie()
|
||||
|
||||
cookie.load(vars(client.get('https://rentry.co'))['headers']['Set-Cookie'])
|
||||
csrftoken = cookie['csrftoken'].value
|
||||
|
||||
json_data = json.dumps(data)
|
||||
md5sum = hashlib.md5(json_data.encode('utf-8')).hexdigest()
|
||||
|
||||
payload = {
|
||||
'csrfmiddlewaretoken': csrftoken,
|
||||
'url': md5sum,
|
||||
'edit_code': '',
|
||||
'text': json_data
|
||||
}
|
||||
|
||||
response = client.post('https://rentry.co/api/new', payload, headers=_headers)
|
||||
|
||||
if response.status_code == 200:
|
||||
json_response = json_loads(response.data)
|
||||
return {
|
||||
"service": "rentry",
|
||||
"name": json_response["slug"],
|
||||
"key": f"https://rentry.co/{json_response['slug']}",
|
||||
"md5sum": md5sum
|
||||
}
|
||||
else:
|
||||
return None
|
||||
|
44
pastedb/pastedb01/services/opendev.py
Executable file
@ -0,0 +1,44 @@
|
||||
import requests
|
||||
import json
|
||||
import hashlib
|
||||
|
||||
URL = 'https://paste.opendev.org/json/'
|
||||
NAME = 'opendev'
|
||||
|
||||
def get_service_tag():
|
||||
return NAME
|
||||
|
||||
def upload(data):
|
||||
try:
|
||||
content = json.dumps(data)
|
||||
|
||||
# create JSON payload for new paste
|
||||
payload = {
|
||||
'language': 'text',
|
||||
'code': content,
|
||||
'private': False
|
||||
#'expire': '1day'
|
||||
}
|
||||
|
||||
# send request to create new paste
|
||||
response = requests.post(URL + '?method=pastes.newPaste', json=payload)
|
||||
|
||||
status = response.status_code
|
||||
paste_id = response.json()['data']
|
||||
|
||||
if status == 200:
|
||||
#print(f'JSON object uploaded to dpaste.com: {dpaste_url}')
|
||||
md5sum = hashlib.md5(content.encode('utf-8')).hexdigest()
|
||||
|
||||
return {
|
||||
'service': NAME,
|
||||
'key': URL[:-6],
|
||||
'md5sum': md5sum,
|
||||
'name': NAME + '_' + paste_id + '_' + md5sum[:5]
|
||||
}
|
||||
else:
|
||||
return None
|
||||
except requests.exceptions.RequestException:
|
||||
return None
|
||||
# we can get the paste back by:
|
||||
## $ > curl -d '{"paste_id":819463}' -H 'Content-Type: application/json' https://paste.opendev.org/json/?method=pastes.getPaste |jq .data.code
|
20
pastedb/pastedb01/services/paste2.py
Normal file
@ -0,0 +1,20 @@
|
||||
import requests
|
||||
|
||||
def upload(data):
|
||||
url = 'https://paste2.org/'
|
||||
|
||||
response = requests.post(url, data={'data': data})
|
||||
response.raise_for_status()
|
||||
|
||||
# Extract the URL of the uploaded paste from the response
|
||||
paste_url = None
|
||||
for line in response.text.splitlines():
|
||||
if line.startswith('<input type="text" id="paste-url" value="'):
|
||||
paste_url = line.split('"')[3]
|
||||
break
|
||||
|
||||
if paste_url is None:
|
||||
raise ValueError('Could not extract paste URL from response')
|
||||
|
||||
return paste_url
|
||||
|
32
pastedb/pastedb01/services/pastie.py
Normal file
@ -0,0 +1,32 @@
|
||||
import requests
|
||||
import json
|
||||
import hashlib
|
||||
|
||||
|
||||
URL = 'https://pastie.io/documents'
|
||||
NAME = 'pastie'
|
||||
|
||||
def get_service_tag():
|
||||
return NAME
|
||||
|
||||
def upload(data):
|
||||
try:
|
||||
json_data = json.dumps(data)
|
||||
md5sum = hashlib.md5(json_data.encode('utf-8')).hexdigest()
|
||||
response = requests.post('https://pastie.io/documents', data=json_data)
|
||||
if response.status_code == 200:
|
||||
key = response.json()['key']
|
||||
pastie_url = f'https://pastie.io/{key}'
|
||||
# print(f'JSON object uploaded to Pastie: {pastie_url}')
|
||||
|
||||
return {
|
||||
"service": NAME,
|
||||
"name": NAME + '_' + key,
|
||||
"key": pastie_url,
|
||||
"md5sum": md5sum
|
||||
}
|
||||
else:
|
||||
return None
|
||||
except requests.exceptions.RequestException:
|
||||
return None
|
||||
|
22
pastedb/pastedb01/services/rentry.py
Normal file
@ -0,0 +1,22 @@
|
||||
import subprocess
|
||||
import json
|
||||
import hashlib
|
||||
import re
|
||||
|
||||
|
||||
COMMAND = './external/rentry'
|
||||
NAME = 'rentry'
|
||||
|
||||
def get_service_tag():
|
||||
return NAME
|
||||
|
||||
def upload(data):
|
||||
md5sum = hashlib.md5(json.dumps(data).encode('utf-8')).hexdigest()
|
||||
command = [ COMMAND, 'new', json.dumps(data)]
|
||||
output = subprocess.check_output(command, universal_newlines=True)
|
||||
#print(output)
|
||||
lines = output.strip().split('\n')
|
||||
url = re.search("(?P<url>https?://[^\s]+)", lines[0]).group("url")
|
||||
edit_code = lines[1].split(':')[-1].strip()
|
||||
return {'name': 'rentry_' + edit_code, 'service': 'Rentry', 'key': url, 'md5sum': md5sum}
|
||||
|
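Since upload() shells out to the bundled rentry client, the entry can be read back the same way with the client's raw command. A small sketch, assuming COMMAND points at the same client as above and that the stored key is the rentry URL; because upload() pastes a JSON blob, the raw text parses straight back into the original data.

import json
import subprocess

def fetch(url):
    # 'rentry raw -u <url>' prints the stored text, which upload() posted as JSON
    output = subprocess.check_output([COMMAND, 'raw', '-u', url], universal_newlines=True)
    return json.loads(output)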
45
pastedb/pastedb01/services/sprunge.py
Normal file
@@ -0,0 +1,45 @@
import requests
import json
import hashlib
import re

URL = 'http://sprunge.us'
NAME = 'sprunge'


def get_service_tag():
    return NAME


def upload(data):
    try:
        json_data = json.dumps(data)
        md5sum = hashlib.md5(json_data.encode('utf-8')).hexdigest()

        # Send the HTTP POST request to the Sprunge API
        response = requests.post(URL, data={'sprunge': json_data})
        if response.status_code == 200:

            # Get the URL of the uploaded text from the response body
            sprunge_url = response.text.strip()

            #print('Uploaded to:', sprunge_url)

            # Use a regular expression to extract the random ID from the URL
            match = re.match(r'^http://sprunge\.us/(\w+)$', sprunge_url)
            if match:
                random_id = match.group(1)
                #print('Random ID:', random_id)
                key = "sprunge_" + random_id + '_' + md5sum[:5]
            else:
                # without a parsable URL there is no usable key to return
                print('Invalid Sprunge URL:', sprunge_url)
                return None

            return {
                "service": "sprunge",
                "name": key,
                "key": sprunge_url,
                "md5sum": md5sum
            }
        else:
            return None
    except requests.exceptions.RequestException:
        return None
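For completeness, a hedged sketch of the read-back path: sprunge serves the raw text at the URL returned by upload(), so a plain GET recovers the uploaded JSON. The helper name is illustrative, not part of the module.

import json
import requests

def fetch(sprunge_url):
    # The paste is served as plain text at the URL returned by upload()
    response = requests.get(sprunge_url, timeout=10)
    response.raise_for_status()
    return json.loads(response.text)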
26
pastedb/pastedb01/services/termbin.py
Normal file
@@ -0,0 +1,26 @@
import subprocess
import hashlib
import time
import random
import string


def generate_name():
    """Generate a random name for the paste"""
    ts = int(time.time())
    rand_str = ''.join(random.choices(string.ascii_lowercase, k=5))
    name = f"termbin-{ts}-{rand_str}"
    return name


def upload(data):
    """Upload the data to termbin.com"""
    name = generate_name()
    try:
        cmd = f"echo '{data}' | nc termbin.com 9999"
        response = subprocess.check_output(cmd, shell=True, timeout=5).decode()
        # termbin echoes back the paste URL (with a trailing NUL byte and newline)
        url = response.replace('\x00', '').strip()
        md5sum = hashlib.md5(data.encode('utf-8')).hexdigest()
        return {'service': 'termbin', 'name': name, 'key': url, 'md5sum': md5sum}
    except (subprocess.CalledProcessError, subprocess.TimeoutExpired) as e:
        print(f"Upload failed with error: {e}")
        return None
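A small alternative sketch for the same transport that avoids the shell pipeline: the echo/quoting step above breaks if the data itself contains single quotes, whereas piping the payload through stdin does not. upload_via_nc is a hypothetical helper, not part of the module, and it still assumes nc is installed.

import subprocess

def upload_via_nc(data, timeout=5):
    # Same termbin.com:9999 transport, but the payload goes in via stdin
    result = subprocess.run(
        ['nc', 'termbin.com', '9999'],
        input=data.encode('utf-8'),
        stdout=subprocess.PIPE,
        timeout=timeout,
        check=True,
    )
    return result.stdout.decode('utf-8').replace('\x00', '').strip()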
27
pastedb/pastedb01/upload.py
Normal file
@@ -0,0 +1,27 @@
from typing import Dict
from services import pastie, dpaste, rentry, defau, sprunge, opendev
from data.data import add_data, save_data, set_encryption_key


def upload_and_store(data: Dict):
    # set encryption key if necessary
    my_key = b'upload_id=upload_and_store(my_da'
    set_encryption_key(my_key)

    traces = {}
    for service in [pastie, dpaste, rentry, defau, sprunge, opendev]:
        result = service.upload(data)
        if result:
            traces[result['name']] = result['key']
            add_data(service.get_service_tag(), result['name'], result['md5sum'])
    save_data('data.json', data, key=my_key)
    return traces


my_data = {"name": "Sashenka", "age": 26, "country": "Anguilla"}


upload_trace = upload_and_store(my_data)

print('trace: ', upload_trace)
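The trace returned by upload_and_store maps a per-service name to the paste URL, so a caller can loop over it to verify or re-fetch the uploads. A minimal hedged sketch; a plain GET is an assumption that holds for the plain-text services such as sprunge and termbin, not necessarily for all of them.

import requests

for name, url in upload_trace.items():
    try:
        status = requests.get(url, timeout=10).status_code
    except requests.exceptions.RequestException:
        status = 'unreachable'
    print(f'{name}: {url} -> {status}')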
161
pastedb/rentry
Executable file
@@ -0,0 +1,161 @@
#!/usr/bin/env python3

import getopt
import http.cookiejar
import sys
import urllib.parse
import urllib.request
from http.cookies import SimpleCookie
from json import loads as json_loads
from os import environ

_headers = {"Referer": 'https://rentry.co'}


class UrllibClient:
    """Simple HTTP Session Client, keeps cookies."""

    def __init__(self):
        self.cookie_jar = http.cookiejar.CookieJar()
        self.opener = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(self.cookie_jar))
        urllib.request.install_opener(self.opener)

    def get(self, url, headers={}):
        request = urllib.request.Request(url, headers=headers)
        return self._request(request)

    def post(self, url, data=None, headers={}):
        postdata = urllib.parse.urlencode(data).encode()
        request = urllib.request.Request(url, postdata, headers)
        return self._request(request)

    def _request(self, request):
        response = self.opener.open(request)
        response.status_code = response.getcode()
        response.data = response.read().decode('utf-8')
        return response


def raw(url):
    client = UrllibClient()
    return json_loads(client.get('https://rentry.co/api/raw/{}'.format(url)).data)


def new(url, edit_code, text):
    client, cookie = UrllibClient(), SimpleCookie()

    cookie.load(vars(client.get('https://rentry.co'))['headers']['Set-Cookie'])
    csrftoken = cookie['csrftoken'].value

    payload = {
        'csrfmiddlewaretoken': csrftoken,
        'url': url,
        'edit_code': edit_code,
        'text': text
    }

    return json_loads(client.post('https://rentry.co/api/new', payload, headers=_headers).data)


def edit(url, edit_code, text):
    client, cookie = UrllibClient(), SimpleCookie()

    cookie.load(vars(client.get('https://rentry.co'))['headers']['Set-Cookie'])
    csrftoken = cookie['csrftoken'].value

    payload = {
        'csrfmiddlewaretoken': csrftoken,
        'edit_code': edit_code,
        'text': text
    }

    return json_loads(client.post('https://rentry.co/api/edit/{}'.format(url), payload, headers=_headers).data)


def usage():
    print('''
Usage: rentry {new | edit | raw} {-h | --help} {-u | --url} {-p | --edit-code} text

Commands:
  new                        create a new entry
  edit                       edit an existing entry
  raw                        get raw markdown text of an existing entry

Options:
  -h, --help                 show this help message and exit
  -u, --url URL              url for the entry, random if not specified
  -p, --edit-code EDIT-CODE  edit code for the entry, random if not specified

Examples:
  rentry new 'markdown text'               # new entry with random url and edit code
  rentry new -p pw -u example 'text'       # with custom edit code and url
  rentry edit -p pw -u example 'text'      # edit the example entry
  cat FILE | rentry new                    # read from FILE and paste it to rentry
  cat FILE | rentry edit -p pw -u example  # read from FILE and edit the example entry
  rentry raw -u example                    # get raw markdown text
  rentry raw -u https://rentry.co/example  # -u accepts absolute and relative urls
''')


if __name__ == '__main__':
    try:
        environ.pop('POSIXLY_CORRECT', None)
        opts, args = getopt.gnu_getopt(sys.argv[1:], "hu:p:", ["help", "url=", "edit-code="])
    except getopt.GetoptError as e:
        sys.exit("error: {}".format(e))

    command, url, edit_code, text = None, '', '', None

    for o, a in opts:
        if o in ("-h", "--help"):
            usage()
            sys.exit()
        elif o in ("-u", "--url"):
            url = urllib.parse.urlparse(a).path.strip('/')
        elif o in ("-p", "--edit-code"):
            edit_code = a

    command = (args[0:1] or [None])[0]
    command or sys.exit(usage())
    command in ['new', 'edit', 'raw'] or sys.exit('error: command must be new, edit or raw')

    text = (args[1:2] or [None])[0]
    if not text and command != 'raw':
        text = sys.stdin.read().strip()
        text or sys.exit('error: text is required')

    if command == 'new':
        response = new(url, edit_code, text)
        if response['status'] != '200':
            print('error: {}'.format(response['content']))
            try:
                for i in response['errors'].split('.'):
                    i and print(i)
                sys.exit(1)
            except:
                sys.exit(1)
        else:
            print('Url: {}\nEdit code: {}'.format(response['url'], response['edit_code']))

    elif command == 'edit':
        url or sys.exit('error: url is required')
        edit_code or sys.exit('error: edit code is required')

        response = edit(url, edit_code, text)
        if response['status'] != '200':
            print('error: {}'.format(response['content']))
            try:
                for i in response['errors'].split('.'):
                    i and print(i)
                sys.exit(1)
            except:
                sys.exit(1)
        else:
            print('Ok')

    elif command == 'raw':
        url or sys.exit('error: url is required')
        response = raw(url)
        if response['status'] != '200':
            sys.exit('error: {}'.format(response['content']))
        print(response['content'])
34
pastedb/rentry_upload.py
Normal file
@@ -0,0 +1,34 @@
import hashlib
import json
import re
import subprocess


def upload(data):
    json_data = json.dumps(data)
    md5sum = hashlib.md5(json_data.encode('utf-8')).hexdigest()

    # Run rentry client as subprocess; the JSON payload is piped in via stdin
    p = subprocess.Popen(
        ['./services/rentry', 'new'],
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE
    )

    # Send JSON data to rentry client
    stdout, stderr = p.communicate(json_data.encode('utf-8'))

    if p.returncode != 0:
        # Rentry client failed
        print(f"Error running rentry client: {stderr.decode('utf-8')}")
        return None

    # The rentry client prints "Url: <url>" and "Edit code: <code>" on success
    output = stdout.decode('utf-8')
    match = re.search(r'Url:\s*(?P<url>\S+)\s*Edit code:\s*(?P<code>\S+)', output)
    if match is None:
        print(f"Unexpected rentry client output: {output}")
        return None
    paste_url = match.group('url')
    slug = paste_url.rstrip('/').split('/')[-1]

    return {
        "service": "rentry",
        "name": slug,
        "key": paste_url,
        "md5sum": md5sum
    }
BIN
pastedb/solomon/cat.png
Normal file
Binary file not shown. (Size: 6.9 MiB)
1
pastedb/solomon/my_key.key
Normal file
@@ -0,0 +1 @@
f96404341e07a433c8e46612c16cd311de6f63cecc98bb6c799c1c79178bbfec3c07d106f3ff7426b39bb9fcc63c7d03533b9494f54bfa015240cbfdf3ea2274
138
pastedb/solomon/solomon-aes-tesing.py
Executable file
@@ -0,0 +1,138 @@
import reedsolo
from Cryptodome.Cipher import AES
from Cryptodome.Random import get_random_bytes
from Cryptodome.Protocol.KDF import scrypt
from Cryptodome.Util.Padding import pad, unpad

# Parameters for key derivation function (KDF)
KDF_SALT = b"salt_for_key_derivation"
KDF_N = 2 ** 14  # CPU/memory cost parameter for scrypt KDF
KDF_r = 8        # Block size parameter for scrypt KDF
KDF_p = 1        # Parallelization parameter for scrypt KDF
KDF_KEY_LEN = 32  # Length of derived encryption key


def encrypt_data(data, number, password=None, keyfile=None):
    """
    Encrypts data using Reed-Solomon coding and AES encryption.

    Args:
        data (bytes): The data to encrypt.
        number (int): The number of pieces to split the data into using Reed-Solomon coding.
        password (bytes or None): The password to use for key derivation using the scrypt KDF. If None,
            the key is read from the file specified by keyfile.
        keyfile (str or None): The path to the file containing the encryption key. If None, the key
            is derived from the password using the scrypt KDF.
    Returns:
        A list of bytes objects, each of which is an encrypted piece of the input data.
    Raises:
        ValueError: If the data cannot be split into the requested number of pieces using Reed-Solomon
            coding, or if an error occurs during key derivation.
    """
    # Use Reed-Solomon to split data into number pieces
    reed_rs = reedsolo.RSCodec(number)
    try:
        data_pieces = reed_rs.encode(data)
    except reedsolo.ReedSolomonError as reed_error:
        raise ValueError("Error during Reed-Solomon encoding: {}".format(str(reed_error)))

    # Derive encryption key from password using scrypt KDF, or read from file if specified
    if keyfile is None:
        try:
            key = scrypt(password, KDF_SALT, KDF_KEY_LEN, N=KDF_N, r=KDF_r, p=KDF_p)
        except ValueError as reed_error:
            raise ValueError("Error during key derivation: {}".format(str(reed_error)))
    else:
        key = read_key_from_file(keyfile)

    # Encrypt each piece using AES in GCM mode with derived key
    encrypted_pieces = []
    for piece in data_pieces:
        aes = AES.new(key, AES.MODE_GCM, nonce=get_random_bytes(12))
        ciphertext, tag = aes.encrypt_and_digest(pad(piece, AES.block_size))
        encrypted_pieces.append(ciphertext)

    return encrypted_pieces


def read_key_from_file(keyfile):
    """
    Reads an encryption key from a file.
    Args:
        keyfile (str): The path to the file containing the encryption key.
    Returns:
        A bytes object containing the encryption key.
    Raises:
        ValueError: If the key file does not exist or the key length is invalid.
    """
    with open(keyfile, "rb") as key_file:
        key = key_file.read().strip()
        if len(key) != KDF_KEY_LEN:
            raise ValueError("Invalid key length")
        return key


def decrypt_data(data_pieces, key, keyfile=None, password=None):
    """
    Decrypts a list of encrypted data pieces using AES in CBC mode with PKCS7 padding,
    and returns the original data by reassembling the pieces using Reed-Solomon decoding.

    Args:
        data_pieces (list of bytes): The list of encrypted data pieces to decrypt and reassemble.
        key (bytes): The encryption key to use for decrypting the data pieces.
        keyfile (str, optional): Path to a file containing the encryption key. If specified,
            the key will be read from this file instead of the `key` argument.
        password (str, optional): Password to use for decrypting the encryption key. If
            specified, the password will be used to derive the key from the keyfile.
    Returns:
        bytes: The original data obtained by reassembling the decrypted data pieces using
            Reed-Solomon decoding.
    Raises:
        ValueError: If both `key` and `keyfile` arguments are None, or if both are specified.
        IOError: If the keyfile cannot be read.
        ValueError: If the password is incorrect or the keyfile does not contain a valid key.
    """
    # Determine the encryption key
    if keyfile is not None and key is None:
        with open(keyfile, "rb") as f:
            key = f.read()
        if password is not None:
            key = scrypt(
                password.encode(), KDF_SALT, KDF_KEY_LEN, N=KDF_N, r=KDF_r, p=KDF_p
            )
            if key != fernet.decrypt(key):
                raise ValueError("Incorrect password or invalid key file")
    elif key is not None and keyfile is None:
        pass
    else:
        raise ValueError("Must specify either key or keyfile, but not both")

    # Decrypt each data piece and reassemble the original data
    piece_size = len(data_pieces[0])
    decoded_pieces = reedsolo.RSCodec(len(data_pieces)).decode(data_pieces)
    cipher = AES.new(key, AES.MODE_CBC, iv=decoded_pieces[0][:16])
    decrypted_data = b""
    for piece in decoded_pieces:
        decrypted_data += unpad(cipher.decrypt(piece), AES.block_size)
    return decrypted_data


if __name__ == "__main__":
    # Read encryption key from file
    key = read_key_from_file("./my_key.key")

    # Read data from file
    with open("./cat.png", "rb") as f:
        data = f.read()

    # Encrypt data and get nonce and encrypted pieces
    nonce, encrypted_pieces = encrypt_data(data, 6, keyfile="./my_key.key")

    # Decrypt data pieces
    decrypted_data = decrypt_data(
        [nonce] + encrypted_pieces, key, keyfile="./my_key.key"
    )

    # Write decrypted data to file
    with open("restored_cat.png", "wb") as f:
        f.write(decrypted_data)
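The GCM step above discards the nonce and authentication tag, so the ciphertext could never be verified or decrypted later. For reference, a minimal self-contained sketch of the round trip PyCryptodome expects, independent of the Reed-Solomon splitting; the key and plaintext here are placeholders.

from Cryptodome.Cipher import AES
from Cryptodome.Random import get_random_bytes

key = get_random_bytes(32)
plaintext = b'example piece of data'

# Encrypt: GCM needs no padding; nonce, ciphertext and tag must all be stored
nonce = get_random_bytes(12)
cipher = AES.new(key, AES.MODE_GCM, nonce=nonce)
ciphertext, tag = cipher.encrypt_and_digest(plaintext)

# Decrypt: rebuild the cipher with the same nonce and verify the tag
cipher = AES.new(key, AES.MODE_GCM, nonce=nonce)
restored = cipher.decrypt_and_verify(ciphertext, tag)
assert restored == plaintext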
38
pastedb/solomon/solomon-reassemble.py
Normal file
@@ -0,0 +1,38 @@
import os
import argparse
from typing import List
from reedsolo import RSCodec


def parse_arguments():
    parser = argparse.ArgumentParser(description='Reassemble file from multiple Reed-Solomon encoded pieces.')
    parser.add_argument('input_files', type=str, nargs='+', help='List of files containing the encoded pieces, in order.')
    return parser.parse_args()


def reassemble_file(input_files: List[str]):
    # Load the encoded pieces into memory
    encoded_pieces = []
    for input_file in input_files[:-1]:
        with open(input_file, 'rb') as f:
            encoded_piece_data = f.read()
            encoded_piece = bytearray(encoded_piece_data)
            print(f"Read {len(encoded_piece)} bytes from {input_file}")  # Print the size of the bytearray
            encoded_pieces.append(encoded_piece)

    # Decode the pieces using Reed-Solomon coding
    n = len(encoded_pieces)
    codec = RSCodec(n)
    decoded_pieces = codec.decode(encoded_pieces)

    # Concatenate the decoded pieces into the original file data
    file_data = b''.join(decoded_pieces)

    # Write the original file data to disk
    output_file_path = input_files[-1]
    with open(output_file_path, 'wb') as f:
        f.write(file_data)


if __name__ == '__main__':
    args = parse_arguments()
    reassemble_file(args.input_files)
63
pastedb/solomon/solomon-test.py
Executable file
@@ -0,0 +1,63 @@
import os
import argparse
from math import ceil
from pydantic import BaseModel
from typing import List
from reedsolo import RSCodec


class Args(BaseModel):
    file_path: str
    n: int


def parse_arguments():
    parser = argparse.ArgumentParser(description='Split file into multiple pieces using Reed-Solomon coding.')
    parser.add_argument('file_path', type=str, help='Path to the input file.')
    parser.add_argument('n', type=int, help='Number of pieces to split the file into.')
    return Args(**vars(parser.parse_args()))


def split_file_into_pieces(file_path: str, n: int):
    # Load the file into memory
    with open(file_path, 'rb') as f:
        file_data = f.read()

    # Calculate the size of each piece
    piece_size = ceil(len(file_data) / n)

    # Create the folders for the pieces
    folder_names = [f'piece_{i+1}' for i in range(n)]
    for folder_name in folder_names:
        os.makedirs(folder_name, exist_ok=True)

    # Encode each piece using Reed-Solomon coding and write it to the corresponding folder
    codec = RSCodec(n)
    for i, folder_name in enumerate(folder_names):
        piece_data = file_data[i*piece_size:(i+1)*piece_size]
        encoded_data = codec.encode(piece_data)
        with open(os.path.join(folder_name, f'{i+1}.dat'), 'wb') as f:
            f.write(encoded_data)


def reassemble_file(folder_names: List[str], output_file_path: str):
    # Load the encoded pieces into memory
    encoded_pieces = []
    for i, folder_name in enumerate(folder_names):
        with open(os.path.join(folder_name, f'{i+1}.dat'), 'rb') as f:
            encoded_piece_data = f.read()
            encoded_pieces.append(encoded_piece_data)

    # Decode the pieces using Reed-Solomon coding
    codec = RSCodec(len(encoded_pieces))
    decoded_pieces = codec.decode(encoded_pieces)

    # Concatenate the decoded pieces into the original file data
    file_data = b''.join(decoded_pieces)

    # Write the original file data to disk
    with open(output_file_path, 'wb') as f:
        f.write(file_data)


if __name__ == '__main__':
    args = parse_arguments()
    split_file_into_pieces(args.file_path, args.n)
    folder_names = [f'piece_{i+1}' for i in range(args.n)]
    reassemble_file(folder_names, 'reconstructed_file.png')
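The reedsolo codec operates on a single byte string per call, appending nsym parity bytes, and recent versions return a tuple from decode, which is easy to trip over when passing lists of pieces as above. A short hedged round-trip sketch of the basic API:

from reedsolo import RSCodec

rsc = RSCodec(10)                     # 10 parity bytes appended to the message
encoded = rsc.encode(b'hello reed-solomon')

# Corrupt a few bytes; up to nsym // 2 byte errors are correctable
damaged = bytearray(encoded)
damaged[0] ^= 0xFF
damaged[3] ^= 0xFF

decoded = rsc.decode(damaged)
# Recent reedsolo versions return (message, message+ecc, errata positions);
# older ones return just the message bytearray
message = decoded[0] if isinstance(decoded, tuple) else decoded
assert bytes(message) == b'hello reed-solomon'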
8
python3-ecdsa-ecdh-tests.py
Executable file
@@ -0,0 +1,8 @@
#!/usr/bin/python3

import ecdsa

ecdh = ecdsa.ECDH(curve=ecdsa.SECP256k1)
private_key = ecdh.generate_private_key()

print(str(private_key))
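The test above stops at generating one private key. A hedged sketch of what a full key agreement with the ecdsa package looks like, with two ECDH objects standing in for the two parties:

import ecdsa

alice = ecdsa.ECDH(curve=ecdsa.SECP256k1)
bob = ecdsa.ECDH(curve=ecdsa.SECP256k1)

alice.generate_private_key()
bob.generate_private_key()

# Exchange public keys and derive the shared secret on both sides
alice.load_received_public_key(bob.get_public_key())
bob.load_received_public_key(alice.get_public_key())

assert alice.generate_sharedsecret_bytes() == bob.generate_sharedsecret_bytes()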
31
shashasha.py
Executable file
@@ -0,0 +1,31 @@
#!/usr/bin/python3.10
"""
This is a simple script that gives out quite random chars.
Like we almost won Eurovision.
"""

from hashlib import sha256
from random import randint

runs = 400
turn = 0
start = "Hello There, friend!"


def turns_and_runs(turn, input, end):
    input_bytes = input.encode()
    input_hash = sha256(input_bytes)
    return_blob = input_hash.hexdigest()

    if turn < runs:
        my_random = randint(10, 60)
        end += return_blob[my_random]
        return_blob = return_blob + return_blob + str(my_random)
        end = turns_and_runs(turn + 1, return_blob, end)

    return end


result = turns_and_runs(turn, start, "")

print(result)
25
webpage-stuff/button-example.html
Normal file
@@ -0,0 +1,25 @@
<!DOCTYPE html>
<html>
<head>
  <style>
    textarea {
      overflow: hidden !important;
    }
  </style>
  <script>
    function buttonPressed() {
      var inputText = document.getElementById("inputText").value;
      var textBox = document.getElementById("myTextBox");
      textBox.value += inputText + "\n";
      textBox.scrollTop = textBox.scrollHeight;
    }
  </script>
</head>
<body>
  <textarea id="myTextBox" rows="4" cols="50" style="overflow:auto"></textarea>
  <br>
  <input type="text" id="inputText">
  <button onclick="buttonPressed()">Press me</button>
</body>
</html>