Compare commits
No commits in common. "main" and "master" have entirely different histories.
3
.gitignore
vendored
Normal file
3
.gitignore
vendored
Normal file
@ -0,0 +1,3 @@
|
||||
*.db
|
||||
*.log
|
||||
*/__pycache__/*
|
9
LICENSE
9
LICENSE
@ -1,9 +0,0 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) <year> <copyright holders>
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
23
README.md
23
README.md
@ -1,3 +1,22 @@
|
||||
# chatgpt
# Coding with ChatGPT

This repository contains my journey of writing code with ChatGPT, chat.openai.com/chat.
It holds my random coding, typically done late at night when I'm too tired to do anything else... :)

## Some pointers:

- btc_tracker
  1. Server/fetcher: fetches Bitcoin OHLC data from multiple sources and offers it forward via a simple API.
  2. Client: fetches data from the server and shows it via a gnuplot chart.

- btc_wallet
  Simple Python 3 scripts to create a Bitcoin wallet and store it in a KeePassXC database.
  Still planning to add on-demand key generation and transaction signing.

- pastedb
  Trying to build a simple database on top of pastebin-like services.

- encryption-on-apis & webapp & webpage-stuff
  Some random experiments with creating simple and secure web services.

- letters
  Saved ChatGPT chats and messages
26
TryUTF8Read.py
Executable file
26
TryUTF8Read.py
Executable file
@ -0,0 +1,26 @@
|
||||
#!/usr/bin/python3
import sys
import codecs

def test_utf8_encoding(filepath):
    try:
        with codecs.open(filepath, 'r', 'utf-8') as testfile:
            testfile.read()
        #print(f"{filepath} is openable with UTF-8 encoding.")
    except UnicodeDecodeError:
        print(f"{filepath} is not openable with UTF-8 encoding.")
        print(f"Converting {filepath} from ISO-8859-1 to UTF-8...")
        with codecs.open(filepath, 'r', 'iso-8859-1') as f:
            # codecs.open already decodes, so read() returns str, not bytes
            content = f.read()
        with codecs.open(filepath, 'w', 'utf-8') as f:
            f.write(content)
        print(f"{filepath} has been converted to UTF-8.")

if len(sys.argv) < 2:
    print("Please provide a file name as a command line argument.")
    sys.exit(1)

filepath = sys.argv[1]
test_utf8_encoding(filepath)
51
async-rsa-encryption.py
Normal file
51
async-rsa-encryption.py
Normal file
@ -0,0 +1,51 @@
|
||||
#!/usr/bin/python3
import json
import asyncio
from cryptography.fernet import Fernet
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import rsa, padding


# Generate a new RSA key pair
private_key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
public_key = private_key.public_key()


# Asynchronously encrypt and decrypt a JSON document
async def encrypt_decrypt_json(json_data: dict) -> dict:
    # Convert the JSON data to a string and encode it as bytes
    json_bytes = json.dumps(json_data).encode()

    # Encrypt the JSON bytes using Fernet with a fresh symmetric key
    fernet_key = Fernet.generate_key()
    encrypted_json_bytes = Fernet(fernet_key).encrypt(json_bytes)

    # Wrap the Fernet key with the RSA public key (hybrid encryption)
    oaep = padding.OAEP(
        mgf=padding.MGF1(algorithm=hashes.SHA256()),
        algorithm=hashes.SHA256(),
        label=None,
    )
    encrypted_fernet_key = public_key.encrypt(fernet_key, oaep)

    # Unwrap the Fernet key with the RSA private key and decrypt the JSON bytes
    recovered_key = private_key.decrypt(encrypted_fernet_key, oaep)
    decrypted_json_bytes = Fernet(recovered_key).decrypt(encrypted_json_bytes)

    # Decode the decrypted JSON bytes back to a string
    decrypted_json_str = decrypted_json_bytes.decode()

    # Convert the decrypted JSON string back to a dictionary
    decrypted_json_data = json.loads(decrypted_json_str)

    return decrypted_json_data


# Example usage
json_data = {
    "user": "johnsmith",
    "password": "correcthorsebatterystaple"
}

# Asynchronously encrypt and decrypt the JSON data
decrypted_json_data = asyncio.run(encrypt_decrypt_json(json_data))

# Print the decrypted JSON
print(decrypted_json_data)
104
bitcoin-price-database.py
Executable file
104
bitcoin-price-database.py
Executable file
@ -0,0 +1,104 @@
|
||||
#!/usr/bin/python3
|
||||
|
||||
import os
|
||||
import json
|
||||
import sqlite3
|
||||
import requests
|
||||
import matplotlib
|
||||
import matplotlib.pyplot as plt
|
||||
import matplotlib.animation as animation
|
||||
import matplotlib.style as style
|
||||
|
||||
def Checkthedatabase():
|
||||
## Some sanity for the database
|
||||
# check if btc_timeseries.db database file exists
|
||||
if not os.path.exists("btc_timeseries.db"):
|
||||
db = sqlite3.connect("btc_timeseries.db")
|
||||
db.execute("CREATE TABLE timeseries (timestamp INTEGER, value REAL)")
|
||||
db.commit()
|
||||
db.close()
|
||||
|
||||
db = sqlite3.connect("btc_timeseries.db")
|
||||
|
||||
# Check if the table exists
|
||||
table_exists = False
|
||||
cursor = db.execute("PRAGMA table_info(timeseries)")
|
||||
for row in cursor:
|
||||
table_exists = True
|
||||
|
||||
# Create the table if it doesn't exist
|
||||
if not table_exists:
|
||||
db.execute("CREATE TABLE timeseries (timestamp INTEGER, value REAL)")
|
||||
db.commit()
|
||||
|
||||
def Getdata():
|
||||
#fetch the price data
|
||||
payload = {'symbol': 'BTCUSDT'}
|
||||
response = requests.get('https://api.binance.com/api/v3/avgPrice', params=payload)
|
||||
|
||||
#get the usd_value
|
||||
json_data = response.json()
|
||||
usd_value = json_data['price']
|
||||
|
||||
### Insert the USD value into the database
|
||||
db.execute("INSERT INTO timeseries (timestamp, value) VALUES (datetime('now'), ?)", (usd_value,))
|
||||
|
||||
## Save the changes to the database
|
||||
db.commit()
|
||||
#print(db.execute("SELECT * FROM timeseries"))
|
||||
|
||||
#update the graph
|
||||
def Updategraph(num):
|
||||
cursor.execute("SELECT timestamp, value FROM timeseries WHERE timestamp > datetime('now', '-10 second')")
|
||||
stuff = cursor.fetchall()
|
||||
|
||||
timestamps = [row[0] for row in stuff]
|
||||
values = [row[1] for row in stuff]
|
||||
|
||||
line.set_data(timestamps, values)
|
||||
ax.relim()
|
||||
ax.autoscale_view()
|
||||
Getdata()
|
||||
|
||||
Checkthedatabase()
|
||||
|
||||
db = sqlite3.connect("btc_timeseries.db")
|
||||
Getdata()
|
||||
|
||||
##some styling for the plot
|
||||
style.use('dark_background')
|
||||
|
||||
colors = {
|
||||
'figure.facecolor': '#222222',
|
||||
'axes.facecolor': '#222222',
|
||||
'axes.edgecolor': '#FFFFFF',
|
||||
'axes.labelcolor': '#FFFFFF',
|
||||
'grid.color': '#444444',
|
||||
'grid.linestyle': 'dotted',
|
||||
'lines.color': '#FFFFFF'
|
||||
}
|
||||
matplotlib.rcParams.update(colors)
|
||||
|
||||
# Create a figure and axes for the plot
|
||||
fig, ax = plt.subplots()
|
||||
|
||||
#query database for the data
|
||||
cursor = db.execute("SELECT timestamp, value FROM timeseries")
|
||||
|
||||
stuff = cursor.fetchall()
|
||||
# Extract the timestamp and value columns from the query result
|
||||
timestamps = [row[0] for row in stuff]
|
||||
values = [row[1] for row in stuff]
|
||||
|
||||
# Create a line plot using the time series data
|
||||
line, = ax.plot(timestamps, values)
|
||||
plt.plot(timestamps, values)
|
||||
|
||||
# Create an animation using the update function
|
||||
ani = animation.FuncAnimation(fig, Updategraph, interval=60000)
|
||||
|
||||
# Show the plot
|
||||
plt.show()
|
||||
|
||||
db.close()
|
||||
exit(0)
|
BIN
btc_timeseries.db
Normal file
BIN
btc_timeseries.db
Normal file
Binary file not shown.
5
btc_tracker.py
Normal file
5
btc_tracker.py
Normal file
@ -0,0 +1,5 @@
|
||||
import fetcher
|
||||
|
||||
fetcher.start()
|
||||
|
||||
exit(0)
|
20
btc_tracker/01042023/TheClient/bin/TheClient.py
Normal file
20
btc_tracker/01042023/TheClient/bin/TheClient.py
Normal file
@ -0,0 +1,20 @@
|
||||
|
||||
import os
|
||||
import configparser
|
||||
import requests
|
||||
from datetime import datetime
|
||||
from TheClient.database.db_utils import Database
|
||||
from TheClient.graphing.graph_utils import Graph
|
||||
|
||||
def load_config(config_file):
|
||||
config = configparser.ConfigParser()
|
||||
config.read(config_file)
|
||||
return config
|
||||
|
||||
def get_btc_price():
|
||||
response = requests.get('https://api.binance.com/api/v3/avgPrice', params={'symbol': 'BTCUSDT'})
|
||||
json_data = response.json()
|
||||
return float(json_data['price'])
|
||||
|
||||
def main():
|
||||
config = load_config(os.path.join(os.path.dirname(__file__), '..', 'config', 'The
|
0
btc_tracker/01042023/TheClient/database/__init__.py
Normal file
0
btc_tracker/01042023/TheClient/database/__init__.py
Normal file
27
btc_tracker/01042023/TheClient/database/db_utils.py
Normal file
27
btc_tracker/01042023/TheClient/database/db_utils.py
Normal file
@ -0,0 +1,27 @@
|
||||
import sqlite3
|
||||
|
||||
class Database():
|
||||
def __init__(self, db_file):
|
||||
self.db_file = db_file
|
||||
self._create_table()
|
||||
|
||||
def _create_table(self):
|
||||
with sqlite3.connect(self.db_file) as conn:
|
||||
cursor = conn.cursor()
|
||||
cursor.execute('''CREATE TABLE IF NOT EXISTS timeseries
|
||||
(timestamp INTEGER PRIMARY KEY, value REAL)''')
|
||||
conn.commit()
|
||||
|
||||
def insert_data(self, timestamp, value):
|
||||
with sqlite3.connect(self.db_file) as conn:
|
||||
cursor = conn.cursor()
|
||||
cursor.execute('''INSERT INTO timeseries (timestamp, value)
|
||||
VALUES (?, ?)''', (timestamp, value))
|
||||
conn.commit()
|
||||
|
||||
def fetch_data(self, limit):
|
||||
with sqlite3.connect(self.db_file) as conn:
|
||||
cursor = conn.cursor()
|
||||
cursor.execute('''SELECT timestamp, value FROM timeseries
|
||||
ORDER BY timestamp DESC LIMIT ?''', (limit,))
|
||||
return cursor.fetchall()
|
0
btc_tracker/01042023/TheClient/fetch/__init__.py
Normal file
0
btc_tracker/01042023/TheClient/fetch/__init__.py
Normal file
0
btc_tracker/01042023/TheClient/graphing/__init__.py
Normal file
0
btc_tracker/01042023/TheClient/graphing/__init__.py
Normal file
12
btc_tracker/01042023/TheClient/graphing/graph_utils.py
Normal file
12
btc_tracker/01042023/TheClient/graphing/graph_utils.py
Normal file
@ -0,0 +1,12 @@
|
||||
import matplotlib.pyplot as plt
|
||||
|
||||
class Graph():
|
||||
def __init__(self, xdata, ydata):
|
||||
self.fig, self.ax = plt.subplots()
|
||||
self.line, = self.ax.plot(xdata, ydata)
|
||||
|
||||
def update_graph(self, xdata, ydata):
|
||||
self.line.set_data(xdata, ydata)
|
||||
self.ax.relim()
|
||||
self.ax.autoscale_view()
|
||||
self.fig.canvas.draw()
|
2
btc_tracker/01042023/TheClient/requirements.txt
Normal file
2
btc_tracker/01042023/TheClient/requirements.txt
Normal file
@ -0,0 +1,2 @@
|
||||
requests==2.25.1
|
||||
matplotlib==3.6.2
|
0
btc_tracker/01042023/TheClient/tests/__init__.py
Normal file
0
btc_tracker/01042023/TheClient/tests/__init__.py
Normal file
BIN
btc_tracker/database
Normal file
BIN
btc_tracker/database
Normal file
Binary file not shown.
16
btc_tracker/fetch_data.py
Executable file
16
btc_tracker/fetch_data.py
Executable file
@ -0,0 +1,16 @@
|
||||
#!/usr/bin/python3
|
||||
import json, time
|
||||
|
||||
# Load the JSON file
|
||||
with open('sources.json', 'r') as f:
|
||||
data = json.load(f)
|
||||
|
||||
# Iterate over the exchanges
|
||||
for exchange in data['exchanges']:
|
||||
# Print the name and URL of the exchange
|
||||
if exchange['name'] == "Kraken":
|
||||
current_time = int(time.time()) - 300
|
||||
exchange['url'] += f"&since={current_time}"
|
||||
print(exchange['name'], exchange['url'])
|
||||
else:
|
||||
print(exchange['name'], exchange['url'])
|
228
btc_tracker/fetcher.py
Executable file
228
btc_tracker/fetcher.py
Executable file
@ -0,0 +1,228 @@
|
||||
#!/usr/bin/python3
|
||||
|
||||
import krakenex
|
||||
import json, sqlite3
|
||||
import requests, os, time
|
||||
import threading
|
||||
|
||||
database = "btc_ohlc.db"
|
||||
|
||||
def Checkthedatabase():
|
||||
## Some sanity for the database
|
||||
# check if btc_timeseries.db database file exists
|
||||
if not os.path.exists(database):
|
||||
db = sqlite3.connect(database)
|
||||
|
||||
db.execute("""\
|
||||
CREATE TABLE ohlc (
|
||||
id INTEGER PRIMARY KEY,
|
||||
exchange TEXT NOT NULL,
|
||||
timestamp INTEGER NOT NULL,
|
||||
open REAL NOT NULL,
|
||||
high REAL NOT NULL,
|
||||
low REAL NOT NULL,
|
||||
close REAL NOT NULL,
|
||||
volume_quote REAL NOT NULL,
|
||||
volume_base REAL NOT NULL,
|
||||
trades INTEGER NOT NULL )""")
|
||||
|
||||
db.commit()
|
||||
db.close()
|
||||
|
||||
db = sqlite3.connect(database)
|
||||
|
||||
# Check if the table exists
|
||||
table_exists = False
|
||||
cursor = db.execute("PRAGMA table_info(ohlc)")
|
||||
for row in cursor:
|
||||
table_exists = True
|
||||
|
||||
# Create the table if it doesn't exist
|
||||
if not table_exists:
|
||||
db.execute("""\
|
||||
CREATE TABLE ohlc (
|
||||
id INTEGER PRIMARY KEY,
|
||||
exchange TEXT NOT NULL,
|
||||
timestamp INTEGER NOT NULL,
|
||||
open REAL NOT NULL,
|
||||
high REAL NOT NULL,
|
||||
low REAL NOT NULL,
|
||||
close REAL NOT NULL,
|
||||
volume_quote REAL NOT NULL,
|
||||
volume_base REAL NOT NULL,
|
||||
trades INTEGER NOT NULL )""")
|
||||
db.commit()
|
||||
|
||||
def fetch_kraken():
|
||||
### Kraken
|
||||
kraken = krakenex.API()
|
||||
|
||||
response = kraken.query_public('OHLC', {'pair': 'BTCUSD', 'interval': 240 })
|
||||
ohlc_data = response['result']['XXBTZUSD']
|
||||
|
||||
candle_stick_data = {
|
||||
'exchange': 'kraken',
|
||||
'timestamp': ohlc_data[1][0],
|
||||
'open': ohlc_data[0][1],
|
||||
'high': max(item[2] for item in ohlc_data),
|
||||
'low': min(item[3] for item in ohlc_data),
|
||||
'close': ohlc_data[-1][4],
|
||||
'volume_quote': sum(float(item[5]) for item in ohlc_data),
|
||||
'volume_base': sum(float(item[6]) for item in ohlc_data),
|
||||
'trades': sum(item[7] for item in ohlc_data),
|
||||
}
|
||||
|
||||
kraken_json = json.dumps(candle_stick_data, indent=2)
|
||||
#print("Kraken: OK")
|
||||
#print(kraken_json)
|
||||
#q.put("Kraken: OK")
|
||||
return kraken_json
|
||||
|
||||
def fetch_bitstamp(q):
|
||||
## Bitstamp
|
||||
response = requests.get("https://www.bitstamp.net/api/v2/ohlc/btcusd/?step=300&limit=1")
|
||||
|
||||
if response.status_code == 200: # check if the request was successful
|
||||
bitstamp_data = response.json()
|
||||
ohlc_data = bitstamp_data["data"]["ohlc"]
|
||||
|
||||
candle_stick_data = {
|
||||
'exchange': 'bitstamp',
|
||||
'timestamp': int(ohlc_data[0]['timestamp']),
|
||||
'open': float(ohlc_data[0]['open']),
|
||||
'high': float(ohlc_data[0]['high']),
|
||||
'low': float(ohlc_data[0]['low']),
|
||||
'close': float(ohlc_data[0]['close']),
|
||||
'volume_quote': float(ohlc_data[0]['volume']),
|
||||
'volume_base': 0, # not provided by Bitstamp API
|
||||
'trades': 0, # not provided by Bitstamp API
|
||||
}
|
||||
|
||||
bitstamp_json = json.dumps(candle_stick_data, indent=2)
|
||||
#print("Bitstamp: OK")
|
||||
#print(bitstamp_json)
|
||||
#q.put("Bitstamp: OK")
|
||||
return bitstamp_json
|
||||
else:
|
||||
print(f"Error fetching data from Bitstamp API: {response.status_code}")
|
||||
q.put("Bitstamp: ERROR")
|
||||
return empty_json
|
||||
|
||||
def fetch_bitfinex(q):
|
||||
## Bitfinex
|
||||
response = requests.get("https://api-pub.bitfinex.com/v2/candles/trade:5m:tBTCUSD/last")
|
||||
|
||||
if response.status_code == 200: # check if the request was successful
|
||||
ohlc_data = response.json()
|
||||
candle_stick_data = {
|
||||
'exchange': 'bitfinex',
|
||||
'timestamp': ohlc_data[0],
|
||||
'open': ohlc_data[1],
|
||||
'high': ohlc_data[2],
|
||||
'low': ohlc_data[3],
|
||||
'close': ohlc_data[4],
|
||||
'volume_quote': ohlc_data[5],
|
||||
'volume_base': 0, # not provided by Bitfinex API
|
||||
'trades': 0, # not provided by Bitfinex API
|
||||
}
|
||||
|
||||
bitfinex_json = json.dumps(candle_stick_data, indent=2)
|
||||
#print("Bitfinex: OK")
|
||||
#print(bitfinex_json)
|
||||
#q.put("Bitfinex: OK")
|
||||
return bitfinex_json
|
||||
else:
|
||||
print(f"Error fetching data from Bitfinex API: {response.status_code}")
|
||||
q.put("Bitfinex: ERROR")
|
||||
return empty_json
|
||||
|
||||
def fetch_gemini(q):
|
||||
## Gemini
|
||||
response = requests.get("https://api.gemini.com/v2/candles/btcusd/5m")
|
||||
|
||||
if response.status_code == 200: # check if the request was successful
|
||||
gemini_ohlc = response.json()
|
||||
candle_stick_data = {
|
||||
'exchange': 'gemini',
|
||||
'timestamp': gemini_ohlc[0][0],
|
||||
'open': gemini_ohlc[0][1],
|
||||
'high': gemini_ohlc[0][2],
|
||||
'low': gemini_ohlc[0][3],
|
||||
'close': gemini_ohlc[0][4],
|
||||
'volume_quote': 0, # not provided by Gemini API
|
||||
'volume_base': gemini_ohlc[0][5],
|
||||
'trades': 0, # not provided by Gemini API
|
||||
}
|
||||
gemini_json = json.dumps(candle_stick_data, indent=2)
|
||||
#print("Gemini: OK")
|
||||
#print(gemini_json)
|
||||
#q.put("Gemini: OK")
|
||||
return gemini_json
|
||||
else:
|
||||
print(f"Error fetching data from Gemini API: {response.status_code}")
|
||||
q.put("Gemini: ERROR")
|
||||
return empty_json
|
||||
|
||||
def write_dict_to_database(in_dict, connection):
|
||||
cursor = connection.cursor()
|
||||
# Use placeholders for the values in the INSERT statement
|
||||
insert_query = "INSERT INTO ohlc (exchange, timestamp, open, high, low, close, volume_quote, volume_base, trades) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)"
|
||||
|
||||
values = (in_dict['exchange'],
|
||||
in_dict['timestamp'],
|
||||
in_dict['open'],
|
||||
in_dict['high'],
|
||||
in_dict['low'],
|
||||
in_dict['close'],
|
||||
in_dict['volume_quote'],
|
||||
in_dict['volume_base'],
|
||||
in_dict['trades'])
|
||||
## apply lock while writing to database
|
||||
with database_lock:
|
||||
cursor.execute(insert_query, values)
|
||||
connection.commit()
|
||||
|
||||
def get_the_data(q):
|
||||
#cursor = db.cursor()
|
||||
while True:
|
||||
#logline = "Fetching at: " + str(time.time())
|
||||
#q.put(logline)
|
||||
db = sqlite3.connect(database)
|
||||
write_dict_to_database(json.loads(fetch_kraken()), db)
|
||||
write_dict_to_database(json.loads(fetch_bitfinex(q)), db)
|
||||
write_dict_to_database(json.loads(fetch_bitstamp(q)), db)
|
||||
write_dict_to_database(json.loads(fetch_gemini(q)), db)
|
||||
db.close()
|
||||
time.sleep(290)
|
||||
|
||||
|
||||
Checkthedatabase()
|
||||
|
||||
# Empty response json
|
||||
empty_dict = {"exchange": "", "timestamp": 0, "open": 0, "high": 0, "low": 0, "close": 0, "volume_quote": 0, "volume_base": 0, "trades": 0}
|
||||
empty_json = json.dumps(empty_dict)
|
||||
database_lock = threading.Lock()
|
||||
|
||||
fetch_thread = threading.Thread()
|
||||
def get_health():
|
||||
if fetch_thread.is_alive():
|
||||
return "Alive"
|
||||
else:
|
||||
return "Dead"
|
||||
|
||||
def start(q):
|
||||
logline = "Started at " + str(time.time())
|
||||
q.put(logline)
|
||||
global fetch_thread
# Thread target/args cannot be assigned after construction, so build the thread here
fetch_thread = threading.Thread(target=get_the_data, args=(q,), daemon=True)
fetch_thread.start()
|
||||
logline = "Fetcher ID: " + str(fetch_thread.ident) + " or " + str(fetch_thread.native_id)
|
||||
q.put(logline)
|
||||
|
||||
db = sqlite3.connect(database)
|
||||
lastID_andTime = db.execute("SELECT id, timestamp FROM ohlc LIMIT 1 OFFSET (SELECT COUNT(*) FROM ohlc) - 1").fetchall()
|
||||
#q.put(json.dumps(lastID_andTime, indent=2, separators=(',', ':')))
|
||||
q.put(json.dumps(lastID_andTime, separators=(',', ':')))
|
||||
|
||||
|
439
btc_tracker/kraken_fetch.py
Executable file
439
btc_tracker/kraken_fetch.py
Executable file
@ -0,0 +1,439 @@
|
||||
#!/usr/bin/python3
|
||||
"""
|
||||
Fetch BTCUSD OHLC data from a few marketplaces and serve it forward with a simple JSON API.
|
||||
|
||||
Creates: ./btc_ohlc.db
|
||||
serves: localhost:5000/[t] and /serverkey
|
||||
Authentication via auth header with signatures
|
||||
"""
|
||||
|
||||
import math
|
||||
import json
|
||||
import os
|
||||
import time
|
||||
import sys
|
||||
import sqlite3
|
||||
import binascii
|
||||
import threading
|
||||
from hashlib import sha256
|
||||
import requests
|
||||
import ecdsa
|
||||
import krakenex
|
||||
from flask import Flask, jsonify, request
|
||||
|
||||
# from Cryptodome.Cipher import AES
|
||||
|
||||
DATABASE = "btc_ohlc.db"
|
||||
KEYSFILE = "userkeys.json"
|
||||
app = Flask(__name__)
|
||||
|
||||
## Generate the ECDSA keys for this instance
|
||||
print("Generating ECDSA keys for this instance... just wait a bit...")
|
||||
server_private_key = ecdsa.SigningKey.generate(curve=ecdsa.SECP256k1)
|
||||
server_public_key = server_private_key.get_verifying_key()
|
||||
# We need the hexadecimal form for sharing over http/json
|
||||
server_public_key_hex = binascii.hexlify(server_public_key.to_string()).decode("utf-8")
|
||||
|
||||
|
||||
# Empty response json
|
||||
empty_dict = {
|
||||
"exchange": "",
|
||||
"timestamp": 0,
|
||||
"open": 0,
|
||||
"high": 0,
|
||||
"low": 0,
|
||||
"close": 0,
|
||||
"volume_quote": 0,
|
||||
"volume_base": 0,
|
||||
"trades": 0,
|
||||
}
|
||||
empty_json = json.dumps(empty_dict)
|
||||
|
||||
|
||||
def read_keys():
|
||||
"""
|
||||
Reads the declared KEYSFILE and returns "user_publickeys" from it.
|
||||
Returns: json object
|
||||
"""
|
||||
empty_userkeys = {
|
||||
"user_publickeys": {
|
||||
"user_name": "user_ecdsa_public key"
|
||||
}
|
||||
}
|
||||
# test if the file is there; we could make one if there is none
|
||||
try:
|
||||
# ASCII is a subset of UTF-8, so this should be quite safe...
|
||||
with open(KEYSFILE, "r", encoding='utf-8') as cfile:
|
||||
user_keys = json.load(cfile)
|
||||
|
||||
if 'user_publickeys' not in user_keys:
|
||||
print('Missing required property "user_publickeys" in config file')
|
||||
sys.exit(1)
|
||||
if user_keys == empty_userkeys:
|
||||
print('Your userkeys.json file seems not to be filled in. Please insert your key there.')
|
||||
sys.exit(1)
|
||||
|
||||
return user_keys["user_publickeys"]
|
||||
except FileNotFoundError:
|
||||
with open(KEYSFILE, "w", encoding='utf-8') as nfile:
|
||||
print('You seem to be lacking the users key file; created an example for you. Exiting.')
|
||||
json.dump(empty_userkeys, nfile, indent=2)
|
||||
sys.exit(1)
|
||||
|
||||
def check_database():
|
||||
"""
|
||||
Check the database for the 'ohlc' table.
|
||||
If the database file or the table does not exist, create them.
|
||||
"""
|
||||
if not os.path.exists(DATABASE):
|
||||
new_db = sqlite3.connect(DATABASE)
|
||||
new_db.execute(
|
||||
"""\
|
||||
CREATE TABLE ohlc (
|
||||
id INTEGER PRIMARY KEY,
|
||||
exchange TEXT NOT NULL,
|
||||
timestamp INTEGER NOT NULL,
|
||||
open REAL NOT NULL,
|
||||
high REAL NOT NULL,
|
||||
low REAL NOT NULL,
|
||||
close REAL NOT NULL,
|
||||
volume_quote REAL NOT NULL,
|
||||
volume_base REAL NOT NULL,
|
||||
trades INTEGER NOT NULL )"""
|
||||
)
|
||||
|
||||
new_db.commit()
|
||||
new_db.close()
|
||||
|
||||
new_db = sqlite3.connect(DATABASE)
|
||||
# Check if the table exists
|
||||
|
||||
table_exists = False
|
||||
cursor = new_db.execute("PRAGMA table_info(ohlc)")
|
||||
for row in cursor:
|
||||
table_exists = True
|
||||
|
||||
# Create the table if it doesn't exist
|
||||
if not table_exists:
|
||||
new_db.execute(
|
||||
"""\
|
||||
CREATE TABLE ohlc (
|
||||
id INTEGER PRIMARY KEY,
|
||||
exchange TEXT NOT NULL,
|
||||
timestamp INTEGER NOT NULL,
|
||||
open REAL NOT NULL,
|
||||
high REAL NOT NULL,
|
||||
low REAL NOT NULL,
|
||||
close REAL NOT NULL,
|
||||
volume_quote REAL NOT NULL,
|
||||
volume_base REAL NOT NULL,
|
||||
trades INTEGER NOT NULL )"""
|
||||
)
|
||||
new_db.commit()
|
||||
|
||||
|
||||
def fetch_kraken():
|
||||
"""
|
||||
Fetch BTCUSD OHLC data from Kraken in json.
|
||||
Returns:
|
||||
str: 5min OHLC data in JSON format.
|
||||
"""
|
||||
kraken = krakenex.API()
|
||||
|
||||
response = kraken.query_public("OHLC", {"pair": "BTCUSD", "interval": 240})
|
||||
ohlc_data = response["result"]["XXBTZUSD"]
|
||||
|
||||
candle_stick_data = {
|
||||
"exchange": "kraken",
|
||||
"timestamp": ohlc_data[1][0],
|
||||
"open": ohlc_data[0][1],
|
||||
"high": max(item[2] for item in ohlc_data),
|
||||
"low": min(item[3] for item in ohlc_data),
|
||||
"close": ohlc_data[-1][4],
|
||||
"volume_quote": sum(float(item[5]) for item in ohlc_data),
|
||||
"volume_base": sum(float(item[6]) for item in ohlc_data),
|
||||
"trades": sum(item[7] for item in ohlc_data),
|
||||
}
|
||||
|
||||
kraken_json = json.dumps(candle_stick_data, indent=2)
|
||||
return kraken_json
|
||||
|
||||
|
||||
def fetch_bitstamp():
|
||||
"""
|
||||
Fetch Bitstamp data and serve it as JSON.
|
||||
Returns:
|
||||
str: 5min OHLC data in JSON format.
|
||||
"""
|
||||
response = requests.get(
|
||||
"https://www.bitstamp.net/api/v2/ohlc/btcusd/?step=300&limit=1"
|
||||
)
|
||||
|
||||
if response.status_code == 200: # check if the request was successful
|
||||
bitstamp_data = response.json()
|
||||
ohlc_data = bitstamp_data["data"]["ohlc"]
|
||||
|
||||
# the Bitstamp API gives the timestamp with decimals for sub-second accuracy.
|
||||
bitstamp_timestamp = int(math.floor(float(ohlc_data[0]["timestamp"])))
|
||||
|
||||
candle_stick_data = {
|
||||
"exchange": "bitstamp",
|
||||
"timestamp": bitstamp_timestamp,
|
||||
"open": float(ohlc_data[0]["open"]),
|
||||
"high": float(ohlc_data[0]["high"]),
|
||||
"low": float(ohlc_data[0]["low"]),
|
||||
"close": float(ohlc_data[0]["close"]),
|
||||
"volume_quote": float(ohlc_data[0]["volume"]),
|
||||
"volume_base": 0, # not provided by Bitstamp API
|
||||
"trades": 0, # not provided by Bitstamp API
|
||||
}
|
||||
|
||||
bitstamp_json = json.dumps(candle_stick_data, indent=2)
|
||||
return bitstamp_json
|
||||
# if we get anything other than HTTP 200
|
||||
print(f"Error fetching data from Bitstamp API: {response.status_code}")
|
||||
return empty_json
|
||||
|
||||
|
||||
def fetch_bitfinex():
|
||||
"""
|
||||
Bitfinex
|
||||
Returns:
|
||||
str: 5min OHLC data in JSON format.
|
||||
"""
|
||||
response = requests.get(
|
||||
"https://api-pub.bitfinex.com/v2/candles/trade:5m:tBTCUSD/last"
|
||||
)
|
||||
|
||||
if response.status_code == 200: # check if the request was successful
|
||||
ohlc_data = response.json()
|
||||
candle_stick_data = {
|
||||
"exchange": "bitfinex",
|
||||
"timestamp": ohlc_data[0],
|
||||
"open": ohlc_data[1],
|
||||
"high": ohlc_data[2],
|
||||
"low": ohlc_data[3],
|
||||
"close": ohlc_data[4],
|
||||
"volume_quote": ohlc_data[5],
|
||||
"volume_base": 0, # not provided by Bitfinex API
|
||||
"trades": 0, # not provided by Bitfinex API
|
||||
}
|
||||
|
||||
bitfinex_json = json.dumps(candle_stick_data, indent=2)
|
||||
return bitfinex_json
|
||||
# if we get anything other than HTTP 200
|
||||
print(f"Error fetching data from Bitfinex API: {response.status_code}")
|
||||
return empty_json
|
||||
|
||||
|
||||
def fetch_gemini():
|
||||
"""
|
||||
Fetch BTCUSD OHLC data from Gemini
|
||||
Returns:
|
||||
str: 5min OHLC data in JSON format.
|
||||
"""
|
||||
response = requests.get("https://api.gemini.com/v2/candles/btcusd/5m")
|
||||
|
||||
if response.status_code == 200: # check if the request was successful
|
||||
gemini_ohlc = response.json()
|
||||
candle_stick_data = {
|
||||
"exchange": "gemini",
|
||||
"timestamp": gemini_ohlc[0][0],
|
||||
"open": gemini_ohlc[0][1],
|
||||
"high": gemini_ohlc[0][2],
|
||||
"low": gemini_ohlc[0][3],
|
||||
"close": gemini_ohlc[0][4],
|
||||
"volume_quote": 0, # not provided by Gemini API
|
||||
"volume_base": gemini_ohlc[0][5],
|
||||
"trades": 0, # not provided by Gemini API
|
||||
}
|
||||
gemini_json = json.dumps(candle_stick_data, indent=2)
|
||||
return gemini_json
|
||||
# if we get anything other than HTTP 200
|
||||
print(f"Error fetching data from Gemini API: {response.status_code}")
|
||||
return empty_json
|
||||
|
||||
|
||||
def fetch_bybit():
|
||||
"""
|
||||
Fetch BTCUSD OHLC data from Bybit
|
||||
Returns:
|
||||
str: 5min OHLC data in JSON format.
|
||||
"""
|
||||
base_url = (
|
||||
"https://api.bybit.com/v2/public/kline/list?symbol=BTCUSD&interval=5&from="
|
||||
)
|
||||
current_unixtime = int(time.time())
|
||||
last_minute = math.floor(current_unixtime / 60)
|
||||
last_minute_unixtime = str(last_minute * 60 - 300)
|
||||
query_url = "".join([base_url, last_minute_unixtime])
|
||||
response = requests.get(query_url)
|
||||
|
||||
if response.status_code == 200: # check if the request was successful
|
||||
bybit_ohlc = response.json()
|
||||
candle_stick_data = {
|
||||
"exchange": "bybit",
|
||||
"timestamp": bybit_ohlc["result"][0]["open_time"],
|
||||
"open": bybit_ohlc["result"][0]["open"],
|
||||
"high": bybit_ohlc["result"][0]["high"],
|
||||
"low": bybit_ohlc["result"][0]["low"],
|
||||
"close": bybit_ohlc["result"][0]["close"],
|
||||
"volume_quote": bybit_ohlc["result"][0]["volume"],
|
||||
"volume_base": bybit_ohlc["result"][0]["turnover"],
|
||||
"trades": 0,
|
||||
}
|
||||
bybit_json = json.dumps(candle_stick_data, indent=2)
|
||||
return bybit_json
|
||||
# if we get anything other than HTTP 200
|
||||
print(f"Error fetching data from Bybit API: {response.status_code}")
|
||||
return empty_json
|
||||
|
||||
|
||||
def write_dict_to_database(in_dict, connection):
|
||||
"""
|
||||
Writes given dict to given database.
|
||||
Arguments: dict, db.connection()
|
||||
Uses shared global database_lock.
|
||||
"""
|
||||
cursor = connection.cursor()
|
||||
# use placeholders for the values in the insert statement
|
||||
insert_query = "insert into ohlc (exchange, timestamp, open, high, low, close, volume_quote, volume_base, trades) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)"
|
||||
|
||||
values = (
|
||||
in_dict["exchange"],
|
||||
in_dict["timestamp"],
|
||||
in_dict["open"],
|
||||
in_dict["high"],
|
||||
in_dict["low"],
|
||||
in_dict["close"],
|
||||
in_dict["volume_quote"],
|
||||
in_dict["volume_base"],
|
||||
in_dict["trades"],
|
||||
)
|
||||
## apply lock while writing to database
|
||||
with database_lock:
|
||||
cursor.execute(insert_query, values)
|
||||
connection.commit()
|
||||
|
||||
|
||||
def get_the_data():
|
||||
"""
|
||||
Creates an infinite loop to fetch OHLC data and save it to the database.
|
||||
"""
|
||||
while True:
|
||||
ohlc_db = sqlite3.connect(DATABASE)
|
||||
write_dict_to_database(json.loads(fetch_kraken()), ohlc_db)
|
||||
write_dict_to_database(json.loads(fetch_bitfinex()), ohlc_db)
|
||||
write_dict_to_database(json.loads(fetch_bitstamp()), ohlc_db)
|
||||
write_dict_to_database(json.loads(fetch_gemini()), ohlc_db)
|
||||
write_dict_to_database(json.loads(fetch_bybit()), ohlc_db)
|
||||
ohlc_db.close()
|
||||
print("fetches done at", time.time(), "sleeping now for 290")
|
||||
time.sleep(290)
|
||||
|
||||
|
||||
def check_auth(text, signature):
|
||||
"""
|
||||
Check signatures against known public keys
|
||||
Arguments: text, signature
|
||||
Reads: Global public user_publickeys dict.
|
||||
Returns: True / False
|
||||
"""
|
||||
## Make bytes-object from given signature
|
||||
sig_bytes = bytes.fromhex(signature)
|
||||
## Iterate over all user keys to determine who we are talking to and whether they should have access
|
||||
for key, value in user_publickeys.items():
|
||||
## Create a bytes object from the public key in the 'value' variable
|
||||
## and use it to create VerifyingKey (vk)
|
||||
public_key_bytes = bytes.fromhex(value)
|
||||
verifying_key = ecdsa.VerifyingKey.from_string(
|
||||
public_key_bytes, curve=ecdsa.SECP256k1
|
||||
)
|
||||
try:
|
||||
verifying_key.verify(sig_bytes, bytes(text, "utf-8"))
|
||||
print("user is", key)
|
||||
|
||||
return True
|
||||
except ecdsa.BadSignatureError:
|
||||
return False
|
||||
|
||||
|
||||
@app.route("/")
|
||||
def get_data():
|
||||
"""
|
||||
Serve the data from the database. Limit the responses by given timestamp.
|
||||
The pretty thing is under consideration...
|
||||
"""
|
||||
# Get the time (t) argument from the URL
|
||||
query_timestamp = request.args.get("t")
|
||||
# Should we make output pretty for curl users?
|
||||
query_pretty = request.args.get("pretty")
|
||||
|
||||
# Authentication header: the query URL signed with the user's private key
|
||||
signature = request.headers.get("auth")
|
||||
get_url = request.url
|
||||
if not check_auth(get_url, signature):
|
||||
return "Access denied! Check your keys, maybe.", 403
|
||||
|
||||
with database_lock:
|
||||
btc_db = sqlite3.connect(DATABASE)
|
||||
if query_timestamp:
|
||||
rows = btc_db.execute(
|
||||
"SELECT exchange, timestamp, open, high, low, close FROM ohlc WHERE timestamp > ? ORDER BY timestamp",
|
||||
(query_timestamp,),
|
||||
).fetchall()
|
||||
else:
|
||||
rows = btc_db.execute(
|
||||
"SELECT exchange, timestamp, open, high, low, close FROM ohlc ORDER BY timestamp"
|
||||
).fetchall()
|
||||
query_timestamp = 0
|
||||
|
||||
data = {"timestamp": time.time(), "rows": rows}
|
||||
|
||||
# make sha256 checksum and append it to the data object
|
||||
data_shasum = sha256(json.dumps(data).encode("utf-8")).hexdigest()
|
||||
updated_data = {"shasum": data_shasum}
|
||||
updated_data.update(data)
|
||||
data = updated_data
|
||||
|
||||
# sign the response
|
||||
signature = server_private_key.sign(json.dumps(data).encode("utf-8"))
|
||||
signature_hex = binascii.hexlify(signature).decode("utf-8")
|
||||
data["signature"] = signature_hex
|
||||
|
||||
if query_pretty:
|
||||
response = json.dumps(data, indent=2, separators=(";\n", " :"))
|
||||
else:
|
||||
response = json.dumps(data)
|
||||
return response, 200, {"Content-Type": "application/json"}
|
||||
|
||||
|
||||
@app.route("/serverkey")
|
||||
def give_serverkey():
|
||||
"""
|
||||
Serve the public key of this instance to the world.
|
||||
"""
|
||||
## This endpoint also under Authentication?
|
||||
signature = request.headers.get("auth")
|
||||
get_url = request.url
|
||||
if not check_auth(get_url, signature):
|
||||
return "Access denied! Check your keys, maybe.", 403
|
||||
|
||||
return jsonify({"public_key": server_public_key_hex})
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
# Make sanity checks for the database
|
||||
check_database()
|
||||
database_lock = threading.Lock()
|
||||
|
||||
# Get the users public keys
|
||||
user_publickeys = read_keys()
|
||||
|
||||
# Start the data-fetching backend process
|
||||
fetch_thread = threading.Thread(target=get_the_data)
|
||||
fetch_thread.daemon = True
|
||||
fetch_thread.start()
|
||||
|
||||
# Start the Flask app
|
||||
app.run()
|
42
btc_tracker/kraken_fetch_client.py
Normal file
42
btc_tracker/kraken_fetch_client.py
Normal file
@ -0,0 +1,42 @@
|
||||
#!/usr/bin/python3
|
||||
"""
|
||||
client for kraken_fetch.py
|
||||
"""
|
||||
import requests
|
||||
import ecdsa
|
||||
import sqlite3
|
||||
|
||||
config_file = "./kraken_fetch_client.conf"
|
||||
# Just for testing and early development
|
||||
private_key = '039e1c137aa296d7af0cd55b468018ad1020949c2731e5141d032b8371490f48'
|
||||
|
||||
|
||||
def read_config():
|
||||
"""
|
||||
read $config_file and returns users private key
|
||||
"""
|
||||
|
||||
return private_key
|
||||
|
||||
def get_server_public_key(url):
|
||||
"""
|
||||
fetches the servers public key
|
||||
"""
|
||||
rurl = url+'/serverkey'
|
||||
response = requests.get(rurl)
|
||||
if response.status_code == 200: # if the fetch was success...
|
||||
server_public_key = response.content
|
||||
return server_public_key
|
||||
# when the fetch was not successful
|
||||
print(f"Error fetching data from the server.")
|
||||
return 'Error'
|
||||
|
||||
def fetch_data_from_the_server(url):
|
||||
"""
|
||||
query the kraken_fetch server for new data
|
||||
"""
|
||||
# we need to do some time calculations here...
|
||||
|
||||
response = requests.get(url)
|
||||
|
||||
return response
|
24
btc_tracker/sources.json
Normal file
24
btc_tracker/sources.json
Normal file
@ -0,0 +1,24 @@
|
||||
{
|
||||
"exchanges": [
|
||||
{
|
||||
"name": "Bitstamp",
|
||||
"url": "https://www.bitstamp.net/api/v2/ohlc/btcusd/?step=300&limit=1",
|
||||
"freq": "5"
|
||||
},
|
||||
{
|
||||
"name": "Kraken",
|
||||
"url": "https://api.kraken.com/0/public/OHLC?pair=XBTUSD&interval=240",
|
||||
"freq": "5"
|
||||
},
|
||||
{
|
||||
"name": "Bitfinex",
|
||||
"url": "https://api-pub.bitfinex.com/v2/candles/trade:5m:tBTCUSD/last",
|
||||
"freq": "5"
|
||||
},
|
||||
{
|
||||
"name": "Gemini",
|
||||
"url": "https://api.gemini.com/v2/candles/btcusd/5m",
|
||||
"freg": "5"
|
||||
}
|
||||
]
|
||||
}
|
31
btc_tracker/the_server.py
Executable file
31
btc_tracker/the_server.py
Executable file
@ -0,0 +1,31 @@
|
||||
#!/usr/bin/python3
|
||||
import fetcher
|
||||
import time
|
||||
from queue import Queue
|
||||
from flask import Flask
|
||||
|
||||
|
||||
# Create a queue to get some info how the fetcher is doing
|
||||
q = Queue()
|
||||
# Start the data collecting
|
||||
fetcher.start(q)
|
||||
|
||||
# Initialize the Flask app
|
||||
app = Flask(__name__)
|
||||
|
||||
@app.route("/")
|
||||
def root():
|
||||
if not q.empty():
|
||||
data = q.get()
|
||||
return(str(data))
|
||||
else:
|
||||
return("Fetcher message queue is empty")
|
||||
|
||||
@app.route("/fetcher_health")
|
||||
def fetcher_health():
|
||||
health = fetcher.get_health()
|
||||
return(str(health))
|
||||
|
||||
# Run the app
|
||||
if __name__ == "__main__":
|
||||
app.run()
|
5
btc_tracker/userkeys.json
Normal file
5
btc_tracker/userkeys.json
Normal file
@ -0,0 +1,5 @@
|
||||
{
|
||||
"user_publickeys": {
|
||||
"user1": "f1debc13fb21fe0eee54525aa4f8aae5733b201c755edaa55f8893c90aa375b261a62eaa3110651ac5d7705d402581256a37508b0a1ca28bd919ea44710d9c88"
|
||||
}
|
||||
}
|
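The public key values in userkeys.json are raw 64-byte SECP256k1 points in hex, which is what check_auth() in kraken_fetch.py expects. A minimal sketch for producing a matching pair, analogous to the repo's gen-ecdsa-private-key-v2.py / gen-public-key.py scripts but emitting the raw form:

#!/usr/bin/python3
import ecdsa

# generate a signing key; keep the private part on the client side
sk = ecdsa.SigningKey.generate(curve=ecdsa.SECP256k1)
print("private key:", sk.to_string().hex())
# paste this raw public key hex as the value in userkeys.json
print("public key: ", sk.verifying_key.to_string().hex())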
66
btc_wallets/generate_btc_wallet_kdbx.py
Executable file
66
btc_wallets/generate_btc_wallet_kdbx.py
Executable file
@ -0,0 +1,66 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Bitcoin Wallet Generation Script
|
||||
|
||||
This script generates a Bitcoin wallet and stores it in a KeePassXC database file (KDBX format).
|
||||
The wallet includes a BIP-0039 mnemonic seed phrase, Bitcoin address, and a QR code representation
|
||||
of the seed phrase. The database file is password protected to ensure the security of the wallet.
|
||||
|
||||
Usage: ./generate_btc_wallet.py
|
||||
"""
|
||||
|
||||
import gc
|
||||
from getpass import getpass
|
||||
from mnemonic import Mnemonic
|
||||
from bitcoinlib.keys import HDKey
|
||||
from pykeepass import create_database
|
||||
|
||||
MNEMONIC_STRENGTH=256
|
||||
SUBKEY_PATH="m/0/0"
|
||||
|
||||
# Generate a BIP-0039 mnemonic seed phrase
|
||||
mnemonic = Mnemonic("english")
|
||||
SEED_PHRASE = mnemonic.generate(strength=MNEMONIC_STRENGTH)
|
||||
|
||||
# Derive the HDKey from the seed phrase
|
||||
hd_key = HDKey.from_passphrase(SEED_PHRASE)
|
||||
|
||||
# Derive the Bitcoin address from the HDKey
|
||||
child_key = hd_key.subkey_for_path(SUBKEY_PATH)
|
||||
address = child_key.address()
|
||||
|
||||
# Prompt for custom name for the wallet
|
||||
wallet_name = input(
|
||||
"Whould you like to name this wallet? (empty for using the address as name): "
|
||||
).strip()
|
||||
|
||||
# Create the database filename with the wallet number
|
||||
if wallet_name == "":
|
||||
wallet_name = address
|
||||
else:
|
||||
wallet_name = wallet_name[:100].strip()
|
||||
|
||||
db_filename = f"{wallet_name}.kdbx"
|
||||
|
||||
# Prompt the user for the passphrase
|
||||
passphrase = getpass("Enter passphrase for the KeePassXC database: ")
|
||||
|
||||
try:
|
||||
# Create a KeePassXC database file
|
||||
db = create_database(db_filename, password=passphrase)
|
||||
|
||||
# Create entries in the root group
|
||||
address_and_path_entry = db.add_entry(db.root_group, wallet_name, username=address, password=SUBKEY_PATH)
|
||||
seed_phrase_entry = db.add_entry(db.root_group, "Bitcoin Master Seed", username="put me in safe", password=SEED_PHRASE)
|
||||
|
||||
# Save the database
|
||||
db.save()
|
||||
except Exception as e:
|
||||
print("Error while creating keepassxc database. Disk full? Readonly?")
|
||||
finally:
|
||||
del SEED_PHRASE, address, hd_key, passphrase, mnemonic
|
||||
gc.collect()
|
||||
|
||||
print("---")
|
||||
print("Bitcoin address was successfully created. You can find at: " + db_filename)
|
||||
print("---")
|
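The docstring and requirements.txt mention a QR code representation of the seed phrase, but the script above never renders one. A minimal sketch (hypothetical placement: it would have to run before the finally block clears SEED_PHRASE) using the pinned qrcode package:

import qrcode

# render the seed phrase as a QR code image next to the .kdbx file
# (needs Pillow installed for PNG output)
qr_image = qrcode.make(SEED_PHRASE)
qr_image.save(f"{wallet_name}-seed.png")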
4
btc_wallets/requirements.txt
Normal file
4
btc_wallets/requirements.txt
Normal file
@ -0,0 +1,4 @@
|
||||
bitcoinlib==0.6.10
|
||||
mnemonic==0.19
|
||||
pykeepass==4.0.4
|
||||
qrcode==7.4.2
|
100
chat.html
Normal file
100
chat.html
Normal file
@ -0,0 +1,100 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<style>
|
||||
.chat-history {
|
||||
height: 400px;
|
||||
width: 33%;
|
||||
overflow-y: scroll;
|
||||
border: 1px solid black;
|
||||
padding: 10px;
|
||||
margin: 10px;
|
||||
float: middle;
|
||||
}
|
||||
.chat-input {
|
||||
width: 33%;
|
||||
padding: 10px;
|
||||
margin: 10px;
|
||||
float: middle;
|
||||
}
|
||||
.user-info {
|
||||
float: left;
|
||||
width: 33%;
|
||||
text-align: left;
|
||||
margin: 10px;
|
||||
height: 410px;
|
||||
}
|
||||
.user-info button {
|
||||
display: block;
|
||||
margin: 10px 0;
|
||||
}
|
||||
.chat-members {
|
||||
width: 100%;
|
||||
text-align: left;
|
||||
margin: 10px;
|
||||
border: 1px solid black;
|
||||
padding: 10px;
|
||||
height: 400px;
|
||||
overflow-y: scroll;
|
||||
float: right;
|
||||
}
|
||||
.clearfix::after {
|
||||
content: "";
|
||||
clear: both;
|
||||
display: table;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<div class="chat-history" id="chatHistory"></div>
|
||||
<div class="chat-input">
|
||||
<textarea class="chat-input" id="chatInput"></textarea>
|
||||
</div>
|
||||
<div class="user-info">
|
||||
<p id="userId">User ID: <span id="userIdValue">null</span></p>
|
||||
<button id="generateIdButton">Generate ID</button>
|
||||
<button id="saveNicknameButton">Save Nickname</button>
|
||||
<div class="chat-members" id="chatMembers"></div>
|
||||
</div>
|
||||
<div class="clearfix"></div>
|
||||
<script>
|
||||
// generate a unique user id
|
||||
document.getElementById("generateIdButton").addEventListener("click", function() {
|
||||
const userIdValue = Date.now();
|
||||
document.getElementById("userIdValue").innerHTML = userIdValue;
|
||||
});
|
||||
|
||||
// save interlocutor id to nickname
|
||||
document.getElementById("saveNicknameButton").addEventListener("click", function() {
|
||||
// your code here
|
||||
});
|
||||
|
||||
// update chat history
|
||||
function updateChatHistory(message) {
|
||||
const chatHistory = document.getElementById("chatHistory");
|
||||
chatHistory.innerHTML += `<p>${message}</p>`;
|
||||
chatHistory.scrollTop = chatHistory.scrollHeight;
|
||||
}
|
||||
|
||||
// update chat members list
|
||||
function updateChatMembers(members) {
|
||||
const chatMembers = document.getElementById("chatMembers");
|
||||
chatMembers.innerHTML = "";
|
||||
members.forEach(member => {
|
||||
chatMembers.innerHTML += `<p>${member}</p>`;
|
||||
});
|
||||
}
|
||||
|
||||
// handle enter key press in chat input
|
||||
document.getElementById("chatInput").addEventListener("keydown", function(event) {
|
||||
if (event.key === "Enter") {
|
||||
const message = document.getElementById("chatInput").value;
|
||||
updateChatHistory(message);
|
||||
document.getElementById("chatInput").value = "";
|
||||
}
|
||||
});
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
|
||||
|
17
conver-iso-to-utf8.py
Executable file
17
conver-iso-to-utf8.py
Executable file
@ -0,0 +1,17 @@
|
||||
#!/usr/bin/python3
|
||||
import sys
|
||||
import io
|
||||
|
||||
def convert_iso8859_to_utf8(filepath):
|
||||
# open the file with ISO-8859-1 encoding
|
||||
with io.open(filepath, 'r', encoding='iso-8859-1') as f:
|
||||
# read the file's content
|
||||
content = f.read()
|
||||
# write the UTF-8 encoded content to a new file
|
||||
with io.open(filepath, 'w', encoding='utf-8') as f:
|
||||
f.write(content)
|
||||
print(f"{filepath} has been converted to UTF-8.")
|
||||
|
||||
|
||||
filepath = sys.argv[1]
|
||||
convert_iso8859_to_utf8(filepath)
|
25
data-arbiter-todo
Normal file
25
data-arbiter-todo
Normal file
@ -0,0 +1,25 @@
|
||||
from flask import Flask
|
||||
import sqlite3
import json
|
||||
|
||||
app = Flask(__name__)
|
||||
|
||||
# Create a route for the web server
|
||||
@app.route('/')
|
||||
def serve_data():
|
||||
# Connect to the database
|
||||
db = sqlite3.connect("btc_timeseries.db")
|
||||
|
||||
# Fetch the data from the database
|
||||
cursor = db.execute("SELECT * FROM timeseries")
|
||||
data = cursor.fetchall()
|
||||
|
||||
# Convert the data to JSON format
|
||||
data_json = json.dumps(data)
|
||||
|
||||
# Return the data as a response to the request
|
||||
return data_json
|
||||
|
||||
if __name__ == '__main__':
|
||||
# Run the web server
|
||||
app.run()
|
||||
|
88
data-arbiter.py
Executable file
88
data-arbiter.py
Executable file
@ -0,0 +1,88 @@
|
||||
#!/usr/bin/python3
|
||||
|
||||
import os, time
|
||||
import json, math
|
||||
import sqlite3
|
||||
import requests
|
||||
from datetime import datetime, timezone
|
||||
|
||||
def Checkthedatabase():
|
||||
## Some sanity for the database
|
||||
# check if btc_timeseries.db database file exists
|
||||
if not os.path.exists("btc_timeseries.db"):
|
||||
db = sqlite3.connect("btc_timeseries.db")
|
||||
db.execute("CREATE TABLE timeseries (timestamp INTEGER, value REAL, mins INTEGER, source TEXT)")
|
||||
db.commit()
|
||||
|
||||
db = sqlite3.connect("btc_timeseries.db")
|
||||
|
||||
# Check if the table exists
|
||||
table_exists = False
|
||||
cursor = db.execute("PRAGMA table_info(timeseries)")
|
||||
for row in cursor:
|
||||
table_exists = True
|
||||
|
||||
# Create the table if it doesn't exist
|
||||
if not table_exists:
|
||||
db.execute("CREATE TABLE timeseries (timestamp INTEGER, value REAL, mins INTEGER, source TEXT)")
|
||||
db.commit()
|
||||
|
||||
db.close()
|
||||
|
||||
def Getdata():
|
||||
#fetch the price data
|
||||
source = 'http://api.binance.com/api/v3/avgPrice'
|
||||
payload = {'symbol': 'BTCUSDT'}
|
||||
response = requests.get(source, params=payload)
|
||||
|
||||
#get the usd_value
|
||||
json_data = response.json()
|
||||
usd_value = json_data['price']
|
||||
mins = json_data['mins']
|
||||
|
||||
### Insert the USD value into the database
|
||||
db.execute("INSERT INTO timeseries (timestamp, value, mins, source) VALUES (datetime('now'), ?, ?, ?)", (usd_value, mins, source))
|
||||
|
||||
## Save the changes to the database
|
||||
db.commit()
|
||||
#print(db.execute("SELECT * FROM timeseries"))
|
||||
|
||||
def Calculatetimesince():
|
||||
cursor = db.execute("SELECT * FROM timeseries ORDER BY timestamp DESC LIMIT 1")
|
||||
stuff = cursor.fetchall()
|
||||
|
||||
timestamp = stuff[0][0]
|
||||
mins = stuff[0][2]
|
||||
|
||||
timenow = datetime.now(timezone.utc).strftime("%Y-%m-%d %H:%M:%S")
|
||||
dt1 = datetime.strptime(timenow, "%Y-%m-%d %H:%M:%S")
|
||||
dt2 = datetime.strptime(timestamp, "%Y-%m-%d %H:%M:%S")
|
||||
delta = dt1 - dt2
|
||||
minutedelta = divmod(delta.total_seconds(), 60)
|
||||
minutessince = math.trunc(minutedelta[0])
|
||||
|
||||
|
||||
# if minutes since last run is larger than mins we should get new data
|
||||
print(minutessince, ' - ', mins)
|
||||
if minutessince <= mins:
|
||||
return 0
|
||||
else:
|
||||
return 1
|
||||
|
||||
Checkthedatabase()
|
||||
|
||||
db = sqlite3.connect("btc_timeseries.db")
|
||||
Getdata()
|
||||
|
||||
while True:
|
||||
# Check if it's time to run again
|
||||
mayberun = Calculatetimesince()
|
||||
# we go on 1
|
||||
print(datetime.now(), 'We go on 1, should we go:', mayberun)
|
||||
if mayberun == 1:
|
||||
Getdata()
|
||||
|
||||
time.sleep(20)
|
||||
|
||||
db.close()
|
||||
exit(0)
|
59
demo-client.py
Executable file
59
demo-client.py
Executable file
@ -0,0 +1,59 @@
|
||||
#!/usr/bin/python3
|
||||
|
||||
import requests
|
||||
from hashlib import sha256
|
||||
import ecdsa
|
||||
|
||||
#private_key = '03486537091ceb021fb313e5cf3eb04d44ca2f19f72112a1'
|
||||
private_key = '039e1c137aa296d7af0cd55b468018ad1020949c2731e5141d032b8371490f48'
|
||||
|
||||
# Generate SK from the private key
|
||||
private_key_int = int(private_key, 16)
|
||||
sk = ecdsa.SigningKey.from_secret_exponent(private_key_int, curve=ecdsa.SECP256k1)
|
||||
|
||||
## Get the server public key
|
||||
url = 'http://localhost:5000/serverkey'
|
||||
|
||||
# sign the message
|
||||
signature = sk.sign(url.encode('utf-8'))
|
||||
signature_hex = signature.hex()
|
||||
|
||||
response = requests.get(url, headers={"auth":signature_hex})
|
||||
print('>>> ', response.status_code)
|
||||
print('>>> ', response.content)
|
||||
|
||||
|
||||
|
||||
## Get some kline data from the server
|
||||
url = 'http://localhost:5000/?t=1672259440'
|
||||
|
||||
# sign the message
|
||||
signature = sk.sign(url.encode('utf-8'))
|
||||
signature_hex = signature.hex()
|
||||
|
||||
print('we signed: ', url)
|
||||
print('We will send:')
|
||||
print('to: ', url)
|
||||
print('auth: ', signature_hex)
|
||||
print('------------------------')
|
||||
|
||||
response = requests.get(url, headers={"auth":signature_hex})
|
||||
print('>>> ', response.status_code)
|
||||
print('>>> ', response.content)
|
||||
|
||||
##
|
||||
##bytes_public_key = bytes.fromhex(ecdsa_public_key)
|
||||
##
|
||||
##bytes_signed_data = signature_hex.encode('utf-8')
|
||||
##
|
||||
##
|
||||
##vk = ecdsa.VerifyingKey.from_string(bytes_public_key, curve=ecdsa.SECP256k1)
|
||||
##
|
||||
##if vk.verify(signature_hex, unsigned_data):
|
||||
## response = "YES"
|
||||
##else:
|
||||
## response = "NO"
|
||||
##
|
||||
##
|
||||
|
||||
exit(0)
|
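A sketch of the response verification that is commented out above, assuming the non-pretty JSON layout kraken_fetch.py produces (shasum/timestamp/rows plus a trailing signature field):

import json
import binascii
import ecdsa

def verify_server_response(response_body: bytes, server_public_key_hex: str) -> bool:
    # the server signs the JSON without the "signature" field, then appends it
    data = json.loads(response_body)
    signature = binascii.unhexlify(data.pop("signature"))
    vk = ecdsa.VerifyingKey.from_string(
        bytes.fromhex(server_public_key_hex), curve=ecdsa.SECP256k1
    )
    try:
        return vk.verify(signature, json.dumps(data).encode("utf-8"))
    except ecdsa.BadSignatureError:
        return False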
49
encryption-on-apis/ecdsa-example-test.py
Executable file
49
encryption-on-apis/ecdsa-example-test.py
Executable file
@ -0,0 +1,49 @@
|
||||
#!/usr/bin/python3
|
||||
|
||||
import ecdsa, binascii, sys
|
||||
from hashlib import sha256
|
||||
|
||||
message = 'Hello public/private key world!'
|
||||
hashed_message = sha256(message.encode('utf-8')).hexdigest()
|
||||
|
||||
m = message + hashed_message
|
||||
print('')
|
||||
print('to be signed: ', m)
|
||||
to_be_signed = binascii.hexlify(m.encode('utf-8'))
|
||||
|
||||
# Get the keys in their raw format
|
||||
signing_key = ecdsa.SigningKey.generate()
|
||||
public_key = signing_key.verifying_key
|
||||
|
||||
signature = signing_key.sign(to_be_signed)
|
||||
signature_hex = signature.hex()
|
||||
|
||||
# deform_signature if deform argument is given
|
||||
if len(sys.argv) > 1:
|
||||
if sys.argv[1] == 'deform':
|
||||
max_id = len(signature)
|
||||
for i in range(max_id):
|
||||
if i + 2 <= max_id:
|
||||
mess_id = i + 2
|
||||
mess = signature[mess_id].to_bytes(4, 'big')
|
||||
replace_me = signature[i].to_bytes(4, 'big')
|
||||
#print('>>> replacing ', replace_me, ' with ', mess )
|
||||
print('>>> ', i, 'to', mess_id, ', max is: ', max_id)
|
||||
signature = signature.replace(replace_me, mess )
|
||||
|
||||
print('signed: ', signature_hex)
|
||||
print('')
|
||||
|
||||
try:
|
||||
is_valid = public_key.verify(signature, to_be_signed)
|
||||
except ecdsa.keys.BadSignatureError:
|
||||
is_valid = False
|
||||
print('Something bad is on foot')
|
||||
|
||||
if is_valid:
|
||||
print('This is COOL')
|
||||
else:
|
||||
print('Something bad is on foot')
|
||||
|
||||
exit(0)
|
||||
|
60
encryption-on-apis/flask-encrypted-api.py
Executable file
60
encryption-on-apis/flask-encrypted-api.py
Executable file
@ -0,0 +1,60 @@
|
||||
#!/usr/bin/python3
|
||||
|
||||
from flask import Flask
|
||||
from flask import request
|
||||
import ecdsa
|
||||
import codecs
|
||||
|
||||
ecdsa_public_key = '8716c78c09a4e4571a3112eca1c7ddce41289e20da446894b621f2a11ba91bc963f2e9fb9ddd5552c26faf814bc582b4'
|
||||
#ecdsa_public_key = '048716c78c09a4e4571a3112eca1c7ddce41289e20da446894b621f2a11ba91bc963f2e9fb9ddd5552c26faf814bc582b4'
|
||||
app = Flask(__name__)
|
||||
|
||||
@app.route("/get/<id>", methods=['get'])
|
||||
def get(id):
|
||||
|
||||
r_id = id
|
||||
r_sum = request.args.get('sum')
|
||||
r_auth = request.headers.get('auth')
|
||||
|
||||
|
||||
print('---------------------------')
|
||||
print('host: ', request.host)
|
||||
print('full_path: ', request.full_path)
|
||||
print('---------------------------')
|
||||
print('id: ', r_id)
|
||||
print('sum: ', r_sum)
|
||||
print('header, auth:', r_auth)
|
||||
|
||||
signed_data = request.host + request.full_path
|
||||
|
||||
print('might have been signed: ', signed_data)
|
||||
r_auth_bytes = bytes.fromhex(str(r_auth))
|
||||
|
||||
#x_coord = ecdsa_public_key[:64]
|
||||
#y_coord = ecdsa_public_key[64:]
|
||||
#
|
||||
#if int(y_coord, 16) % 2 == 0:
|
||||
# prefix = b'\x02'
|
||||
#else:
|
||||
# prefix = b'\x03'
|
||||
#
|
||||
#bytes_public_key = prefix + codecs.decode(x_coord, 'hex')
|
||||
|
||||
bytes_public_key = bytes.fromhex(ecdsa_public_key)
|
||||
|
||||
|
||||
bytes_signed_data = signed_data.encode('utf-8')
|
||||
|
||||
|
||||
vk = ecdsa.VerifyingKey.from_string(bytes_public_key, curve=ecdsa.SECP256k1)
|
||||
|
||||
if vk.verify(r_auth_bytes, bytes_signed_data):
|
||||
response = "YES"
|
||||
else:
|
||||
response = "NO"
|
||||
|
||||
return response
|
||||
|
||||
if __name__== "__main__":
|
||||
app.run()
|
||||
|
13
encryption-on-apis/gen-ecdsa-private-key-v2.py
Normal file
13
encryption-on-apis/gen-ecdsa-private-key-v2.py
Normal file
@ -0,0 +1,13 @@
|
||||
#!/usr/bin/python3
|
||||
|
||||
import ecdsa
|
||||
|
||||
sk = ecdsa.SigningKey.generate(curve=ecdsa.SECP256k1)
|
||||
|
||||
# Get the private key as a byte string
|
||||
private_key_bytes = sk.to_string()
|
||||
|
||||
# Convert the private key byte string to a hexadecimal string
|
||||
private_key_hex = private_key_bytes.hex()
|
||||
|
||||
print('private_key: ', private_key_hex)
|
15
encryption-on-apis/gen-ecdsa-private-key.py
Executable file
15
encryption-on-apis/gen-ecdsa-private-key.py
Executable file
@ -0,0 +1,15 @@
|
||||
#!/usr/bin/python3
|
||||
|
||||
import ecdsa
|
||||
|
||||
# Generate a new random private key
|
||||
signing_key = ecdsa.SigningKey.generate()
|
||||
|
||||
# Get the private key as a byte string
|
||||
private_key_bytes = signing_key.to_string()
|
||||
|
||||
# Convert the private key byte string to a hexadecimal string
|
||||
private_key_hex = private_key_bytes.hex()
|
||||
|
||||
# Print the private key
|
||||
print(private_key_hex)
|
20
encryption-on-apis/gen-private-key.py
Executable file
20
encryption-on-apis/gen-private-key.py
Executable file
@ -0,0 +1,20 @@
|
||||
#!/usr/bin/python3
|
||||
|
||||
from cryptography.hazmat.backends import default_backend
|
||||
from cryptography.hazmat.primitives.asymmetric import ec
|
||||
from cryptography.hazmat.primitives import serialization
|
||||
|
||||
|
||||
def gen_key():
|
||||
private_key = ec.generate_private_key(
|
||||
ec.SECP256K1(), default_backend()
|
||||
)
|
||||
return private_key
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
key = gen_key()
|
||||
hexkey = key.private_numbers().private_value.to_bytes(32, 'big').hex()
|
||||
|
||||
print(hexkey)
|
21
encryption-on-apis/gen-public-key.py
Executable file
21
encryption-on-apis/gen-public-key.py
Executable file
@ -0,0 +1,21 @@
|
||||
#!/usr/bin/python3
|
||||
|
||||
import sys
|
||||
import ecdsa
|
||||
|
||||
# read in the first argument that should be ecdsa key in hex form
|
||||
private_key_hex = sys.argv[1]
|
||||
|
||||
# Convert the private key from hexadecimal to an integer
|
||||
private_key_int = int(private_key_hex, 16)
|
||||
|
||||
#print(private_key_int)
|
||||
## Create a signing key object from the private key
|
||||
signing_key = ecdsa.SigningKey.from_secret_exponent(private_key_int)
|
||||
|
||||
# Get the public key from the signing key object
|
||||
public_key = signing_key.verifying_key
|
||||
|
||||
# Print the public key in hexadecimal format
|
||||
##print(public_key.to_string("uncompressed").hex())
|
||||
print(public_key.to_string("hybrid").hex())
|
33
encryption-on-apis/get-random-signed-string.py
Executable file
33
encryption-on-apis/get-random-signed-string.py
Executable file
@ -0,0 +1,33 @@
|
||||
#!/usr/bin/python3
|
||||
import ecdsa
|
||||
import os, sys
|
||||
import hashlib
|
||||
|
||||
|
||||
# check if the private key was provided as a command-line argument
|
||||
if len(sys.argv) < 2:
|
||||
print("Error: Private key not provided")
|
||||
sys.exit(1)
|
||||
|
||||
# generate a random byte string
|
||||
random_bytes = os.urandom(32)
|
||||
|
||||
# compute the SHA-512 hash of the random bytes
|
||||
#sha512 = hashlib.sha512(random_bytes).hexdigest()
|
||||
sha512 = 'http://localhost:5000/get/1'
|
||||
|
||||
# read in the first argument that should be ecdsa key in hex form
|
||||
private_key_hex = sys.argv[1]
|
||||
|
||||
# Convert the private key from hexadecimal to an integer
|
||||
private_key_int = int(private_key_hex, 16)
|
||||
|
||||
# Generate SK from the private key
|
||||
sk = ecdsa.SigningKey.from_secret_exponent(private_key_int, curve=ecdsa.SECP256k1)
|
||||
|
||||
# sign the message
|
||||
signature = sk.sign(sha512.encode())
|
||||
|
||||
# print the signature
|
||||
print(signature)
|
||||
|
39
encryption-on-apis/singe-endpoint-api.py
Executable file
39
encryption-on-apis/singe-endpoint-api.py
Executable file
@ -0,0 +1,39 @@
|
||||
#!/usr/bin/python3
|
||||
|
||||
## Single endpoint encrypted api
|
||||
|
||||
from flask import Flask
|
||||
from flask import request
|
||||
from hashlib import sha256
|
||||
|
||||
|
||||
keys = {
|
||||
'key1': 'user1',
|
||||
'key2': 'user2'
|
||||
}
|
||||
|
||||
app = Flask(__name__)
|
||||
|
||||
@app.route("/<hex_hash>", methods=['post'])
|
||||
def endpoint(hex_hash):
|
||||
content_type = request.headers.get('content_type')
|
||||
if content_type == 'application/json':
|
||||
body = request.json
|
||||
enc_data = body['foo']
|
||||
|
||||
## decrypt the enc_data
|
||||
dec_data = enc_data
|
||||
|
||||
## Get checksum to compare against
|
||||
dec_data_hash = sha256(dec_data.encode('utf-8')).hexdigest()
|
||||
r_hash = hex_hash.encode('utf-8')
|
||||
print('if', r_hash, '==', dec_data_hash)
|
||||
|
||||
response = "message: ", enc_data, "checksum: ", dec_data_hash
|
||||
print(response)
|
||||
return "YES"
|
||||
else:
|
||||
return 'Content-Type not supported'
|
||||
|
||||
if __name__ == "__main__":
|
||||
app.run()
|
91
encryption-on-apis/stackoverflow-example.py
Executable file
91
encryption-on-apis/stackoverflow-example.py
Executable file
@ -0,0 +1,91 @@
|
||||
#!/usr/bin/python3
|
||||
|
||||
import ecdsa
|
||||
import binascii
|
||||
import requests
|
||||
from hashlib import sha256
|
||||
from flask import Flask
|
||||
from flask import request
|
||||
|
||||
|
||||
##Generate them keys
|
||||
# Generate private key (signing key)
|
||||
sk = ecdsa.SigningKey.generate(curve=ecdsa.SECP256k1)
|
||||
private_key_hex = sk.to_string().hex()
|
||||
|
||||
public_key = sk.verifying_key
|
||||
public_key_hex = binascii.hexlify(public_key.to_string()).decode('utf-8')
|
||||
|
||||
keys = {
|
||||
"private_key": private_key_hex,
|
||||
"public_key": public_key_hex
|
||||
}
|
||||
|
||||
app = Flask(__name__)
|
||||
line = '---------------------------------------'
|
||||
|
||||
@app.route('/send')
|
||||
def send():
|
||||
message = b"localhost:5000/get/123?sum=5f944f849124d36621d5f0708c7752a84fa9caa90bba629b8db93eea44cd0d1a"
|
||||
|
||||
print(line)
|
||||
print('private_key: ', keys['private_key'])
|
||||
print(line)
|
||||
|
||||
private_key_hex = keys['private_key']
|
||||
private_key = ecdsa.SigningKey.from_string(bytes.fromhex(private_key_hex), curve=ecdsa.SECP256k1)
|
||||
|
||||
sig_hex = binascii.hexlify(private_key.sign(message)).decode('utf-8')
|
||||
#print('sig:', sig_hex)
|
||||
|
||||
reply = requests.get('http://localhost:5000/get/123', headers={"auth": sig_hex})
|
||||
|
||||
output_status = str(reply.status_code)
|
||||
output_content = str(reply.content)
|
||||
|
||||
return output_content, output_status
|
||||
|
||||
@app.route('/get/<c_id>')
|
||||
def get(c_id):
|
||||
#vk = sk.get_verifying_key()
|
||||
# Get the public key from the signing key object
|
||||
public_key = keys['public_key']
|
||||
print("public_key:", public_key)
|
||||
print(line)
|
||||
print("got id: ", c_id)
|
||||
|
||||
# Get the sig from auth header
|
||||
sig = request.headers.get('auth')
|
||||
#print("vk2 - sig: ", sig)
|
||||
|
||||
# Get sig to bytes format, from str
|
||||
sig_bytes = bytes.fromhex(sig)
|
||||
|
||||
## BUILD THE "message"
|
||||
message = b"localhost:5000/get/123?sum=5f944f849124d36621d5f0708c7752a84fa9caa90bba629b8db93eea44cd0d1a"
|
||||
|
||||
vk = ecdsa.VerifyingKey.from_string(bytes.fromhex(public_key_hex), curve=ecdsa.SECP256k1)
|
||||
|
||||
reply = '{'
|
||||
if vk.verify(sig_bytes, message):
|
||||
# print('vk1 # True ')
|
||||
reply = reply + 'vk1: OK'
|
||||
else:
|
||||
# print('vk1 # False ')
|
||||
reply = reply + 'vk1: ERROR'
|
||||
|
||||
|
||||
vk2 = ecdsa.VerifyingKey.from_string(bytes.fromhex(public_key_hex), curve=ecdsa.SECP256k1) # the default is sha1
|
||||
if vk2.verify(sig_bytes, message):
|
||||
# print('vk2 # True ')
|
||||
reply = reply + ', vk2: OK'
|
||||
else:
|
||||
# print('vk2 # False ')
|
||||
reply = reply + ', vk2: ERROR'
|
||||
|
||||
reply = reply + '}'
|
||||
#print(reply)
|
||||
return reply
|
||||
|
||||
if __name__ == "__main__":
|
||||
app.run()
|
25
encryption-on-apis/verify.py
Executable file
25
encryption-on-apis/verify.py
Executable file
@ -0,0 +1,25 @@
|
||||
#!/usr/bin/python3
|
||||
|
||||
import ecdsa
|
||||
import sys
|
||||
|
||||
# read the original message from the first command-line argument
|
||||
message = sys.argv[1]
|
||||
|
||||
# read the signature from standard input
|
||||
signature = sys.stdin.read()
|
||||
|
||||
# read the public key from the second command-line argument
|
||||
public_key = sys.argv[2]
|
||||
|
||||
print('message: ', message)
|
||||
print('signature: ', signature)
|
||||
print('public key: ', public_key)
|
||||
|
||||
|
||||
# generate a verifying key from the public key
|
||||
vk = ecdsa.VerifyingKey.from_string(public_key, curve=ecdsa.SECP256k1)
|
||||
|
||||
# verify the signature
|
||||
#assert vk.verify(signature, message.encode())
|
||||
|
15
graph-scatter.py
Normal file
15
graph-scatter.py
Normal file
@ -0,0 +1,15 @@
|
||||
import matplotlib.pyplot as plt
|
||||
|
||||
# Create some sample data
|
||||
x = [1, 2, 3, 4, 5]
|
||||
y = [2, 4, 6, 8, 10]
|
||||
|
||||
# Plot the data as points
|
||||
plt.scatter(x, y)
|
||||
|
||||
# Add labels to the axes
|
||||
plt.xlabel('Time')
|
||||
plt.ylabel('Value')
|
||||
|
||||
# Show the plot
|
||||
plt.show()
|
49
letter-to-GPT.memo
Normal file
49
letter-to-GPT.memo
Normal file
@ -0,0 +1,49 @@
|
||||
I would like to have a program that conforms to these rules:
|
||||
|
||||
1. written in python3
|
||||
2. single file
|
||||
|
||||
And then it should do these things:
|
||||
|
||||
1. record the bitcoin price
|
||||
|
||||
1.1. by fetching:
|
||||
source = 'http://api.binance.com/api/v3/avgPrice'
|
||||
payload = {'symbol': 'BTCUSDT'}
|
||||
response = requests.get(source, params=payload)
|
||||
|
||||
binance api response is like:
|
||||
{
|
||||
"mins": 5,
|
||||
"price: "170.20"
|
||||
}
|
||||
|
||||
1.2. and storing it in sqlite3 or other embedded database
|
||||
The database should hold this information:
|
||||
* timestamp as unix timestamp
|
||||
* value of the symbol
|
||||
* name of the symbol
|
||||
* source of the symbol
|
||||
|
||||
1.3
|
||||
Fetch the price every "mins" minutes, something like:
|
||||
time_delta = time.time() - timestamp_of_last_record
|
||||
if time_delta >= mins_from_last_record:
|
||||
fetch_new_price_data()
|
||||
|
||||
2. serve a http/json api to send the gathered price data forward
|
||||
|
||||
Every response from this api should be a json document that contains a sha256 checksum as its last element.
|
||||
This api should require token authentication.
|
||||
|
||||
2.1 allow a full dump of the database as a json document
|
||||
|
||||
2.2 allow a partial dump by taking a limiting timestamp as an attribute of the http get request
|
||||
|
||||
3. Keep the size of the files under control
|
||||
|
||||
The database that holds the price data should be rotated every midnight,
|
||||
so that we don't end up with large database files on the server.
|
||||
|
||||
Can you generate this?
|
||||
I think threading is needed to make this work: one thread for the data gathering and another for serving the api. A small sketch of the interval check and the checksum element follows below.
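A minimal sketch of the interval check (item 1.3) and the checksum element (item 2) could look like this; the helper names and the sort_keys choice are illustrative assumptions, not part of the memo:

import hashlib
import json
import time

def should_fetch(timestamp_of_last_record, mins):
    # True once at least "mins" minutes have passed since the last stored record.
    return (time.time() - timestamp_of_last_record) >= mins * 60

def with_checksum(payload):
    # Append a sha256 checksum of the document as its last element.
    payload['sha256'] = hashlib.sha256(
        json.dumps(payload, sort_keys=True).encode('utf-8')).hexdigest()
    return payload

# Example: with_checksum({'symbol': 'BTCUSDT', 'price': '170.20'})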
|
62
letters/Flask_and_backend_process.py
Normal file
62
letters/Flask_and_backend_process.py
Normal file
@ -0,0 +1,62 @@
|
||||
from flask import Flask
|
||||
import threading
|
||||
import sqlite3
|
||||
|
||||
app = Flask(__name__)
|
||||
|
||||
# Create a lock for synchronizing access to the database
|
||||
lock = threading.Lock()
|
||||
|
||||
def my_background_process():
|
||||
# Acquire the lock
|
||||
lock.acquire()
|
||||
|
||||
# Connect to the in-memory database
|
||||
conn = sqlite3.connect(':memory:')
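# Note: each call to sqlite3.connect(':memory:') opens its own private, empty database,
# so rows written here are not visible to the connection opened in read_from_database();
# a file-backed database (or a single shared connection) would be needed to share data.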
|
||||
c = conn.cursor()
|
||||
|
||||
# Create a table in the database
|
||||
c.execute("CREATE TABLE mytable (col1 INTEGER, col2 TEXT)")
|
||||
|
||||
# Write to the database
|
||||
c.execute("INSERT INTO mytable (col1, col2) VALUES (?, ?)", (value1, value2))
|
||||
conn.commit()
|
||||
|
||||
# Close the connection
|
||||
conn.close()
|
||||
|
||||
# Release the lock
|
||||
lock.release()
|
||||
|
||||
@app.route('/')
|
||||
def read_from_database():
|
||||
# Acquire the lock
|
||||
lock.acquire()
|
||||
|
||||
# Connect to the in-memory database
|
||||
conn = sqlite3.connect(':memory:')
|
||||
c = conn.cursor()
|
||||
|
||||
# Read from the database
|
||||
c.execute("SELECT * FROM mytable")
|
||||
rows = c.fetchall()
|
||||
|
||||
# Close the connection
|
||||
conn.close()
|
||||
|
||||
# Release the lock
|
||||
lock.release()
|
||||
|
||||
# Return the rows to the client
|
||||
return rows
|
||||
|
||||
if __name__ == '__main__':
|
||||
# Create a new thread for the background process
|
||||
thread = threading.Thread(target=my_background_process)
|
||||
|
||||
# Start the thread
|
||||
thread.start()
|
||||
|
||||
# Start the Flask app
|
||||
app.run()
|
||||
|
54
letters/bitcoin_wallet_steps_and_plan
Normal file
54
letters/bitcoin_wallet_steps_and_plan
Normal file
@ -0,0 +1,54 @@
|
||||
# Steps to create a bitcoin-wallet-handling RPi 3B setup
|
||||
Made with ChatGPT on 22.5.2023
|
||||
|
||||
|
||||
Understood. In that case, you can use the USB keys as temporary storage for the keepassxc databases and transfer them to a more permanent and secure location when not in use. This approach adds an extra layer of security by limiting the exposure of sensitive information.
|
||||
|
||||
Here's an updated outline of the steps involved:
|
||||
|
||||
Set up Raspberry Pi: Install the operating system (such as Raspbian) on your Raspberry Pi 3B and configure it accordingly.
|
||||
|
||||
Install necessary software: Install the required software packages, such as Python and any libraries or dependencies needed for key management and wallet operations.
|
||||
|
||||
Generate and store keys: Use appropriate libraries or tools to generate the necessary key pairs for each participant. Store the private keys securely on separate USB keys, ensuring they are encrypted and protected with strong passwords.
|
||||
|
||||
Securely store seed phrases: Generate the seed phrases for each participant and encrypt them. Store the encrypted seed phrases in the keepassxc databases, which can be stored temporarily on individual USB keys.
|
||||
|
||||
Develop wallet management scripts: Write scripts in Python (or any language of your choice) to interact with the generated keys, perform wallet operations, and handle transaction signing. These scripts should ensure the security of private keys, seed phrases, and any sensitive information.
|
||||
|
||||
Implement multi-signature functionality: Utilize appropriate libraries or methods to create multi-signature wallets and addresses using the public keys of the participants. Follow the recommended standards and protocols for multi-signature setups.
|
||||
|
||||
Test and secure the setup: Thoroughly test the functionality of the Raspberry Pi setup, including generating transactions, collecting signatures, and broadcasting transactions. Implement additional security measures such as firewalls, access controls, and regular updates to keep the system secure.
|
||||
|
||||
Educate and distribute responsibilities: Provide clear instructions and guidelines to the participants on their roles and responsibilities. Educate them on securely managing their USB keys, passwords, and the overall security practices they should follow.
|
||||
|
||||
Transfer keepassxc databases: When the USB keys are not in use, transfer the keepassxc databases to a more permanent and secure location, such as an offline storage medium or a highly secure cloud storage service. Ensure that the transferred databases are properly encrypted and protected.
|
||||
|
||||
By following these steps and customizing the setup to fit your specific requirements, you can create a Raspberry Pi-based solution that replicates the functionality of Seedsigner and securely manages the keys and seed phrases for your multi-signature setup. Remember to regularly review and update the security measures as needed to maintain the integrity of the system.
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
User
|
||||
I feel this is good project to increase my coding skills since I'm more of a sysadmin by background.
|
||||
ChatGPT
|
||||
|
||||
That's a fantastic mindset! Taking on a project like this can be a great opportunity to enhance your coding skills, especially if you come from a sysadmin background. It allows you to explore new programming concepts, deepen your understanding of cryptographic operations, and develop practical solutions for managing keys and wallets.
|
||||
|
||||
Here are a few benefits of engaging in this project:
|
||||
|
||||
Practical application: By building a Raspberry Pi-based solution for key and wallet management, you'll gain hands-on experience in working with cryptography, interacting with hardware, and implementing secure systems. It's a practical application of your coding skills.
|
||||
|
||||
Broadened skill set: Diving into this project will expose you to various aspects of software development, including scripting, library integration, and system configuration. It will broaden your skill set and make you more versatile in tackling future projects.
|
||||
|
||||
Security awareness: As you navigate the intricacies of key management and multi-signature wallets, you'll develop a deeper understanding of security practices and considerations. This knowledge can be applied to other areas of your work, enabling you to strengthen the security of your systems.
|
||||
|
||||
Problem-solving opportunities: Building a custom solution presents opportunities to solve unique challenges along the way. It encourages critical thinking, troubleshooting, and finding creative solutions to overcome obstacles.
|
||||
|
||||
Portfolio enhancement: Completing this project successfully adds a valuable item to your coding portfolio. It showcases your ability to design and implement secure systems, demonstrating your practical coding skills to potential employers or collaborators.
|
||||
|
||||
Remember to document your progress, learn from the experience, and seek feedback from others. Engaging in coding projects outside of your comfort zone is an excellent way to grow your skills and expand your knowledge base.
|
||||
|
||||
Enjoy the journey, and don't hesitate to seek support from relevant online communities or forums if you encounter difficulties or have specific questions. Happy coding!
|
||||
|
@ -0,0 +1,87 @@
|
||||
- How to send and receive an encrypted HTTP POST body in Python?
|
||||
|
||||
To send and receive encrypted HTTP POST body data in Python, you can use the cryptography package to encrypt the data before sending it and to decrypt the data after receiving it. Here is an example of how this can be done:
|
||||
|
||||
# Import the necessary modules
|
||||
from cryptography.fernet import Fernet
|
||||
from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC
|
||||
from cryptography.hazmat.primitives import hashes
|
||||
import base64
|
||||
import requests
|
||||
|
||||
# Generate a secure key using PBKDF2
|
||||
password = "my_secret_password"
|
||||
salt = b"my_secret_salt"
|
||||
kdf = PBKDF2HMAC(
|
||||
algorithm=hashes.SHA256(),
|
||||
length=32,
|
||||
salt=salt,
|
||||
iterations=100000
|
||||
)
|
||||
key = base64.urlsafe_b64encode(kdf.derive(password.encode()))
|
||||
|
||||
# Initialize Fernet
|
||||
fernet = Fernet(key)
|
||||
|
||||
# Define the URL and the data to be sent
|
||||
url = "https://www.example.com/post"
|
||||
data = {
|
||||
"username": "my_username",
|
||||
"password": "my_password"
|
||||
}
|
||||
|
||||
# Encrypt the data
|
||||
encrypted_data = fernet.encrypt(str(data).encode())
|
||||
|
||||
# Send the encrypted data as the POST body
|
||||
response = requests.post(url, data=encrypted_data)
|
||||
|
||||
# Decrypt the response data
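# Assumes the server replies with data encrypted under the same Fernet key
# (same password and salt); otherwise fernet.decrypt() raises InvalidToken.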
|
||||
decrypted_data = fernet.decrypt(response.content)
|
||||
|
||||
# Print the decrypted data
|
||||
print(decrypted_data)
|
||||
|
||||
In this example, we first use the PBKDF2HMAC class from the cryptography package to generate a secure key that we will use to encrypt and decrypt the data. Then, we initialize a Fernet object with this key, and we define the URL and the data that we want to send as an encrypted HTTP POST request.
|
||||
|
||||
Next, we use the encrypt() method of the Fernet object to encrypt the data, and we use the requests library to send the encrypted data as the POST body to the specified URL.
|
||||
|
||||
Finally, we use the decrypt() method of the Fernet object to decrypt the response data, and we print it to verify that it matches the original data.
|
||||
|
||||
This is just one way to send and receive encrypted HTTP POST body data in Python. There are many other encryption algorithms and libraries that you can use to achieve the same result.
|
||||
|
||||
|
||||
- What is this Fernet?
|
||||
|
||||
Fernet is a symmetric encryption algorithm that is part of the cryptography package in Python. It uses a secure key to encrypt and decrypt data, and it guarantees that the encrypted data cannot be altered or read without the key.
|
||||
|
||||
Fernet is based on the Advanced Encryption Standard (AES) algorithm and uses a combination of symmetric encryption, a message authentication code (MAC), and a secure key derivation function (KDF) to provide strong encryption and integrity protection. It is considered to be very secure when used properly.
|
||||
|
||||
In Python, Fernet is easy to use and can be integrated into a wide variety of applications that need to securely encrypt and decrypt data. Here is a simple example of how to use Fernet in Python:
|
||||
|
||||
from cryptography.fernet import Fernet
|
||||
|
||||
# Generate a secure key
|
||||
key = Fernet.generate_key()
|
||||
|
||||
# Initialize Fernet with the key
|
||||
fernet = Fernet(key)
|
||||
|
||||
# Define the data that we want to encrypt
|
||||
data = "my_secret_message"
|
||||
|
||||
# Encrypt the data
|
||||
encrypted_data = fernet.encrypt(data.encode())
|
||||
|
||||
# Decrypt the data
|
||||
decrypted_data = fernet.decrypt(encrypted_data)
|
||||
|
||||
# Print the decrypted data to verify that it matches the original data
|
||||
print(decrypted_data.decode())
|
||||
|
||||
|
||||
In this example, we first generate a secure key using the generate_key() method of the Fernet class. Then, we initialize a Fernet object with this key, and we define the data that we want to encrypt.
|
||||
|
||||
Next, we use the encrypt() method of the Fernet object to encrypt the data, and we use the decrypt() method to decrypt the encrypted data. Finally, we print the decrypted data to verify that it matches the original data.
|
||||
|
||||
This is just a simple example of how Fernet can be used in Python. In practice, you would need to securely store and manage the key, and you may want to use other methods or options provided by the cryptography package to customize the encryption and decryption process.
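For the key-management point above, here is a minimal sketch that stores the generated key in a local file and reuses it on later runs; the file name and permission scheme are assumptions, not part of the answer:

import os
from cryptography.fernet import Fernet

KEY_PATH = 'fernet.key'  # assumed location

def load_or_create_key(path=KEY_PATH):
    # Reuse an existing key if present; otherwise generate one and keep it owner-readable only.
    if os.path.exists(path):
        with open(path, 'rb') as f:
            return f.read()
    key = Fernet.generate_key()
    with open(path, 'wb') as f:
        f.write(key)
    os.chmod(path, 0o600)
    return key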
|
36
letters/fifo-sample.py
Executable file
36
letters/fifo-sample.py
Executable file
@ -0,0 +1,36 @@
|
||||
#!/usr/bin/python3
|
||||
import os
|
||||
import sys
|
||||
import queue
|
||||
import threading
|
||||
|
||||
def listen_to_fifo(q):
|
||||
fifo = "/tmp/my_fifo"
|
||||
if not os.path.exists(fifo):
|
||||
os.mkfifo(fifo)
|
||||
with open(fifo, 'r') as f:
|
||||
while True:
|
||||
data = f.readline().strip()
|
||||
if not data:
|
||||
break
|
||||
q.put(data)
|
||||
|
||||
def read_queue(q):
|
||||
while True:
|
||||
data = q.get()
|
||||
if data == "reboot":
|
||||
# Restart the script
|
||||
print('## RESTARTING SCRIPT')
|
||||
os.execv(sys.executable, [sys.executable] + sys.argv)
|
||||
else:
|
||||
print(data)
|
||||
sys.stdout.flush()
|
||||
q.task_done()
|
||||
|
||||
if __name__ == '__main__':
|
||||
q = queue.Queue()
|
||||
t1 = threading.Thread(target=listen_to_fifo, args=(q,))
|
||||
t2 = threading.Thread(target=read_queue, args=(q,))
|
||||
t1.start()
|
||||
t2.start()
|
||||
q.join()
|
35
letters/fifo-sample_with-commands.py
Executable file
35
letters/fifo-sample_with-commands.py
Executable file
@ -0,0 +1,35 @@
|
||||
#!/usr/bin/python3
|
||||
import os
|
||||
import sys
|
||||
import queue
|
||||
import threading
|
||||
|
||||
command_queue = queue.Queue()
|
||||
fifo_file = "/tmp/my_fifo"
|
||||
|
||||
def listen_to_fifo(queue):
|
||||
if not os.path.exists(fifo_file):
|
||||
os.mkfifo(fifo_file)
|
||||
with open(fifo_file, 'r') as f:
|
||||
while True:
|
||||
data = f.readline().strip()
|
||||
if not data:
|
||||
break
|
||||
queue.put(data)
|
||||
|
||||
def read_queue(queue):
|
||||
while True:
|
||||
data = queue.get()
|
||||
if data == "reboot":
|
||||
# fifo_file is a path string, not an open file handle; the FIFO is closed by the 'with' block in listen_to_fifo()
|
||||
os.execv(sys.executable, [sys.executable] + sys.argv)
|
||||
print(data)
|
||||
sys.stdout.flush()
|
||||
queue.task_done()
|
||||
|
||||
if __name__ == '__main__':
|
||||
t1 = threading.Thread(target=listen_to_fifo, args=(command_queue,))
|
||||
t2 = threading.Thread(target=read_queue, args=(command_queue,))
|
||||
t1.start()
|
||||
t2.start()
|
||||
command_queue.join()
|
21
letters/openGPT-btc-price-fetch.py
Normal file
21
letters/openGPT-btc-price-fetch.py
Normal file
@ -0,0 +1,21 @@
|
||||
import requests
|
||||
import sqlite3
|
||||
import time
|
||||
|
||||
def fetch_price_data():
|
||||
# Set up the URL and payload for the API request
|
||||
source = 'http://api.binance.com/api/v3/avgPrice'
|
||||
payload = {'symbol': 'BTCUSDT'}
|
||||
|
||||
# Send the API request and parse the response
|
||||
response = requests.get(source, params=payload)
|
||||
data = response.json()
|
||||
mins = data['mins']
|
||||
price = data['price']
|
||||
|
||||
# Store the data in the SQLite database
|
||||
conn = sqlite3.connect('database.db')
|
||||
cursor = conn.cursor()
|
||||
cursor.execute('INSERT INTO price_data (timestamp, value, symbol, source) VALUES (?, ?, ?, ?)', (time.time(), price, 'BTCUSDT', 'Binance'))
|
||||
conn.commit()
|
||||
conn.close()
|
||||
|
||||
return mins
|
19
letters/openGPT-example-on-thread-locks.py
Normal file
19
letters/openGPT-example-on-thread-locks.py
Normal file
@ -0,0 +1,19 @@
|
||||
import threading
|
||||
|
||||
# Create a thread lock for the database
|
||||
database_lock = threading.Lock()
|
||||
|
||||
def write_to_database(data):
|
||||
# Acquire the thread lock before accessing the database
|
||||
with database_lock:
|
||||
# Write data to the database
|
||||
# (database logic goes here)
|
||||
print('Wrote to the database')
|
||||
|
||||
def rotate_database():
|
||||
# Acquire the thread lock before rotating the database
|
||||
with database_lock:
|
||||
# Rotate the database
|
||||
# (database rotation logic goes here)
|
||||
print('Rotated the database')
|
||||
|
37
letters/openGPT-flask-api.py
Normal file
37
letters/openGPT-flask-api.py
Normal file
@ -0,0 +1,37 @@
|
||||
from flask import Flask, request
|
||||
import sqlite3
|
||||
|
||||
# Create a Flask app and set up the API endpoint
|
||||
app = Flask(__name__)
|
||||
|
||||
@app.route('/dump_db', methods=['GET'])
|
||||
def dump_db():
|
||||
# Connect to the SQLite database
|
||||
conn = sqlite3.connect('database.db')
|
||||
cursor = conn.cursor()
|
||||
|
||||
# Retrieve the data from the database
|
||||
cursor.execute('SELECT * FROM price_data')
|
||||
rows = cursor.fetchall()
|
||||
|
||||
# Format the data as a JSON document
|
||||
data = {
|
||||
'timestamp': [row[0] for row in rows],
|
||||
'value': [row[1] for row in rows],
|
||||
'symbol': [row[2] for row in rows],
|
||||
'source': [row[3] for row in rows],
|
||||
}
|
||||
|
||||
# Check if a timestamp was provided in the request
|
||||
timestamp = request.args.get('timestamp')
|
||||
if timestamp:
|
||||
# Filter the data to only include rows with a timestamp greater than or equal to the provided value
|
||||
data = {k: [v for i, v in enumerate(vals) if data['timestamp'][i] >= float(timestamp)] for k, vals in data.items()}
|
||||
|
||||
# Return the data as a JSON response
|
||||
return data
|
||||
|
||||
# Run the app
|
||||
if __name__ == '__main__':
|
||||
app.run()
|
||||
|
22
letters/openGPT-python-threads.py
Normal file
22
letters/openGPT-python-threads.py
Normal file
@ -0,0 +1,22 @@
|
||||
from threading import Thread
|
||||
from flask import Flask
|
||||
|
||||
app = Flask(__name__)
|
||||
|
||||
@app.route('/api')
|
||||
def api():
|
||||
# API logic here
|
||||
return 'API response'
|
||||
|
||||
def data_request_task():
|
||||
# Data requesting logic here
|
||||
print('Data request task finished')
|
||||
|
||||
if __name__ == '__main__':
|
||||
# Start the Flask API in a separate thread
|
||||
api_thread = Thread(target=app.run)
|
||||
api_thread.start()
|
||||
|
||||
# Run the data requesting task
|
||||
data_request_task()
|
||||
|
49
letters/python-by-chatGPT.py
Normal file
49
letters/python-by-chatGPT.py
Normal file
@ -0,0 +1,49 @@
|
||||
import http.server
|
||||
import json
|
||||
|
||||
from ecdsa import VerifyingKey, NIST384p
|
||||
|
||||
user_and_tokens = {
|
||||
"user1": ecdsa_public_key_from_user1
|
||||
}
|
||||
|
||||
class RequestHandler(http.server.BaseHTTPRequestHandler):
|
||||
|
||||
def do_POST(self):
|
||||
if self.path == '/auth':
|
||||
content_length = int(self.headers['Content-Length'])
|
||||
request_data = json.loads(self.rfile.read(content_length))
|
||||
|
||||
# Check the JSON post data for the required fields
|
||||
if 'person' not in request_data or 'signature' not in request_data:
|
||||
self.send_response(400)
|
||||
self.end_headers()
|
||||
return
|
||||
|
||||
person = request_data['person']
|
||||
signature = request_data['signature']
|
||||
|
||||
# Check the person data for the required fields
|
||||
if 'user' not in person or 'token' not in person:
|
||||
self.send_response(400)
|
||||
self.end_headers()
|
||||
return
|
||||
|
||||
user = person['user']
|
||||
token = person['token']
|
||||
|
||||
# Check the signature data for the required fields
|
||||
if 'signature' not in signature or 'curve' not in signature:
|
||||
self.send_response(400)
|
||||
self.end_headers()
|
||||
return
|
||||
|
||||
sig = signature['signature']
|
||||
curve = signature['curve']
|
||||
|
||||
# Verify the signature using the provided public key
|
||||
vk = VerifyingKey.from_string(pre_delivered_public_key, curve=getattr(NIST384p, curve))
|
||||
if vk.verify(sig, user + token):
|
||||
print("allow")
|
||||
self.send_response(200)
|
||||
|
15
letters/response-1.memo
Normal file
15
letters/response-1.memo
Normal file
@ -0,0 +1,15 @@
|
||||
Yes, it is possible to create a program in Python that satisfies the requirements you have outlined. Here is a rough outline of how such a program might work:
|
||||
|
||||
The program will be written in Python 3 and be contained in a single file.
|
||||
|
||||
To record the Bitcoin price, the program will use the requests module to fetch the price data from the Binance API. The response from the API will be parsed to extract the mins and price values, which will be stored in an SQLite database along with the current timestamp, the symbol name, and the source of the data.
|
||||
|
||||
To ensure that the program fetches the price data at regular intervals, it will use a timer to check the time since the last data was fetched. If the time since the last fetch is greater than the mins value from the API response, the program will fetch new data.
|
||||
|
||||
To serve the price data via an HTTP/JSON API, the program will use the flask module to create a server that listens for incoming HTTP requests. The server will be configured to require token authentication, and will provide two endpoints: one for dumping the entire database as a JSON document, and another for dumping a subset of the data based on a provided timestamp.
|
||||
|
||||
To control the size of the database, the program will use a daily timer to rotate the database at midnight each day. This will involve creating a new database file and copying the data from the old database into the new one, then deleting the old database file.
|
||||
|
||||
To implement the program, you will need to use Python's threading module to create separate threads for the data gathering and API serving processes. This will allow the program to fetch data and serve requests concurrently, ensuring that the API is always available and up-to-date.
|
||||
|
||||
I hope this helps! Let me know if you have any other questions.
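A rough sketch of the midnight rotation and timer described in the outline above; the file name, rename scheme, and sleep-based timer are assumptions, not something the outline specifies:

import datetime
import os

def rotate_database(db_path='database.db'):
    # Rename the current database with today's date so a fresh file is started.
    if os.path.exists(db_path):
        suffix = datetime.date.today().strftime('%Y-%m-%d')
        os.rename(db_path, f'{db_path}.{suffix}')

def seconds_until_midnight():
    # Time left until the next midnight, usable as a sleep interval in the rotation thread.
    now = datetime.datetime.now()
    tomorrow = datetime.datetime.combine(now.date() + datetime.timedelta(days=1),
                                         datetime.time.min)
    return (tomorrow - now).total_seconds()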
|
27
letters/sample_function.py
Normal file
27
letters/sample_function.py
Normal file
@ -0,0 +1,27 @@
|
||||
import requests
|
||||
import json
|
||||
import ecdsa
|
||||
import binascii
|
||||
import time
|
||||
|
||||
def get_btc_ohlc_data(server_url, user_private_key):
|
||||
# Load user's ECDSA private key
|
||||
user_private_key = ecdsa.SigningKey.from_string(binascii.unhexlify(user_private_key), curve=ecdsa.SECP256k1)
|
||||
# Get server public key from endpoint
|
||||
server_public_key_hex = requests.get(server_url + "/serverkey").text
|
||||
server_public_key = ecdsa.VerifyingKey.from_string(binascii.unhexlify(server_public_key_hex), curve=ecdsa.SECP256k1)
|
||||
# Get timestamp
|
||||
timestamp = str(int(time.time()))
|
||||
# Create signature using user's private key
|
||||
signature = binascii.hexlify(user_private_key.sign(bytes(timestamp, 'utf-8'))).decode("utf-8")
|
||||
# Create authentication header
|
||||
auth_header = {"auth": timestamp + ":" + signature}
|
||||
# Make request to server with auth header
|
||||
response = requests.get(server_url + "/t", headers=auth_header)
|
||||
# Verify server's signature
|
||||
server_signature = response.headers["signature"]
|
||||
if server_public_key.verify(bytes(server_signature, 'utf-8'), bytes(timestamp, 'utf-8')):
|
||||
# If signature is valid, return json data
|
||||
return json.loads(response.text)
|
||||
else:
|
||||
# If signature is invalid, return error message
|
||||
return {"error": "Invalid signature from server"}
|
41
letters/two-key-pairs.py
Executable file
41
letters/two-key-pairs.py
Executable file
@ -0,0 +1,41 @@
|
||||
#!/usr/bin/python3
|
||||
import ecdsa
|
||||
from cryptography.fernet import Fernet
|
||||
|
||||
# Generate a signing key pair for the server
|
||||
server_signing_private_key = ecdsa.SigningKey.generate(curve=ecdsa.SECP256k1)
|
||||
server_signing_public_key = server_signing_private_key.get_verifying_key()
|
||||
|
||||
# Generate an encryption key pair for the server
|
||||
server_ecdh = ecdsa.ECDH(curve=ecdsa.SECP256k1)
|
||||
server_encryption_private_key = server_ecdh.generate_private_key()
|
||||
server_encryption_public_key = server_ecdh.public_key(server_encryption_private_key)
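# NOTE: treat this file as a sketch of the intended flow. In the ecdsa package the ECDH helper
# does not expose public_key()/exchange() as used here; it works via get_public_key(),
# load_received_public_key() and generate_sharedsecret_bytes(). Fernet also expects a
# 32-byte urlsafe base64 key derived from the shared secret, not the raw secret bytes.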
|
||||
|
||||
|
||||
# Generate a signing key pair for the client
|
||||
client_signing_private_key = ecdsa.SigningKey.generate(curve=ecdsa.SECP256k1)
|
||||
client_signing_public_key = client_signing_private_key.get_verifying_key()
|
||||
|
||||
# Generate an encryption key pair for the client
|
||||
client_ecdh = ecdsa.ECDH(curve=ecdsa.SECP256k1)
|
||||
client_encryption_private_key = client_ecdh.generate_private_key()
|
||||
client_encryption_public_key = client_encryption_private_key.public_key()
|
||||
|
||||
# Exchange public keys between the server and the client
|
||||
server_shared_secret = server_encryption_private_key.exchange(client_encryption_public_key)
|
||||
client_shared_secret = client_encryption_private_key.exchange(server_encryption_public_key)
|
||||
|
||||
# Use the shared secret to create a Fernet object for encrypting/decrypting messages
|
||||
server_fernet = Fernet(server_shared_secret)
|
||||
client_fernet = Fernet(client_shared_secret)
|
||||
|
||||
# Sign and encrypt a message from the server to the client
|
||||
message = "Hello, client!"
|
||||
signed_message = server_signing_private_key.sign(message.encode())
|
||||
encrypted_message = server_fernet.encrypt(signed_message)
|
||||
|
||||
# Verify and decrypt the message on the client side
|
||||
verified_message = client_signing_public_key.verify(encrypted_message, signed_message)
|
||||
decrypted_message = client_fernet.decrypt(verified_message)
|
||||
print(decrypted_message) # "Hello, client!"
|
||||
|
377
pastedb/cpaste.py
Executable file
377
pastedb/cpaste.py
Executable file
@ -0,0 +1,377 @@
|
||||
#!/usr/bin/env python3
|
||||
#
|
||||
#######################################################################
|
||||
#
|
||||
# A script to paste to https://cpaste.org/
|
||||
#
|
||||
# Copyright (c) 2013-2019 Andreas Schneider <asn@samba.org>
|
||||
# Copyright (c) 2013 Alexander Bokovoy <ab@samba.org>
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
#######################################################################
|
||||
#
|
||||
# Requires: python3-requests
|
||||
# Requires: python3-cryptography
|
||||
#
|
||||
# Optionally requires: python-Pygments
|
||||
#
|
||||
|
||||
import os
|
||||
import sys
|
||||
import json
|
||||
import base64
|
||||
import zlib
|
||||
import requests
|
||||
from cryptography.hazmat.primitives.ciphers.aead import AESGCM
|
||||
from cryptography.hazmat.primitives import hashes
|
||||
from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC
|
||||
from cryptography.hazmat.backends import default_backend
|
||||
from optparse import OptionParser
|
||||
from mimetypes import guess_type
|
||||
try:
|
||||
from pygments.lexers import guess_lexer, guess_lexer_for_filename
|
||||
from pygments.util import ClassNotFound
|
||||
guess_lang = True
|
||||
except ImportError:
|
||||
guess_lang = False
|
||||
|
||||
|
||||
def base58_encode(v: bytes):
|
||||
# 58 char alphabet
|
||||
alphabet = b'123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'
|
||||
alphabet_len = len(alphabet)
|
||||
|
||||
nPad = len(v)
|
||||
v = v.lstrip(b'\0')
|
||||
nPad -= len(v)
|
||||
|
||||
x = 0
|
||||
for (i, c) in enumerate(v[::-1]):
|
||||
if isinstance(c, str):
|
||||
c = ord(c)
|
||||
x += c << (8 * i)
|
||||
|
||||
string = b''
|
||||
while x:
|
||||
x, idx = divmod(x, alphabet_len)
|
||||
string = alphabet[idx:idx+1] + string
|
||||
|
||||
return (alphabet[0:1] * nPad + string)
|
||||
|
||||
|
||||
def json_encode(d):
|
||||
return json.dumps(d, separators=(',', ':')).encode('utf-8')
|
||||
|
||||
|
||||
#
|
||||
# The encryption format is described here:
|
||||
# https://github.com/PrivateBin/PrivateBin/wiki/Encryption-format
|
||||
#
|
||||
def privatebin_encrypt(paste_passphrase,
|
||||
paste_password,
|
||||
paste_plaintext,
|
||||
paste_formatter,
|
||||
paste_attachment_name,
|
||||
paste_attachment,
|
||||
paste_compress,
|
||||
paste_burn,
|
||||
paste_opendicussion):
|
||||
if paste_password:
|
||||
paste_passphrase += bytes(paste_password, 'utf-8')
|
||||
|
||||
# PBKDF
|
||||
kdf_salt = bytes(os.urandom(8))
|
||||
kdf_iterations = 100000
|
||||
kdf_keysize = 256 # size of resulting kdf_key
|
||||
|
||||
backend = default_backend()
|
||||
kdf = PBKDF2HMAC(algorithm=hashes.SHA256(),
|
||||
length=int(kdf_keysize / 8), # 256bit
|
||||
salt=kdf_salt,
|
||||
iterations=kdf_iterations,
|
||||
backend=backend)
|
||||
kdf_key = kdf.derive(paste_passphrase)
|
||||
|
||||
# AES-GCM
|
||||
adata_size = 128
|
||||
|
||||
cipher_iv = bytes(os.urandom(int(adata_size / 8)))
|
||||
cipher_algo = "aes"
|
||||
cipher_mode = "gcm"
|
||||
|
||||
compression_type = "none"
|
||||
if paste_compress:
|
||||
compression_type = "zlib"
|
||||
|
||||
# compress plaintext
|
||||
paste_data = {'paste': paste_plaintext}
|
||||
if paste_attachment_name and paste_attachment:
|
||||
paste_data['attachment'] = paste_attachment
|
||||
paste_data['attachment_name'] = paste_attachment_name
|
||||
print(paste_attachment_name)
|
||||
print(paste_attachment)
|
||||
|
||||
if paste_compress:
|
||||
zobj = zlib.compressobj(wbits=-zlib.MAX_WBITS)
|
||||
paste_blob = zobj.compress(json_encode(paste_data)) + zobj.flush()
|
||||
else:
|
||||
paste_blob = json_encode(paste_data)
|
||||
|
||||
# Associated data to authenticate
|
||||
paste_adata = [
|
||||
[
|
||||
base64.b64encode(cipher_iv).decode("utf-8"),
|
||||
base64.b64encode(kdf_salt).decode("utf-8"),
|
||||
kdf_iterations,
|
||||
kdf_keysize,
|
||||
adata_size,
|
||||
cipher_algo,
|
||||
cipher_mode,
|
||||
compression_type,
|
||||
],
|
||||
paste_formatter,
|
||||
int(paste_opendicussion),
|
||||
int(paste_burn),
|
||||
]
|
||||
|
||||
paste_adata_json = json_encode(paste_adata)
|
||||
|
||||
aesgcm = AESGCM(kdf_key)
|
||||
ciphertext = aesgcm.encrypt(cipher_iv, paste_blob, paste_adata_json)
|
||||
|
||||
# Validate
|
||||
# aesgcm.decrypt(cipher_iv, ciphertext, paste_adata_json)
|
||||
|
||||
paste_ciphertext = base64.b64encode(ciphertext).decode("utf-8")
|
||||
|
||||
return paste_adata, paste_ciphertext
|
||||
|
||||
|
||||
def privatebin_send(paste_url,
|
||||
paste_password,
|
||||
paste_plaintext,
|
||||
paste_formatter,
|
||||
paste_attachment_name,
|
||||
paste_attachment,
|
||||
paste_compress,
|
||||
paste_burn,
|
||||
paste_opendicussion,
|
||||
paste_expire):
|
||||
paste_passphrase = bytes(os.urandom(32))
|
||||
|
||||
paste_adata, paste_ciphertext = privatebin_encrypt(paste_passphrase,
|
||||
paste_password,
|
||||
paste_plaintext,
|
||||
paste_formatter,
|
||||
paste_attachment_name,
|
||||
paste_attachment,
|
||||
paste_compress,
|
||||
paste_burn,
|
||||
paste_opendicussion)
|
||||
|
||||
# json payload for the post API
|
||||
# https://github.com/PrivateBin/PrivateBin/wiki/API
|
||||
payload = {
|
||||
"v": 2,
|
||||
"adata": paste_adata,
|
||||
"ct": paste_ciphertext,
|
||||
"meta": {
|
||||
"expire": paste_expire,
|
||||
}
|
||||
}
|
||||
|
||||
# http content type
|
||||
headers = {'X-Requested-With': 'JSONHttpRequest'}
|
||||
|
||||
r = requests.post(paste_url,
|
||||
data=json_encode(payload),
|
||||
headers=headers)
|
||||
r.raise_for_status()
|
||||
|
||||
try:
|
||||
result = r.json()
|
||||
except:
|
||||
print('Oops, error: %s' % (r.text))
|
||||
sys.exit(1)
|
||||
|
||||
paste_status = result['status']
|
||||
if paste_status:
|
||||
paste_message = result['message']
|
||||
print("Oops, error: %s" % paste_message)
|
||||
sys.exit(1)
|
||||
|
||||
paste_id = result['id']
|
||||
paste_url_id = result['url']
|
||||
paste_deletetoken = result['deletetoken']
|
||||
|
||||
print('Delete paste: %s/?pasteid=%s&deletetoken=%s' %
|
||||
(paste_url, paste_id, paste_deletetoken))
|
||||
print('')
|
||||
print('### Paste (%s): %s%s#%s' %
|
||||
(paste_formatter,
|
||||
paste_url,
|
||||
paste_url_id,
|
||||
base58_encode(paste_passphrase).decode('utf-8')))
|
||||
|
||||
|
||||
def guess_lang_formatter(paste_plaintext, paste_filename=None):
|
||||
paste_formatter = 'plaintext'
|
||||
|
||||
# Map numpy to python because the numpy lexer gives false positives
|
||||
# when guessing.
|
||||
lexer_lang_map = {'numpy': 'python'}
|
||||
|
||||
# If we have a filename, try guessing using the more reliable
|
||||
# guess_lexer_for_filename function.
|
||||
# If that fails, try the guess_lexer function on the code.
|
||||
lang = None
|
||||
if paste_filename:
|
||||
try:
|
||||
lang = guess_lexer_for_filename(paste_filename,
|
||||
paste_plaintext).name.lower()
|
||||
except ClassNotFound:
|
||||
print("No guess by filename")
|
||||
pass
|
||||
else:
|
||||
try:
|
||||
lang = guess_lexer(paste_plaintext).name.lower()
|
||||
except ClassNotFound:
|
||||
pass
|
||||
|
||||
if lang:
|
||||
if lang == 'markdown':
|
||||
paste_formatter = 'markdown'
|
||||
if lang != 'text only':
|
||||
paste_formatter = 'syntaxhighlighting'
|
||||
|
||||
return paste_formatter
|
||||
|
||||
|
||||
def main():
|
||||
parser = OptionParser()
|
||||
|
||||
parser.add_option("-f", "--file", dest="filename",
|
||||
help="Read from a file instead of stdin",
|
||||
metavar="FILE")
|
||||
parser.add_option("-p", "--password", dest="password",
|
||||
help="Create a password protected paste",
|
||||
metavar="PASSWORD")
|
||||
parser.add_option("-e", "--expire",
|
||||
action="store", dest="expire", default="1day",
|
||||
choices=["5min",
|
||||
"10min",
|
||||
"1hour",
|
||||
"1day",
|
||||
"1week",
|
||||
"1month",
|
||||
"1year",
|
||||
"never"],
|
||||
help="Expiration time of the paste (default: 1day)")
|
||||
parser.add_option("-s", "--sourcecode",
|
||||
action="store_true", dest="source", default=False,
|
||||
help="Use source code highlighting")
|
||||
parser.add_option("-m", "--markdown",
|
||||
action="store_true", dest="markdown", default=False,
|
||||
help="Parse paste as markdown")
|
||||
parser.add_option("-b", "--burn",
|
||||
action="store_true", dest="burn", default=False,
|
||||
help="Burn paste after reading")
|
||||
parser.add_option("-o", "--opendiscussion",
|
||||
action="store_true", dest="opendiscussion",
|
||||
default=False,
|
||||
help="Allow discussion for the paste")
|
||||
parser.add_option("-a", "--attachment", dest="attachment",
|
||||
help="Specify path to a file to attachment to the paste",
|
||||
metavar="FILE")
|
||||
|
||||
(options, args) = parser.parse_args()
|
||||
|
||||
paste_url = 'https://cpaste.org'
|
||||
paste_formatter = 'plaintext'
|
||||
paste_compress = True
|
||||
paste_expire = '1day'
|
||||
paste_opendiscussion = 0
|
||||
paste_burn = 0
|
||||
paste_password = None
|
||||
paste_attachment_name = None
|
||||
paste_attachment = None
|
||||
|
||||
if options.filename:
|
||||
f = open(options.filename)
|
||||
if not f:
|
||||
print("Oops, could not open file!")
|
||||
|
||||
paste_plaintext = f.read()
|
||||
f.close()
|
||||
else:
|
||||
paste_plaintext = sys.stdin.read()
|
||||
|
||||
if not paste_plaintext:
|
||||
print("Oops, we have no data")
|
||||
sys.exit(1)
|
||||
|
||||
if options.burn:
|
||||
paste_burn = 1
|
||||
|
||||
if options.opendiscussion:
|
||||
paste_opendiscussion = 1
|
||||
|
||||
if options.source:
|
||||
paste_formatter = 'syntaxhighlighting'
|
||||
elif options.markdown:
|
||||
paste_formatter = 'markdown'
|
||||
elif guess_lang:
|
||||
paste_formatter = guess_lang_formatter(paste_plaintext,
|
||||
options.filename)
|
||||
|
||||
if options.expire:
|
||||
paste_expire = options.expire
|
||||
|
||||
if options.password:
|
||||
paste_password = options.password
|
||||
|
||||
if options.attachment:
|
||||
paste_attachment_name = os.path.basename(options.attachment)
|
||||
mime = guess_type(options.attachment, strict=False)[0]
|
||||
if not mime:
|
||||
mime = 'application/octet-stream'
|
||||
|
||||
f = open(options.attachment, mode='rb')
|
||||
if not f:
|
||||
print("Oops, could not open file for attachment!")
|
||||
|
||||
data = f.read()
|
||||
f.close()
|
||||
|
||||
paste_attachment = 'data:%s;base64,' % (mime)
|
||||
paste_attachment += base64.b64encode(data).decode('utf-8')
|
||||
|
||||
privatebin_send(paste_url,
|
||||
paste_password,
|
||||
paste_plaintext,
|
||||
paste_formatter,
|
||||
paste_attachment_name,
|
||||
paste_attachment,
|
||||
paste_compress,
|
||||
paste_burn,
|
||||
paste_opendiscussion,
|
||||
paste_expire)
|
||||
|
||||
sys.exit(0)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
49
pastedb/example_post.py
Executable file
49
pastedb/example_post.py
Executable file
@ -0,0 +1,49 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import requests
|
||||
import json
|
||||
|
||||
# Define your JSON object
|
||||
my_json = {'name': 'John', 'age': 30, 'city': 'New York'}
|
||||
|
||||
# Define the API endpoints for the services you want to use
|
||||
gist_url = 'https://api.github.com/gists'
|
||||
hastebin_url = 'https://hastebin.com/documents'
|
||||
pastie_url = 'https://pastie.io/documents'
|
||||
ghostbin_url = 'https://ghostbin.com/paste/new'
|
||||
codepad_url = 'https://codepad.co/snippet_api'
|
||||
termbin_url = 'https://termbin.com/documents'
|
||||
|
||||
# Define a function to upload the JSON object to each service
|
||||
def upload_to_service(url, data):
|
||||
response = requests.post(url, json=data)
|
||||
if response.status_code == 200:
|
||||
return response.json().get('key') or response.json().get('id')
|
||||
else:
|
||||
return None
|
||||
|
||||
# Upload the JSON object to each service and print the URLs
|
||||
gist_key = upload_to_service(gist_url, {'public': True, 'files': {'my_json.json': {'content': json.dumps(my_json)}}})
|
||||
if gist_key:
|
||||
print(f'Gist URL: https://gist.github.com/{gist_key}')
|
||||
|
||||
hastebin_key = upload_to_service(hastebin_url, json.dumps(my_json))
|
||||
if hastebin_key:
|
||||
print(f'Hastebin URL: https://hastebin.com/{hastebin_key}.json')
|
||||
|
||||
pastie_key = upload_to_service(pastie_url, json.dumps(my_json))
|
||||
if pastie_key:
|
||||
print(f'Pastie URL: https://pastie.io/{pastie_key}')
|
||||
|
||||
ghostbin_key = upload_to_service(ghostbin_url, {'text': json.dumps(my_json)})
|
||||
if ghostbin_key:
|
||||
print(f'Ghostbin URL: https://ghostbin.com/{ghostbin_key}')
|
||||
|
||||
codepad_key = upload_to_service(codepad_url, {'code': json.dumps(my_json)})
|
||||
if codepad_key:
|
||||
print(f'Codepad URL: https://codepad.co/{codepad_key}.json')
|
||||
|
||||
termbin_key = upload_to_service(termbin_url, json.dumps(my_json))
|
||||
if termbin_key:
|
||||
print(f'Termbin URL: https://termbin.com/{termbin_key}')
|
||||
|
15
pastedb/example_post_controlc.py
Normal file
15
pastedb/example_post_controlc.py
Normal file
@ -0,0 +1,15 @@
|
||||
import requests
|
||||
|
||||
url = "https://controlc.com/api.php?action=save"
|
||||
|
||||
# Text to be pasted
|
||||
text = "Hello, world!"
|
||||
|
||||
# Create a new paste
|
||||
response = requests.post(url, data={"c": text})
|
||||
|
||||
# Extract the URL of the newly created paste
|
||||
paste_url = response.text
|
||||
|
||||
print(f"Paste URL: {paste_url}")
|
||||
|
18
pastedb/example_post_hastebin.py
Normal file
18
pastedb/example_post_hastebin.py
Normal file
@ -0,0 +1,18 @@
|
||||
import requests
|
||||
import json
|
||||
|
||||
# Define your JSON object
|
||||
my_json = {'name': 'John', 'age': 30, 'city': 'New York'}
|
||||
|
||||
# Define the API endpoint for Hastebin
|
||||
hastebin_url = 'https://hastebin.com/documents'
|
||||
|
||||
# Upload the JSON object to Hastebin and get the URL
|
||||
response = requests.post(hastebin_url, data=json.dumps(my_json))
|
||||
if response.status_code == 200:
|
||||
key = response.json()['key']
|
||||
hastebin_url = f'https://hastebin.com/{key}'
|
||||
print(f'JSON object uploaded to Hastebin: {hastebin_url}')
|
||||
else:
|
||||
print('Error uploading JSON object to Hastebin')
|
||||
|
52
pastedb/example_post_multi_backends.py
Normal file
52
pastedb/example_post_multi_backends.py
Normal file
@ -0,0 +1,52 @@
|
||||
import json
|
||||
import requests
|
||||
import hashlib
|
||||
import time
|
||||
import random
|
||||
|
||||
# generate random name for upload
|
||||
def generate_name():
|
||||
timestamp = str(int(time.time()))
|
||||
rand = ''.join(random.choices('abcdefghijklmnopqrstuvwxyz', k=3))
|
||||
return timestamp + '-' + rand
|
||||
|
||||
# define json object to upload
|
||||
data = {
|
||||
"name": "Alice",
|
||||
"age": 25,
|
||||
"city": "New York"
|
||||
}
|
||||
|
||||
# add timestamp and md5sum to the json object
|
||||
data['timestamp'] = int(time.time())
|
||||
json_str = json.dumps(data)
|
||||
hash_md5 = hashlib.md5(json_str.encode())
|
||||
data['md5sum'] = hash_md5.hexdigest()
|
||||
|
||||
# upload to pastie
|
||||
pastie_url = 'https://www.pastie.io/documents'
|
||||
pastie_resp = requests.post(pastie_url, data=json_str.encode(), headers={'Content-Type': 'application/json'})
|
||||
pastie_key = pastie_resp.json()['key']
|
||||
pastie_name = 'pastie-' + generate_name()
|
||||
|
||||
# store pastie info in dictionary
|
||||
paste_dict = {}
|
||||
paste_dict[pastie_name] = {'service': 'pastie', 'key': pastie_key, 'md5sum': data['md5sum']}
|
||||
|
||||
# upload to termbin
|
||||
termbin_url = 'https://termbin.com'
|
||||
termbin_resp = requests.post(termbin_url, data=json_str.encode(), headers={'Content-Type': 'text/plain'})
|
||||
termbin_key = termbin_resp.text.strip()
|
||||
termbin_name = 'termbin-' + generate_name()
|
||||
|
||||
# store termbin info in dictionary
|
||||
paste_dict[termbin_name] = {'service': 'termbin', 'key': termbin_key, 'md5sum': data['md5sum']}
|
||||
|
||||
# write paste dictionary to file
|
||||
with open('paste_dict.json', 'a') as f:
|
||||
f.write(json.dumps(paste_dict, indent=4))
|
||||
f.write('\n')
|
||||
|
||||
# print out paste dictionary
|
||||
print(json.dumps(paste_dict, indent=4))
|
||||
|
71
pastedb/example_post_multi_handling.py
Normal file
71
pastedb/example_post_multi_handling.py
Normal file
@ -0,0 +1,71 @@
|
||||
import json
|
||||
import hashlib
|
||||
import requests
|
||||
import time
|
||||
import random
|
||||
|
||||
|
||||
# Function to generate a unique name for the upload
|
||||
def generate_upload_name():
|
||||
timestamp = int(time.time())
|
||||
rand_str = ''.join(random.choices('abcdefghijklmnopqrstuvwxyz', k=3))
|
||||
return f"{timestamp}-{rand_str}"
|
||||
|
||||
|
||||
# Function to upload JSON data to termbin
|
||||
def upload_to_termbin(data):
|
||||
try:
|
||||
resp = requests.post('https://termbin.com', data=data.encode('utf-8'), timeout=5)
|
||||
if resp.status_code == 200:
|
||||
key = resp.text.strip()
|
||||
md5sum = hashlib.md5(data.encode('utf-8')).hexdigest()
|
||||
return {'service': 'termbin', 'key': key, 'md5sum': md5sum}
|
||||
else:
|
||||
print(f"Failed to upload to termbin.com. Response code: {resp.status_code}")
|
||||
return None
|
||||
except requests.exceptions.RequestException as e:
|
||||
print(f"Failed to upload to termbin.com. Error: {str(e)}")
|
||||
return None
|
||||
|
||||
|
||||
# Function to upload JSON data to pastie
|
||||
def upload_to_pastie(data):
|
||||
try:
|
||||
resp = requests.post('https://pastie.io/documents', data=data.encode('utf-8'), timeout=5)
|
||||
if resp.status_code == 200:
|
||||
key = resp.json()['key']
|
||||
md5sum = hashlib.md5(data.encode('utf-8')).hexdigest()
|
||||
return {'service': 'pastie', 'key': key, 'md5sum': md5sum}
|
||||
else:
|
||||
print(f"Failed to upload to pastie.io. Response code: {resp.status_code}")
|
||||
return None
|
||||
except requests.exceptions.RequestException as e:
|
||||
print(f"Failed to upload to pastie.io. Error: {str(e)}")
|
||||
return None
|
||||
|
||||
|
||||
# Upload data to both termbin and pastie
|
||||
def upload_data_to_services(data):
|
||||
upload_name = generate_upload_name()
|
||||
print(f"\nUploading data to services with name {upload_name}...\n")
|
||||
paste_dict = {'name': upload_name}
|
||||
services = {'termbin': upload_to_termbin, 'pastie': upload_to_pastie}
|
||||
for service, upload_function in services.items():
|
||||
result = upload_function(data)
|
||||
if result is not None:
|
||||
paste_dict[service] = result
|
||||
print(f"JSON object uploaded to {service}: https://{service}.com/{result['key']}")
|
||||
with open('paste_dict.json', 'a+') as f:
|
||||
f.write(json.dumps(paste_dict) + '\n')
|
||||
print(f"\nUploads completed successfully.")
|
||||
|
||||
|
||||
# Test function
|
||||
def test():
|
||||
data = '{"name": "John Doe", "age": 30, "city": "New York"}'
|
||||
upload_data_to_services(data)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
test()
|
||||
|
19
pastedb/example_post_paste2org.py
Normal file
19
pastedb/example_post_paste2org.py
Normal file
@ -0,0 +1,19 @@
|
||||
import requests
|
||||
|
||||
# Set the URL of the Paste2.org API endpoint
|
||||
url = 'https://paste2.org/'
|
||||
|
||||
# Get the input from the user
|
||||
text = 'Enter text to upload'
|
||||
|
||||
# Send the HTTP POST request to the Paste2.org API with the text as the request body
|
||||
response = requests.post(url, data=text.encode('utf-8'))
|
||||
|
||||
# Get the URL of the uploaded text from the response JSON
|
||||
if response.status_code == 200:
|
||||
paste_id = response.json().get('id')
|
||||
paste_url = f'https://paste2.org/{paste_id}'
|
||||
print('Uploaded to:', paste_url)
|
||||
else:
|
||||
print('Error uploading text:', response.text)
|
||||
|
46
pastedb/example_post_pastie.py
Normal file
46
pastedb/example_post_pastie.py
Normal file
@ -0,0 +1,46 @@
|
||||
import requests
|
||||
import json
|
||||
import os
|
||||
import time
|
||||
import hashlib
|
||||
|
||||
# Define your JSON object with a unique timestamp and MD5 hash
|
||||
my_json = {'timestamp': int(time.time()), 'name': 'John', 'age': 30, 'city': 'New York'}
|
||||
json_str = json.dumps(my_json, sort_keys=True)
|
||||
md5_hash = hashlib.md5(json_str.encode()).hexdigest()
|
||||
my_json['md5'] = md5_hash
|
||||
|
||||
# Define the API endpoint for Pastie
|
||||
pastie_url = 'https://pastie.io/documents'
|
||||
|
||||
# Upload the JSON object to Pastie and get the URL
|
||||
response = requests.post(pastie_url, data=json.dumps(my_json))
|
||||
if response.status_code == 200:
|
||||
key = response.json()['key']
|
||||
pastie_url = f'https://pastie.io/{key}'
|
||||
print(f'JSON object uploaded to Pastie: {pastie_url}')
|
||||
|
||||
# Add the URL and service name to the dictionary for later querying
|
||||
paste_dict = {}
|
||||
if os.path.isfile('paste_dict.json'):
|
||||
with open('paste_dict.json', 'r') as f:
|
||||
paste_dict = json.load(f)
|
||||
paste_dict[key] = {'url': pastie_url, 'service': 'Pastie'}
|
||||
|
||||
# Write the URL dictionary to a file on disk
|
||||
with open('paste_dict.json', 'w') as f:
|
||||
json.dump(paste_dict, f, indent=4)
|
||||
else:
|
||||
print('Error uploading JSON object to Pastie')
|
||||
|
||||
# Query the dictionary for the URL of a specific paste
|
||||
if os.path.isfile('paste_dict.json'):
|
||||
with open('paste_dict.json', 'r') as f:
|
||||
paste_dict = json.load(f)
|
||||
key_to_query = key if key in paste_dict else list(paste_dict.keys())[0]
|
||||
url = paste_dict[key_to_query]['url']
|
||||
service = paste_dict[key_to_query]['service']
|
||||
print(f'URL for paste with key {key_to_query} (stored on {service}): {url}')
|
||||
else:
|
||||
print('URL dictionary file not found')
|
||||
|
18
pastedb/example_post_snippet.py
Normal file
@@ -0,0 +1,18 @@
import requests

url = "https://snippet.host/api/documents"

# Text to be pasted
text = "Hello, world!"

# Create a new paste
response = requests.post(url, data=text)

if response.status_code == 200:
    # Extract the URL of the newly created paste
    paste_url = f"https://snippet.host/{response.json()['key']}"
    print(f"Paste URL: {paste_url}")
else:
    # If the response code is not 200, print the response body for debugging
    print(f"Error: {response.text}")
39
pastedb/example_post_sprunge.py
Normal file
@@ -0,0 +1,39 @@
import re
import requests
import json
import hashlib

url = 'http://sprunge.us'

def upload(data):
    try:
        json_data = json.dumps(data)
        md5sum = hashlib.md5(json_data.encode('utf-8')).hexdigest()

        # Send the HTTP POST request to the Sprunge API
        response = requests.post(url, data={'sprunge': json_data})
        if response.status_code == 200:

            # Get the URL of the uploaded text from the response body
            sprunge_url = response.text.strip()

            print('Uploaded to:', sprunge_url)

            # Use a regular expression to extract the random ID from the URL
            match = re.match(r'^http://sprunge\.us/(\w+)$', sprunge_url)
            if match:
                random_id = match.group(1)
                print('Random ID:', random_id)
                key = "sprunge_" + random_id + '_' + md5sum[:5]
            else:
                print('Invalid Sprunge URL:', sprunge_url)
                return None

            return {
                "service": "sprunge",
                "name": key,
                "key": sprunge_url,
                "md5sum": md5sum
            }
        else:
            return None
    except requests.exceptions.RequestException:
        return None
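Fetching a sprunge paste back is just an HTTP GET of the URL returned by upload(); a minimal sketch working on the dict that upload() returns:

import requests

def get(trace):
    # trace is the dict returned by upload(); 'key' holds the sprunge URL
    response = requests.get(trace['key'])
    response.raise_for_status()
    return response.text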
5
pastedb/example_post_zerobin.py
Normal file
@@ -0,0 +1,5 @@
import requests

data = {"data": 'print("Hello!")'}
r = requests.post("https://zerobin.net/?paste", data=data)
print(f"URL: {r.text}")
331
pastedb/lodgeit.py
Executable file
@@ -0,0 +1,331 @@
#!/usr/bin/env python
"""
    LodgeIt!
    ~~~~~~~~

    A script that pastes stuff into the lodgeit pastebin.

    .lodgeitrc / _lodgeitrc
    -----------------------

    Under UNIX create a file called ``~/.lodgeitrc``, under Windows
    create a file ``%APPDATA%/_lodgeitrc`` to override defaults::

        language=default_language
        clipboard=true/false
        open_browser=true/false
        encoding=fallback_charset

    :authors: 2007-2010 Georg Brandl <georg@python.org>,
              2006 Armin Ronacher <armin.ronacher@active-4.com>,
              2006 Matt Good <matt@matt-good.net>,
              2005 Raphael Slinckx <raphael@slinckx.net>
"""
from __future__ import print_function
import os
import sys
from six import text_type
from optparse import OptionParser


SCRIPT_NAME = os.path.basename(sys.argv[0])
VERSION = '0.3'
SETTING_KEYS = ['author', 'title', 'language', 'private', 'clipboard',
                'open_browser']

# global server proxy
_xmlrpc_service = None
_server_name = None


def fail(msg, code):
    """Bail out with an error message."""
    print('ERROR: %s' % msg, file=sys.stderr)
    sys.exit(code)


def load_default_settings():
    """Load the defaults from the lodgeitrc file."""
    settings = {
        'language': None,
        'clipboard': True,
        'open_browser': False,
        'encoding': 'iso-8859-15',
        'server_name': 'http://paste.openstack.org',
    }
    rcfile = None
    if os.name == 'posix':
        rcfile = os.path.expanduser('~/.lodgeitrc')
    elif os.name == 'nt' and 'APPDATA' in os.environ:
        rcfile = os.path.expandvars(r'$APPDATA\_lodgeitrc')
    if rcfile:
        try:
            f = open(rcfile)
            for line in f:
                if line.strip()[:1] in '#;':
                    continue
                p = line.split('=', 1)
                if len(p) == 2:
                    key = p[0].strip().lower()
                    if key in settings:
                        if key in ('clipboard', 'open_browser'):
                            settings[key] = p[1].strip().lower() in \
                                ('true', '1', 'on', 'yes')
                        else:
                            settings[key] = p[1].strip()
            f.close()
        except IOError:
            pass
    settings['tags'] = []
    settings['title'] = None
    return settings


def make_utf8(text, encoding):
    """Convert a text to UTF-8, brute-force."""
    try:
        u = text_type(text, 'utf-8')
        uenc = 'utf-8'
    except UnicodeError:
        try:
            u = text_type(text, encoding)
            uenc = 'utf-8'
        except UnicodeError:
            u = text_type(text, 'iso-8859-15', 'ignore')
            uenc = 'iso-8859-15'
    try:
        import chardet
    except ImportError:
        return u.encode('utf-8')
    d = chardet.detect(text)
    if d['encoding'] == uenc:
        return u.encode('utf-8')
    return text_type(text, d['encoding'], 'ignore').encode('utf-8')


def get_xmlrpc_service():
    """Create the XMLRPC server proxy and cache it."""
    global _xmlrpc_service
    import xmlrpclib
    if _xmlrpc_service is None:
        try:
            _xmlrpc_service = xmlrpclib.ServerProxy(_server_name + 'xmlrpc/',
                                                    allow_none=True)
        except Exception as err:
            fail('Could not connect to Pastebin: %s' % err, -1)
    return _xmlrpc_service


def copy_url(url):
    """Copy the url into the clipboard."""
    # try windows first
    try:
        import win32clipboard
    except ImportError:
        # then give pbcopy a try. do that before gtk because
        # gtk might be installed on os x but nobody is interested
        # in the X11 clipboard there.
        from subprocess import Popen, PIPE
        for prog in 'pbcopy', 'xclip':
            try:
                client = Popen([prog], stdin=PIPE)
            except OSError:
                continue
            else:
                client.stdin.write(url)
                client.stdin.close()
                client.wait()
                break
        else:
            try:
                import pygtk
                pygtk.require('2.0')
                import gtk
                import gobject
            except ImportError:
                return
            gtk.clipboard_get(gtk.gdk.SELECTION_CLIPBOARD).set_text(url)
            gobject.idle_add(gtk.main_quit)
            gtk.main()
    else:
        win32clipboard.OpenClipboard()
        win32clipboard.EmptyClipboard()
        win32clipboard.SetClipboardText(url)
        win32clipboard.CloseClipboard()


def open_webbrowser(url):
    """Open a new browser window."""
    import webbrowser
    webbrowser.open(url)


def language_exists(language):
    """Check if a language alias exists."""
    xmlrpc = get_xmlrpc_service()
    langs = xmlrpc.pastes.getLanguages()
    return language in langs


def get_mimetype(data, filename):
    """Try to get MIME type from data."""
    try:
        import gnomevfs
    except ImportError:
        from mimetypes import guess_type
        if filename:
            return guess_type(filename)[0]
    else:
        if filename:
            return gnomevfs.get_mime_type(os.path.abspath(filename))
        return gnomevfs.get_mime_type_for_data(data)


def print_languages():
    """Print a list of all supported languages, with description."""
    xmlrpc = get_xmlrpc_service()
    languages = xmlrpc.pastes.getLanguages().items()
    languages.sort(key=lambda a: a[1].lower())
    print('Supported Languages:')
    for alias, name in languages:
        print('    %-30s%s' % (alias, name))


def download_paste(uid):
    """Download a paste given by ID."""
    xmlrpc = get_xmlrpc_service()
    paste = xmlrpc.pastes.getPaste(uid)
    if not paste:
        fail('Paste "%s" does not exist.' % uid, 5)
    print(paste['code'].encode('utf-8'))


def create_paste(code, language, filename, mimetype, private):
    """Create a new paste."""
    xmlrpc = get_xmlrpc_service()
    rv = xmlrpc.pastes.newPaste(language, code, None, filename, mimetype,
                                private)
    if not rv:
        fail('Could not create paste. Something went wrong '
             'on the server side.', 4)
    return rv


def compile_paste(filenames, langopt):
    """Create a single paste out of zero, one or multiple files."""
    def read_file(f):
        try:
            return f.read()
        finally:
            f.close()
    mime = ''
    lang = langopt or ''
    if not filenames:
        data = read_file(sys.stdin)
        print('Pasting...')
        if not langopt:
            mime = get_mimetype(data, '') or ''
        fname = ''
    elif len(filenames) == 1:
        fname = filenames[0]
        data = read_file(open(filenames[0], 'rb'))
        if not langopt:
            mime = get_mimetype(data, filenames[0]) or ''
    else:
        result = []
        for fname in filenames:
            data = read_file(open(fname, 'rb'))
            if langopt:
                result.append('### %s [%s]\n\n' % (fname, langopt))
            else:
                result.append('### %s\n\n' % fname)
            result.append(data)
            result.append('\n\n')
        data = ''.join(result)
        lang = 'multi'
        fname = ''
    return data, lang, fname, mime


def main():
    """Main script entry point."""
    global _server_name

    usage = ('Usage: %%prog [options] [FILE ...]\n\n'
             'Read the files and paste their contents to LodgeIt pastebin.\n'
             'If no file is given, read from standard input.\n'
             'If multiple files are given, they are put into a single paste.')
    parser = OptionParser(usage=usage)

    settings = load_default_settings()

    parser.add_option('-v', '--version', action='store_true',
                      help='Print script version')
    parser.add_option('-L', '--languages', action='store_true', default=False,
                      help='Retrieve a list of supported languages')
    parser.add_option('-l', '--language', default=settings['language'],
                      help='Used syntax highlighter for the file')
    parser.add_option('-e', '--encoding', default=settings['encoding'],
                      help='Specify the encoding of a file (default is '
                           'utf-8 or guessing if available)')
    parser.add_option('-b', '--open-browser', dest='open_browser',
                      action='store_true',
                      default=settings['open_browser'],
                      help='Open the paste in a web browser')
    parser.add_option('-p', '--private', action='store_true', default=False,
                      help='Paste as private')
    parser.add_option('--no-clipboard', dest='clipboard',
                      action='store_false',
                      default=settings['clipboard'],
                      help="Don't copy the url into the clipboard")
    parser.add_option('--download', metavar='UID',
                      help='Download a given paste')
    parser.add_option('-s', '--server', default=settings['server_name'],
                      dest='server_name',
                      help="Specify the pastebin to send data")
    opts, args = parser.parse_args()

    # The global available server name
    _server_name = opts.server_name
    if not _server_name.endswith('/'):
        _server_name += '/'

    # special modes of operation:
    # - paste script version
    if opts.version:
        print('%s: version %s' % (SCRIPT_NAME, VERSION))
        sys.exit()
    # - print list of languages
    elif opts.languages:
        print_languages()
        sys.exit()
    # - download Paste
    elif opts.download:
        download_paste(opts.download)
        sys.exit()

    # check language if given
    if opts.language and not language_exists(opts.language):
        fail('Language %s is not supported.' % opts.language, 3)

    # load file(s)
    try:
        data, language, filename, mimetype = compile_paste(args, opts.language)
    except Exception as err:
        fail('Error while reading the file(s): %s' % err, 2)
    if not data:
        fail('Aborted, no content to paste.', 4)

    # create paste
    code = make_utf8(data, opts.encoding)
    pid = create_paste(code, language, filename, mimetype, opts.private)
    url = '%sshow/%s/' % (_server_name, pid)
    print(url)
    if opts.open_browser:
        open_webbrowser(url)
    if opts.clipboard:
        copy_url(url)


if __name__ == '__main__':
    sys.exit(main())
69
pastedb/lodgeit_python3.py
Executable file
@@ -0,0 +1,69 @@
#!/usr/bin/env python3
"""
    LodgeIt!
    ~~~~~~~~

    A script that pastes stuff into the lodgeit pastebin.
"""
import os
import sys
import argparse
import urllib.parse
import urllib.request

VERSION = '0.3'
SERVER_NAME = 'http://paste.openstack.org/'

def upload_paste(paste_content, title=None, language=None, private=None):
    """
    Uploads a paste to LodgeIt!

    :param paste_content: the content of the paste to upload
    :param title: the title of the paste (optional)
    :param language: the language of the paste (optional)
    :param private: whether the paste should be private (optional)
    :return: the URL of the uploaded paste
    """
    # build the POST data (urlencode handles the UTF-8 encoding of the whole form below)
    data = {
        'content': paste_content,
        'format': 'text',
    }
    if title is not None:
        data['name'] = title
    if language is not None:
        data['language'] = language
    if private is not None:
        data['private'] = private

    # make the request
    url = urllib.parse.urljoin(SERVER_NAME, '/pastes')
    request = urllib.request.Request(url, data=urllib.parse.urlencode(data).encode('utf-8'))
    response = urllib.request.urlopen(request)

    # parse the response and return the URL of the new paste
    location = response.getheader('Location')
    if location is None:
        raise ValueError('Could not find the URL of the new paste')
    return location

def main():
    # parse the command-line arguments
    parser = argparse.ArgumentParser(description='Upload a paste to LodgeIt!')
    parser.add_argument('filename', help='the name of the file to upload')
    parser.add_argument('--title', help='the title of the paste')
    parser.add_argument('--language', help='the language of the paste')
    parser.add_argument('--private', action='store_true', help='make the paste private')
    args = parser.parse_args()

    # read the content of the file to upload
    with open(args.filename, 'r') as f:
        paste_content = f.read()

    # upload the paste and print the URL of the new paste
    url = upload_paste(paste_content, args.title, args.language, args.private)
    print(url)

if __name__ == '__main__':
    main()
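A hedged usage sketch for upload_paste() above, assuming this file is importable as a module named lodgeit_python3; whether paste.openstack.org really answers with a Location header is the script's own assumption, not verified here:

from lodgeit_python3 import upload_paste  # assumed module name

try:
    url = upload_paste('print("hello")', title='hello.py', language='python')
    print('Paste created at:', url)
except Exception as exc:
    print('Upload failed:', exc)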
30
pastedb/old_upload.py
Normal file
@@ -0,0 +1,30 @@
import json
import os
import time
import hashlib
from services import pastie, dpaste, rentry, defau, sprunge, opendev
from data import data

def save(payload, name):
    # Upload to the available services; `name` is the human-readable trace id for this run
    paste_dict = {'name': name}
    successes = []
    failures = []
    for service in [defau]:
        try:
            result = service.upload(payload)
            data.add_data(result["service"], result["key"], result["md5sum"])
            successes.append(result['name'])
        except Exception as e:
            failures.append(f"{service.__name__}: {str(e)}")

    # Print upload results
    print(f"Upload successful to {len(successes)}/{len(successes)+len(failures)} services:")
    for service_name in successes:
        print(f"- {service_name}")
    if failures:
        print("Upload failed to the following services:")
        for error in failures:
            print(f"- {error}")

    print(f"Your paste trace is: {name}")
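A sketch of how save() above seems meant to be called; the '<timestamp>-<suffix>' trace-name format mirrors the names seen in the paste_dict.json files in this diff and is an assumption:

import time

payload = {'timestamp': int(time.time()), 'note': 'example payload'}
trace_name = f"{int(time.time())}-demo"  # assumed "<timestamp>-<suffix>" format
save(payload, trace_name)  # assumes this runs in the same module as save()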
26
pastedb/opendev.py
Executable file
@@ -0,0 +1,26 @@
#!/usr/bin/env python3
import sys
import requests

url = 'https://paste.opendev.org/json/'

# read input from stdin
input_str = sys.stdin.read()

# create data for new paste
data = {
    'language': '',
    'code': input_str,
    'private': False
}

# send request to create new paste
response = requests.post(url + '?method=pastes.newPaste', json=data)

# extract ID of newly created paste from the JSON response
paste_id = response.json()['data']
paste_url = f'https://paste.opendev.org/show/{paste_id}'

# print URL of newly created paste
print(paste_url)
45
pastedb/opendev_2nd.py
Executable file
@@ -0,0 +1,45 @@
#!/usr/bin/env python3

import requests
import json
import hashlib

url = 'https://paste.opendev.org/json/'

# sample payload (hard-coded here instead of reading from stdin)
data = '{ "name": "joe", "age": 55}'

def upload(data):
    try:
        content = json.dumps(data)

        # create JSON payload for new paste
        payload = {
            'language': 'text',
            'code': content,
            'private': False
            #'expire': '1day'
        }

        # send request to create new paste
        response = requests.post(url + '?method=pastes.newPaste', json=payload)

        status = response.status_code
        paste_id = response.json()['data']

        if status == 200:
            md5sum = hashlib.md5(content.encode('utf-8')).hexdigest()

            return {
                'service': 'opendev',
                'key': paste_id,
                'md5sum': md5sum,
                'name': 'opendev_' + str(paste_id) + '_' + md5sum[:5]
            }
        else:
            return None
    except requests.exceptions.RequestException:
        return None

# we can get the paste back by:
## $ > curl -d '{"paste_id":819463}' -H 'Content-Type: application/json' https://paste.opendev.org/json/?method=pastes.getPaste |jq .data.code
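The curl one-liner in the comment above, rewritten as a short Python sketch; same endpoint and parameters, with the paste_id as a placeholder:

import requests

def get_paste(paste_id):
    # POST {"paste_id": ...} to pastes.getPaste and return .data.code, as the curl example does
    response = requests.post(
        'https://paste.opendev.org/json/?method=pastes.getPaste',
        json={'paste_id': paste_id},
    )
    response.raise_for_status()
    return response.json()['data']['code']

# print(get_paste(819463))  # placeholder id from the comment above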
12
pastedb/paste_dict.json
Normal file
@@ -0,0 +1,12 @@
{
    "name": "1680462198-dxx",
    "pastie": {
        "service": "pastie",
        "key": "jpllic",
        "md5sum": "5e87ba1e0d151a399c0418343842b94d"
    },
    "ogqfsh": {
        "url": "https://pastie.io/ogqfsh",
        "service": "Pastie"
    }
}
1
pastedb/pastedb01/data.json
Normal file
@@ -0,0 +1 @@
e71ad7d7815eef99ea4545da4ea22e9977c10c62c8e57cc794228e553c44e22f80cf5733aa00a53b6794b1e2732ab093eb63914155137988e58f8906acc5b57d02a981a2320a5b72a99ceaa5b68e2401
0
pastedb/pastedb01/data/__init__.py
Normal file
BIN
pastedb/pastedb01/data/__pycache__/__init__.cpython-310.pyc
Normal file
Binary file not shown.
BIN
pastedb/pastedb01/data/__pycache__/data.cpython-310.pyc
Normal file
Binary file not shown.
122
pastedb/pastedb01/data/data.py
Normal file
@@ -0,0 +1,122 @@
"""
This module provides functions for managing and saving data in a dictionary object. It also supports encryption and decryption of the data when saving and loading it to/from disk. The functions in this module include:

set_encryption_key(key: bytes): sets the encryption key to be used for encrypting the data.
add_data(service_tag: str, key: str, md5sum: str) -> str: adds data for a service to the data dictionary.
save_data(filename: str, data: dict, key=None): writes the data dictionary to disk as a JSON object. If a key is provided, it encrypts the data using the AES symmetric encryption algorithm before writing to disk.
encrypt_data(data: bytes) -> bytes: encrypts data using the AES symmetric encryption algorithm.
load_data(file_path: str) -> dict: loads the data from a file and returns it as a dictionary. If the file is encrypted, it uses the module encryption key to decrypt it before returning the data.

This module depends on the following packages: hashlib, pycryptodome (Cryptodome), and collections.
"""
import os
import json
import hashlib
import time
from collections import defaultdict
from Cryptodome.Cipher import AES
from Cryptodome.Util import Padding

data = defaultdict(lambda: {"timestamp": 0, "services": {}})

_ENCRYPTION_KEY = None


def set_encryption_key(key: bytes):
    global _ENCRYPTION_KEY
    _ENCRYPTION_KEY = key


def add_data(service_tag: str, key: str, md5sum: str) -> str:
    """
    Adds data for a service to the `data` dictionary.

    Parameters:
    service_tag (str): A string representing the service being added.
    key (str): A string representing the key for the service being added.
    md5sum (str): A string representing the MD5 checksum for the service being added.

    Returns:
    str: A string representing the unique ID of the run that the data was added to.
    """

    # Generate a unique ID for the run
    run_id = f"run-{hashlib.sha256(str(data).encode()).hexdigest()[:6]}"
    timestamp = int(time.time())

    # Add the service data to the run
    data[run_id]["timestamp"] = timestamp
    data[run_id]["services"][service_tag] = {"key": key, "md5sum": md5sum}

    return run_id


def save_data(filename: str, data: dict, key=None):
    """
    Writes the data dictionary to disk as a JSON object.

    Parameters:
    filename (str): A string representing the filename to write the data to.
    data (dict): A dictionary representing the data to be written to disk.
    key (bytes): Optional bytes representing a key to use for encryption.
    """
    with open(filename, "w") as f:
        # Serialize the data dictionary as a JSON object
        json_data = json.dumps(data)

        # If the module key is set, encrypt the JSON data
        if _ENCRYPTION_KEY:
            # Encrypt the data using the key
            encrypted_data = encrypt_data(json_data.encode())

            # Write the encrypted data to the file as hex
            f.write(encrypted_data.hex())
        else:
            # Refuse to write the data unencrypted
            print("you need to set the encryption key first.")


def encrypt_data(data: bytes) -> bytes:
    """
    Encrypts data using the AES symmetric encryption algorithm.

    Parameters:
    data (bytes): A bytes object representing the data to be encrypted.

    Returns:
    bytes: A bytes object representing the encrypted data.
    """
    # Generate a random initialization vector (IV)
    iv = os.urandom(AES.block_size)

    # Pad the data to a multiple of the block size
    padded_data = Padding.pad(data, AES.block_size)

    # Create an AES cipher object
    cipher = AES.new(_ENCRYPTION_KEY, AES.MODE_CBC, iv)

    # Encrypt the data using CBC mode
    encrypted_data = cipher.encrypt(padded_data)

    # Prepend the IV to the encrypted data
    return iv + encrypted_data


def load_data(file_path: str, key=None) -> dict:
    """
    Load the data from a file and return it as a dictionary.

    :param file_path: The path to the file to load.
    :param key: The key to use to decrypt the file.
    :return: A dictionary representing the data from the file.
    """
    if _ENCRYPTION_KEY:
        with open(file_path, "r") as f:
            ciphertext = bytes.fromhex(f.read())

        # Mirror save_data/encrypt_data: the IV is prepended to the AES-CBC payload
        iv, encrypted_data = ciphertext[:AES.block_size], ciphertext[AES.block_size:]
        cipher = AES.new(_ENCRYPTION_KEY, AES.MODE_CBC, iv)
        plaintext = Padding.unpad(cipher.decrypt(encrypted_data), AES.block_size)
        return json.loads(plaintext.decode())
    else:
        print("you need to set the encryption key first.")
        return {}
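A hedged round-trip sketch for the module above; the import path assumes the pastedb01 package layout, and the key handling is illustrative only (a real key should not be generated and then thrown away like this):

import os
from data.data import set_encryption_key, add_data, save_data, load_data, data as store  # assumed layout

set_encryption_key(os.urandom(32))  # illustrative AES-256 key for this run
run_id = add_data('pastie', 'https://pastie.io/jpllic', '5e87ba1e0d151a399c0418343842b94d')
save_data('data.json', dict(store))
print(load_data('data.json')[run_id]['services'])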
BIN
pastedb/pastedb01/external/pastebinit_1.5.1-1_all.deb
vendored
Normal file
Binary file not shown.
161
pastedb/pastedb01/external/rentry
vendored
Executable file
@@ -0,0 +1,161 @@
#!/usr/bin/env python3

import getopt
import http.cookiejar
import sys
import urllib.parse
import urllib.request
from http.cookies import SimpleCookie
from json import loads as json_loads
from os import environ

_headers = {"Referer": 'https://rentry.co'}


class UrllibClient:
    """Simple HTTP Session Client, keeps cookies."""

    def __init__(self):
        self.cookie_jar = http.cookiejar.CookieJar()
        self.opener = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(self.cookie_jar))
        urllib.request.install_opener(self.opener)

    def get(self, url, headers={}):
        request = urllib.request.Request(url, headers=headers)
        return self._request(request)

    def post(self, url, data=None, headers={}):
        postdata = urllib.parse.urlencode(data).encode()
        request = urllib.request.Request(url, postdata, headers)
        return self._request(request)

    def _request(self, request):
        response = self.opener.open(request)
        response.status_code = response.getcode()
        response.data = response.read().decode('utf-8')
        return response


def raw(url):
    client = UrllibClient()
    return json_loads(client.get('https://rentry.co/api/raw/{}'.format(url)).data)


def new(url, edit_code, text):
    client, cookie = UrllibClient(), SimpleCookie()

    cookie.load(vars(client.get('https://rentry.co'))['headers']['Set-Cookie'])
    csrftoken = cookie['csrftoken'].value

    payload = {
        'csrfmiddlewaretoken': csrftoken,
        'url': url,
        'edit_code': edit_code,
        'text': text
    }

    return json_loads(client.post('https://rentry.co/api/new', payload, headers=_headers).data)


def edit(url, edit_code, text):
    client, cookie = UrllibClient(), SimpleCookie()

    cookie.load(vars(client.get('https://rentry.co'))['headers']['Set-Cookie'])
    csrftoken = cookie['csrftoken'].value

    payload = {
        'csrfmiddlewaretoken': csrftoken,
        'edit_code': edit_code,
        'text': text
    }

    return json_loads(client.post('https://rentry.co/api/edit/{}'.format(url), payload, headers=_headers).data)


def usage():
    print('''
Usage: rentry {new | edit | raw} {-h | --help} {-u | --url} {-p | --edit-code} text

Commands:
  new   create a new entry
  edit  edit an existing entry
  raw   get raw markdown text of an existing entry

Options:
  -h, --help                 show this help message and exit
  -u, --url URL              url for the entry, random if not specified
  -p, --edit-code EDIT-CODE  edit code for the entry, random if not specified

Examples:
  rentry new 'markdown text'               # new entry with random url and edit code
  rentry new -p pw -u example 'text'       # with custom edit code and url
  rentry edit -p pw -u example 'text'      # edit the example entry
  cat FILE | rentry new                    # read from FILE and paste it to rentry
  cat FILE | rentry edit -p pw -u example  # read from FILE and edit the example entry
  rentry raw -u example                    # get raw markdown text
  rentry raw -u https://rentry.co/example  # -u accepts absolute and relative urls
''')


if __name__ == '__main__':
    try:
        environ.pop('POSIXLY_CORRECT', None)
        opts, args = getopt.gnu_getopt(sys.argv[1:], "hu:p:", ["help", "url=", "edit-code="])
    except getopt.GetoptError as e:
        sys.exit("error: {}".format(e))

    command, url, edit_code, text = None, '', '', None

    for o, a in opts:
        if o in ("-h", "--help"):
            usage()
            sys.exit()
        elif o in ("-u", "--url"):
            url = urllib.parse.urlparse(a).path.strip('/')
        elif o in ("-p", "--edit-code"):
            edit_code = a

    command = (args[0:1] or [None])[0]
    command or sys.exit(usage())
    command in ['new', 'edit', 'raw'] or sys.exit('error: command must be new, edit or raw')

    text = (args[1:2] or [None])[0]
    if not text and command != 'raw':
        text = sys.stdin.read().strip()
        text or sys.exit('error: text is required')

    if command == 'new':
        response = new(url, edit_code, text)
        if response['status'] != '200':
            print('error: {}'.format(response['content']))
            try:
                for i in response['errors'].split('.'):
                    i and print(i)
                sys.exit(1)
            except:
                sys.exit(1)
        else:
            print('Url: {}\nEdit code: {}'.format(response['url'], response['edit_code']))

    elif command == 'edit':
        url or sys.exit('error: url is required')
        edit_code or sys.exit('error: edit code is required')

        response = edit(url, edit_code, text)
        if response['status'] != '200':
            print('error: {}'.format(response['content']))
            try:
                for i in response['errors'].split('.'):
                    i and print(i)
                sys.exit(1)
            except:
                sys.exit(1)
        else:
            print('Ok')

    elif command == 'raw':
        url or sys.exit('error: url is required')
        response = raw(url)
        if response['status'] != '200':
            sys.exit('error: {}'.format(response['content']))
        print(response['content'])
9
pastedb/pastedb01/paste_dict.json
Normal file
@@ -0,0 +1,9 @@

{
    "name": "1680593168-456462",
    "defau_4e062": {
        "service": "p.defau.lt",
        "key": "https://p.defau.lt/?QtZaVCSgOsVMmI1xS_ofqw",
        "md5sum": "4e062894f660d3c69640129f9fd0a09e"
    }
}
0
pastedb/pastedb01/services/__init__.py
Normal file
BIN
pastedb/pastedb01/services/__pycache__/__init__.cpython-310.pyc
Normal file
Binary file not shown.
BIN
pastedb/pastedb01/services/__pycache__/defau.cpython-310.pyc
Normal file
Binary file not shown.
BIN
pastedb/pastedb01/services/__pycache__/dpaste.cpython-310.pyc
Normal file
Binary file not shown.
BIN
pastedb/pastedb01/services/__pycache__/opendev.cpython-310.pyc
Normal file
Binary file not shown.
BIN
pastedb/pastedb01/services/__pycache__/paste2.cpython-310.pyc
Normal file
Binary file not shown.
BIN
pastedb/pastedb01/services/__pycache__/pastie.cpython-310.pyc
Normal file
Binary file not shown.
BIN
pastedb/pastedb01/services/__pycache__/rentry.cpython-310.pyc
Normal file
Binary file not shown.
BIN
pastedb/pastedb01/services/__pycache__/sprunge.cpython-310.pyc
Normal file
Binary file not shown.
BIN
pastedb/pastedb01/services/__pycache__/termbin.cpython-310.pyc
Normal file
Binary file not shown.
38
pastedb/pastedb01/services/defau.py
Normal file
@@ -0,0 +1,38 @@
"""
This module defines functions to interact with the p.defau.lt service for uploading and retrieving code snippets.

Functions:
- get_service_tag(): Returns a string representing the service tag for p.defau.lt.
- upload(data): Uploads a code snippet to p.defau.lt and returns a dictionary containing metadata about the upload.
- get(trace): Retrieves the code snippet associated with the provided trace from p.defau.lt.
"""

import requests
import json
import hashlib

URL = 'https://p.defau.lt/submit.php'
NAME_PREFIX = 'defau_'

def get_service_tag():
    return 'p.defau.lt'

def upload(data):
    json_data = json.dumps(data)
    md5sum = hashlib.md5(json_data.encode('utf-8')).hexdigest()

    try:
        response = requests.post(URL, data={'code': json_data})
        response.raise_for_status()
        key = response.url
        name = f"{NAME_PREFIX}{md5sum[:5]}"
        return {'name': name, 'service': get_service_tag(), 'key': key, 'md5sum': md5sum}
    except requests.exceptions.RequestException as e:
        print(e)
        return None

def get(trace):
    url = trace['key']
    response = requests.get(url)
    return response.content
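A round-trip sketch for this service module; the import path assumes the pastedb01 package layout:

from services import defau  # assumed layout

trace = defau.upload({'hello': 'world'})
if trace:
    print('stored at', trace['key'])
    print(defau.get(trace))  # fetches the raw page content back from p.defau.lt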
37
pastedb/pastedb01/services/dpaste.py
Normal file
@@ -0,0 +1,37 @@
import hashlib
import json
import requests

NAME = 'dpaste'

def get_service_tag():
    return NAME

def upload(data):
    try:
        content = json.dumps(data)
        syntax = 'json'
        expiry_days = ''

        r = requests.post('https://dpaste.com/api/v2/',
                          data={'content': content,
                                'syntax': syntax,
                                'expiry_days': expiry_days},
                          headers={'User-Agent': 'My Python Project'})

        if r.status_code == 201:
            dpaste_url = r.headers['Location']
            #print(f'JSON object uploaded to dpaste.com: {dpaste_url}')
            md5sum = hashlib.md5(content.encode('utf-8')).hexdigest()

            return {
                'service': NAME,
                'key': dpaste_url,
                'md5sum': md5sum,
                'name': NAME + '_' + dpaste_url.rsplit('/', 1)[-1]
            }
        else:
            return None
    except requests.exceptions.RequestException:
        return None
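A retrieval sketch to pair with upload(); it assumes dpaste.com serves the raw content when '.txt' is appended to the paste URL, which is an assumption about the service rather than something this module guarantees:

import requests

def get(trace):
    # trace is the dict returned by upload(); 'key' holds the dpaste URL
    response = requests.get(trace['key'] + '.txt', headers={'User-Agent': 'My Python Project'})
    response.raise_for_status()
    return response.text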
70
pastedb/pastedb01/services/old_rentry.py
Normal file
@@ -0,0 +1,70 @@
import json
import hashlib
import http.cookiejar
import urllib.parse
import urllib.request
from http.cookies import SimpleCookie

_headers = {
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.3'}

class UrllibClient:
    """Simple HTTP Session Client, keeps cookies."""

    def __init__(self):
        self.cookie_jar = http.cookiejar.CookieJar()
        self.opener = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(self.cookie_jar))
        urllib.request.install_opener(self.opener)

    def get(self, url, headers={}):
        request = urllib.request.Request(url, headers=headers)
        return self._request(request)

    def post(self, url, data=None, headers={}):
        postdata = urllib.parse.urlencode(data).encode()
        request = urllib.request.Request(url, postdata, headers)
        return self._request(request)

    def _request(self, request):
        response = self.opener.open(request)
        response.status_code = response.getcode()
        response.data = response.read().decode('utf-8')
        return response


def json_loads(string):
    try:
        return json.loads(string)
    except:
        return None


def upload(data):
    client, cookie = UrllibClient(), SimpleCookie()

    cookie.load(vars(client.get('https://rentry.co'))['headers']['Set-Cookie'])
    csrftoken = cookie['csrftoken'].value

    json_data = json.dumps(data)
    md5sum = hashlib.md5(json_data.encode('utf-8')).hexdigest()

    payload = {
        'csrfmiddlewaretoken': csrftoken,
        'url': md5sum,
        'edit_code': '',
        'text': json_data
    }

    response = client.post('https://rentry.co/api/new', payload, headers=_headers)

    if response.status_code == 200:
        json_response = json_loads(response.data)
        return {
            "service": "rentry",
            "name": json_response["slug"],
            "key": f"https://rentry.co/{json_response['slug']}",
            "md5sum": md5sum
        }
    else:
        return None
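A sketch for fetching the markdown back, using the same /api/raw/<slug> endpoint that the vendored external/rentry script above relies on:

import json
import urllib.request

def get(trace):
    # Derive the slug from the stored URL and ask the raw API for the content
    slug = trace['key'].rsplit('/', 1)[-1]
    with urllib.request.urlopen(f'https://rentry.co/api/raw/{slug}') as resp:
        return json.loads(resp.read().decode('utf-8'))['content']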
44
pastedb/pastedb01/services/opendev.py
Executable file
@@ -0,0 +1,44 @@
import requests
import json
import hashlib

URL = 'https://paste.opendev.org/json/'
NAME = 'opendev'

def get_service_tag():
    return NAME

def upload(data):
    try:
        content = json.dumps(data)

        # create JSON payload for new paste
        payload = {
            'language': 'text',
            'code': content,
            'private': False
            #'expire': '1day'
        }

        # send request to create new paste
        response = requests.post(URL + '?method=pastes.newPaste', json=payload)

        status = response.status_code
        paste_id = response.json()['data']

        if status == 200:
            md5sum = hashlib.md5(content.encode('utf-8')).hexdigest()

            return {
                'service': NAME,
                'key': paste_id,  # store the paste id so it can be fetched back via pastes.getPaste
                'md5sum': md5sum,
                'name': NAME + '_' + str(paste_id) + '_' + md5sum[:5]
            }
        else:
            return None
    except requests.exceptions.RequestException:
        return None

# we can get the paste back by:
## $ > curl -d '{"paste_id":819463}' -H 'Content-Type: application/json' https://paste.opendev.org/json/?method=pastes.getPaste |jq .data.code
Some files were not shown because too many files have changed in this diff.