diff --git a/README.md b/README.md
index 6905560..3b64df2 100644
--- a/README.md
+++ b/README.md
@@ -26,7 +26,6 @@ In `commons.py` there are the following configuration values which are global fo
 | `UPLOADS` | `files/` | server | The folder where the Flask server will store uploaded files |
 | `JOURNALISTS` | `10` | server, source | How many journalists do we create and enroll. In general, this is realistic, in current SecureDrop usage it is way less. For demo purposes everybody knows it, in a real scenario it would not be needed. |
 | `ONETIMEKEYS` | `30` | journalist | How many ephemeral keys each journalist create, sign and uploads when required. |
-| `CURVE` | `NIST384p` | server, source, journalist | The curve for all elliptic curve operations. It must be imported first from the python-ecdsa library. |
 | `MAX_MESSAGES` | `500` | server | How may potential messages the server sends to each party when they try to fetch messages. This basically must be more than the messages in the database, otherwise we need to develop a mechanism to group messages adding some bits of metadata. |
 | `CHUNK` | `512 * 1024` | source | The base size of every parts in which attachment are split/padded to. This is not the actual size on disk, cause that will be a bit more depending on the nacl SecretBox implementation. |
@@ -482,7 +481,6 @@ No endpoints require authentication or sessions. The only data store is Redis an
 |`journalist_sig` | *base64(sigNR(JPK))* |
 |`journalist_fetching_key` | *base64(JCPK)* |
 |`journalist_fetching_sig` | *base64(sigJ(JCPK))* |
-|`journalist_uid` | *hex(Hash(JPK))* |
 
 #### POST
 Adds *Newsroom* signed *Journalist* to the *Server*.
@@ -518,7 +516,6 @@ curl -X GET "http://127.0.0.1:5000/journalists"
         "journalist_fetching_sig": <journalist_fetching_sig>,
         "journalist_key": <journalist_key>,
         "journalist_sig": <journalist_sig>,
-        "journalist_uid": <journalist_uid>
       },
       ...
   ],
@@ -540,17 +537,17 @@ At this point *Source* must have a verified *NRPK* and must verify bo
 |`count` | Number of returned ephemeral keys. It should match the number of *Journalists*. If it does not, a specific *Journalist* bucket might be out of keys. |
 |`ephemeral_key` | *base64(JEPK)* |
 |`ephemeral_sig` | *base64(sigJ(JEPK))* |
-|`journalist_uid` | *hex(Hash(JPK))* |
+|`journalist_key` | *base64(JPK)* |
 
 #### POST
 Adds *n* *Journalist* signed ephemeral key agreement keys to Server.
-The keys are stored in a Redis *set* specific per *Journalist*, which key is `journalist:`. In the demo implementation, the number of ephemeral keys to generate and upload each time is `commons.ONETIMEKEYS`.
+The keys are stored in a per-*Journalist* Redis *set* whose key is prefixed with `journalist:`. In the demo implementation, the number of ephemeral keys to generate and upload each time is `commons.ONETIMEKEYS`.
 
 ```
 curl -X POST -H "Content-Type: application/json" "http://127.0.0.1:5000/ephemeral_keys" --data
 {
-  "journalist_uid": <journalist_uid>,
+  "journalist_key": <journalist_key>,
   "ephemeral_keys": [
     {
       "ephemeral_key": <ephemeral_key>,
       "ephemeral_sig": <ephemeral_sig>,
@@ -579,7 +576,7 @@ curl -X GET http://127.0.0.1:5000/ephemeral_keys
     {
       "ephemeral_key": <ephemeral_key>,
      "ephemeral_sig": <ephemeral_sig>,
-      "journalist_uid": <journalist_uid>
+      "journalist_key": <journalist_key>
     },
     ...
  ],
diff --git a/commons.py b/commons.py
index 62b5507..2628c2c 100644
--- a/commons.py
+++ b/commons.py
@@ -1,12 +1,14 @@
 import json
 from base64 import b64decode, b64encode
-from hashlib import sha3_256
 from os import path, stat
 from secrets import token_bytes
 
-import nacl.secret
 import requests
-from ecdsa import ECDH, NIST384p, SigningKey, VerifyingKey
+from nacl.bindings import crypto_scalarmult
+from nacl.encoding import Base64Encoder
+from nacl.public import Box, PrivateKey, PublicKey
+from nacl.secret import SecretBox
+from nacl.signing import VerifyKey
 
 import pki
@@ -23,13 +25,10 @@ JOURNALISTS = 10
 
 # How many ephemeral keys each journalist create, sign and auploads when required
 ONETIMEKEYS = 30
-# The curve for all elliptic curve operations. It must be imported first from the python-ecdsa
-# library. Ed25519 and Ed448, although supported by the lib, are not fully implemented
-CURVE = NIST384p
 # How may entries the server sends to each party when they try to fetch messages
 # This basically must be more than the msssages in the database, otherwise we need
 # to develop a mechanism to group messages adding some bits of metadata
-MAX_MESSAGES = 500
+MAX_MESSAGES = 1000
 # The base size of every parts in which attachment are splitted/padded to. This
 # is not the actual size on disk, cause thet will be a bit more depending on
 # the nacl SecretBox implementation
@@ -37,11 +36,8 @@
 def add_journalist(journalist_key, journalist_sig, journalist_fetching_key, journalist_fetching_sig):
-    journalist_uid = sha3_256(journalist_key.verifying_key.to_string()).hexdigest()
-    journalist_key = b64encode(journalist_key.verifying_key.to_string()).decode("ascii")
-    journalist_sig = b64encode(journalist_sig).decode("ascii")
-    journalist_fetching_key = b64encode(journalist_fetching_key.verifying_key.to_string()).decode("ascii")
-    journalist_fetching_sig = b64encode(journalist_fetching_sig).decode("ascii")
+    journalist_key = journalist_key.verify_key.encode(Base64Encoder).decode("ascii")
+    journalist_fetching_key = journalist_fetching_key.public_key.encode(Base64Encoder).decode("ascii")
 
     response = requests.post(f"http://{SERVER}/journalists", json={
         "journalist_key": journalist_key,
@@ -50,7 +46,7 @@ def add_journalist(journalist_key, journalist_sig, journalist_fetching_key, jour
         "journalist_fetching_sig": journalist_fetching_sig
     })
     assert (response.status_code == 200)
-    return journalist_uid
+    return True
 
 
 def get_journalists(intermediate_verifying_key):
@@ -59,19 +55,17 @@
     journalists = response.json()["journalists"]
     assert (len(journalists) == JOURNALISTS)
     for content in journalists:
-        journalist_verifying_key = pki.public_b642key(content["journalist_key"])
-        journalist_fetching_verifying_key = pki.public_b642key(content["journalist_fetching_key"])
+        journalist_verifying_key = VerifyKey(content["journalist_key"], Base64Encoder)
+        journalist_fetching_verifying_key = VerifyKey(content["journalist_fetching_key"], Base64Encoder)
         # pki.verify_key shall give an hard fault is a signature is off
-        pki.verify_key(intermediate_verifying_key,
-                       journalist_verifying_key,
-                       None,
-                       b64decode(content["journalist_sig"])
-                       )
-        pki.verify_key(intermediate_verifying_key,
-                       journalist_fetching_verifying_key,
-                       None,
-                       b64decode(content["journalist_fetching_sig"])
-                       )
+        pki.verify_key_func(intermediate_verifying_key,
+                            journalist_verifying_key,
+                            None,
+                            content["journalist_sig"])
+        pki.verify_key_func(journalist_verifying_key,
+                            journalist_fetching_verifying_key,
+                            None,
+                            content["journalist_fetching_sig"])
     return journalists
content["journalist_fetching_sig"]) return journalists @@ -81,53 +75,40 @@ def get_ephemeral_keys(journalists): ephemeral_keys = response.json()["ephemeral_keys"] assert (len(ephemeral_keys) == JOURNALISTS) ephemeral_keys_return = [] - checked_uids = set() + checked_pubkeys = set() for ephemeral_key_dict in ephemeral_keys: - journalist_uid = ephemeral_key_dict["journalist_uid"] + journalist_pubkey = ephemeral_key_dict["journalist_key"] for journalist in journalists: - if journalist_uid == journalist["journalist_uid"]: - ephemeral_key_dict["journalist_uid"] = journalist["journalist_uid"] + if journalist_pubkey == journalist["journalist_key"]: ephemeral_key_dict["journalist_key"] = journalist["journalist_key"] ephemeral_key_dict["journalist_fetching_key"] = journalist["journalist_fetching_key"] # add uids to a set - checked_uids.add(journalist_uid) - journalist_verifying_key = pki.public_b642key(journalist["journalist_key"]) - ephemeral_verifying_key = pki.public_b642key(ephemeral_key_dict["ephemeral_key"]) + checked_pubkeys.add(journalist_pubkey) + journalist_verifying_key = VerifyKey(journalist["journalist_key"], Base64Encoder) + ephemeral_verifying_key = VerifyKey(ephemeral_key_dict["ephemeral_key"], Base64Encoder) # We rely again on verify_key raising an exception in case of failure - pki.verify_key(journalist_verifying_key, - ephemeral_verifying_key, - None, - b64decode(ephemeral_key_dict["ephemeral_sig"])) + pki.verify_key_func(journalist_verifying_key, + ephemeral_verifying_key, + None, + ephemeral_key_dict["ephemeral_sig"]) ephemeral_keys_return.append(ephemeral_key_dict) # check that all keys are from different journalists - assert (len(checked_uids) == JOURNALISTS) + assert (len(checked_pubkeys) == JOURNALISTS) return ephemeral_keys_return def build_message(fetching_public_key, encryption_public_key): - fetching_public_key = VerifyingKey.from_string(b64decode(fetching_public_key), curve=CURVE) - encryption_public_key = VerifyingKey.from_string(b64decode(encryption_public_key), curve=CURVE) - - ecdh = ECDH(curve=CURVE) - # [SOURCE] PERMESSAGE-EPHEMERAL KEY (private) - message_key = SigningKey.generate(curve=CURVE) - message_public_key = b64encode(message_key.verifying_key.to_string()).decode("ascii") - # load the private key to generate the shared secret - ecdh.load_private_key(message_key) - - # [JOURNALIST] PERMESSAGE-EPHEMERAL KEY (public) - ecdh.load_received_public_key(encryption_public_key) - # generate the secret for encrypting the secret with the source_ephemeral+journo_ephemeral - # so that we have forward secrecy - encryption_shared_secret = ecdh.generate_sharedsecret_bytes() + fetching_public_key = PublicKey(fetching_public_key, Base64Encoder) + encryption_public_key = PublicKey(encryption_public_key, Base64Encoder) + + message_secret_key = PrivateKey.generate() + message_public_key = (message_secret_key.public_key.encode(Base64Encoder)).decode("ascii") # encrypt the message, we trust nacl safe defaults - box = nacl.secret.SecretBox(encryption_shared_secret[0:32]) + box = Box(message_secret_key, encryption_public_key) # generate the message gdh to send the server - message_gdh = b64encode(VerifyingKey.from_public_point( - pki.get_shared_secret(fetching_public_key, message_key), - curve=CURVE).to_string()).decode('ascii') + message_gdh = b64encode(crypto_scalarmult(message_secret_key.encode(), fetching_public_key.encode())) return message_public_key, message_gdh, box @@ -185,20 +166,14 @@ def fetch_messages_id(fetching_key): potential_messages = fetch() messages = [] - 
-    ecdh = ECDH(curve=CURVE)
     for message in potential_messages:
-
-        ecdh.load_private_key(fetching_key)
-        ecdh.load_received_public_key_bytes(b64decode(message["gdh"]))
-        message_client_shared_secret = ecdh.generate_sharedsecret_bytes()
-
-        box = nacl.secret.SecretBox(message_client_shared_secret[0:32])
+        message_gdh = PublicKey(message["gdh"], Base64Encoder)
+        message_client_box = Box(fetching_key, message_gdh)
         try:
-            message_id = box.decrypt(b64decode(message["enc"])).decode('ascii')
+            message_id = message_client_box.decrypt(b64decode(message["enc"])).decode('ascii')
             messages.append(message_id)
-
         except Exception:
             pass
@@ -214,11 +189,8 @@ def fetch_messages_content(messages_id):
 
 
 def decrypt_message_ciphertext(private_key, message_public_key, message_ciphertext):
-    ecdh = ECDH(curve=CURVE)
-    ecdh.load_private_key(private_key)
-    ecdh.load_received_public_key_bytes(b64decode(message_public_key))
-    encryption_shared_secret = ecdh.generate_sharedsecret_bytes()
-    box = nacl.secret.SecretBox(encryption_shared_secret[0:32])
+    public_key = PublicKey(message_public_key, Base64Encoder)
+    box = Box(private_key, public_key)
     try:
         message_plaintext = json.loads(box.decrypt(b64decode(message_ciphertext)).decode('ascii'))
         return message_plaintext
@@ -247,7 +219,7 @@ def upload_attachment(filename):
            part_len = len(part)
            read_size += part_len
 
-            box = nacl.secret.SecretBox(key)
+            box = SecretBox(key)
            encrypted_part = box.encrypt(part.ljust(CHUNK))
            upload_response = send_file(encrypted_part)
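The `commons.py` changes above replace the python-ecdsa ECDH construction with X25519 via PyNaCl. A minimal round-trip sketch of the new per-message flow (standalone, with hypothetical in-memory keys rather than the repository's stored ones), assuming only `pynacl` is installed:

```
from nacl.bindings import crypto_scalarmult
from nacl.public import Box, PrivateKey

# [JOURNALIST] long-term fetching key and one ephemeral encryption key (hypothetical)
journalist_fetching_key = PrivateKey.generate()
journalist_ephemeral_key = PrivateKey.generate()

# [SOURCE] build_message(): fresh per-message key, Box against the journalist ephemeral key
message_secret_key = PrivateKey.generate()
sending_box = Box(message_secret_key, journalist_ephemeral_key.public_key)
ciphertext = sending_box.encrypt(b"hello")

# gdh share sent alongside the ciphertext: message secret * journalist fetching public
message_gdh = crypto_scalarmult(message_secret_key.encode(),
                                journalist_fetching_key.public_key.encode())

# [JOURNALIST] decrypt_message_ciphertext(): the same Box key derived from the other side
receiving_box = Box(journalist_ephemeral_key, message_secret_key.public_key)
assert receiving_box.decrypt(ciphertext) == b"hello"
```
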
diff --git a/demo.sh b/demo.sh
index 9d8cb21..59501a4 100755
--- a/demo.sh
+++ b/demo.sh
@@ -1,5 +1,6 @@
 #!/usr/bin/env bash
 
+killall flask 2>/dev/null
 sudo systemctl restart redis > /dev/null 2>&1
 
 # start clean
diff --git a/deploy_keys.py b/deploy_keys.py
new file mode 100644
index 0000000..a725a00
--- /dev/null
+++ b/deploy_keys.py
@@ -0,0 +1,15 @@
+import requests
+import commons
+
+with open("keys/root.public", "r") as f:
+    fpf_key = f.read()
+
+with open("keys/intermediate.public", "r") as f:
+    nr_key = f.read()
+
+with open("keys/intermediate.sig", "r") as f:
+    nr_sig = f.read()
+
+res = requests.post(f"http://{commons.SERVER}/keys", json={"fpf_key": fpf_key, "newsroom_key": nr_key, "newsroom_sig": nr_sig})
+
+assert(res.status_code == 200)
diff --git a/journalist.py b/journalist.py
index ac280bd..808f7b8 100644
--- a/journalist.py
+++ b/journalist.py
@@ -2,29 +2,29 @@ import json
 from base64 import b64encode
 from datetime import datetime
-from hashlib import sha3_256
 from os import listdir, mkdir, path
 from time import time
 
-import nacl.secret
 import requests
-from ecdsa import SigningKey
+from nacl.encoding import Base64Encoder, HexEncoder
+from nacl.public import PrivateKey
+from nacl.secret import SecretBox
 
 import commons
 import journalist_db
 import pki
 
 
-def add_ephemeral_keys(journalist_key, journalist_id, journalist_uid):
+def add_ephemeral_keys(journalist_key, journalist_id):
     ephemeral_keys = []
     for key in range(commons.ONETIMEKEYS):
         # Generate an ephemeral key, sign it and load the signature
-        ephemeral_sig, ephemeral_key = pki.generate_ephemeral(journalist_key, journalist_id, journalist_uid)
-        ephemeral_keys.append({"ephemeral_key": b64encode(ephemeral_key.verifying_key.to_string()).decode("ascii"),
-                               "ephemeral_sig": b64encode(ephemeral_sig).decode("ascii")})
+        ephemeral_sig, ephemeral_key = pki.generate_ephemeral(journalist_key, journalist_id)
+        ephemeral_keys.append({"ephemeral_key": ephemeral_key.public_key.encode(Base64Encoder).decode("ascii"),
+                               "ephemeral_sig": ephemeral_sig.signature.decode("ascii")})
 
     # Send both to server, the server veifies the signature and the trust chain prior ro storing/publishing
-    response = requests.post(f"http://{commons.SERVER}/ephemeral_keys", json={"journalist_uid": journalist_uid,
+    response = requests.post(f"http://{commons.SERVER}/ephemeral_keys", json={"journalist_key": journalist_key.verify_key.encode(Base64Encoder),
                                                                               "ephemeral_keys": ephemeral_keys})
     return (response.status_code == 200)
@@ -33,14 +33,14 @@ def add_ephemeral_keys(journalist_key, journalist_id, journalist_uid):
 # Load the journalist ephemeral keys from the journalist key dirrectory.
 # On an actual implementation this would more likely be a sqlite (or sqlcipher)
 # database.
-def load_ephemeral_keys(journalist_key, journalist_id, journalist_uid):
+def load_ephemeral_keys(journalist_key, journalist_id):
     ephemeral_keys = []
-    key_file_list = listdir(f"{commons.DIR}journalists/{journalist_uid}/")
+    key_file_list = listdir(f"{commons.DIR}journalists/{journalist_key.verify_key.encode(HexEncoder).decode('ascii')}/")
     for file_name in key_file_list:
         if file_name.endswith('.key'):
-            with open(f"{commons.DIR}journalists/{journalist_uid}/{file_name}", "rb") as f:
+            with open(f"{commons.DIR}journalists/{journalist_key.verify_key.encode(HexEncoder).decode('ascii')}/{file_name}", "r") as f:
                 key = f.read()
-                ephemeral_keys.append(SigningKey.from_pem(key))
+                ephemeral_keys.append(PrivateKey(key, Base64Encoder))
     return ephemeral_keys
@@ -55,7 +55,7 @@ def decrypt_message(ephemeral_keys, message):
     return message_plaintext
 
 
-def journalist_reply(message, reply, journalist_uid):
+def journalist_reply(message, reply, journalist_key):
     # This function builds the per-message keys and returns a nacl encrypting box
     message_public_key, message_gdh, box = commons.build_message(
         message["source_fetching_public_key"],
@@ -65,7 +65,7 @@ def journalist_reply(message, reply, journalist_uid):
     # Still it is client controlled, so in each client we shall watch out a bit.
     message_dict = {"message": reply,
                     # do we want to sign messages? how do we attest source authoriship?
- "sender": journalist_uid, + "sender": journalist_key.verify_key.encode(HexEncoder).decode('ascii'), # "receiver": "source_id_placeholder", # we could list the journalists involved in the conversation here # if the source choose not to pick everybody @@ -85,14 +85,14 @@ def main(args): journalist_id = args.journalist assert (journalist_id >= 0 and journalist_id < commons.JOURNALISTS) - journalist_uid, journalist_sig, journalist_key, journalist_fetching_sig, journalist_fetching_key = pki.load_and_verify_journalist_keypair(journalist_id) + journalist_sig, journalist_key, journalist_fetching_sig, journalist_fetching_key = pki.load_and_verify_journalist_keypair(journalist_id) jdb = journalist_db.JournalistDatabase('files/.jdb.sqlite3') if args.action == "upload_keys": - journalist_uid = commons.add_journalist(journalist_key, journalist_sig, journalist_fetching_key, journalist_fetching_sig) + commons.add_journalist(journalist_key, journalist_sig, journalist_fetching_key, journalist_fetching_sig) # Generate and upload a bunch (30) of ephemeral keys - add_ephemeral_keys(journalist_key, journalist_id, journalist_uid) + add_ephemeral_keys(journalist_key, journalist_id) elif args.action == "fetch": # Check if there are messages @@ -115,7 +115,7 @@ def main(args): elif args.action == "read": message_id = args.id message = commons.get_message(message_id) - ephemeral_keys = load_ephemeral_keys(journalist_key, journalist_id, journalist_uid) + ephemeral_keys = load_ephemeral_keys(journalist_key, journalist_id) message_plaintext = decrypt_message(ephemeral_keys, message) if message_plaintext: @@ -129,7 +129,7 @@ def main(args): else: message_plaintext["attachments"] = [] - sender = sha3_256(message_plaintext['source_encryption_public_key'].encode("ascii")).hexdigest() + sender = message_plaintext['source_encryption_public_key'].encode('ascii') print(f"[+] Successfully decrypted message {message_id}") print() print(f"\tID: {message_id}") @@ -148,7 +148,7 @@ def main(args): part_key = bytes.fromhex(part['key']) encrypted_part = commons.get_file(part["id"]) written_size += part["size"] - box = nacl.secret.SecretBox(part_key) + box = SecretBox(part_key) f.write(box.decrypt(encrypted_part)[0:part["size"]]) part_number += 1 @@ -165,9 +165,9 @@ def main(args): elif args.action == "reply": message_id = args.id message = commons.get_message(message_id) - ephemeral_keys = load_ephemeral_keys(journalist_key, journalist_id, journalist_uid) + ephemeral_keys = load_ephemeral_keys(journalist_key, journalist_id) message_plaintext = decrypt_message(ephemeral_keys, message) - journalist_reply(message_plaintext, args.message, journalist_uid) + journalist_reply(message_plaintext, args.message, journalist_key) elif args.action == "delete": message_id = args.id diff --git a/pki.py b/pki.py index 5a5d616..cae1dde 100644 --- a/pki.py +++ b/pki.py @@ -1,101 +1,82 @@ from base64 import b64decode -from hashlib import sha3_256 from os import mkdir, rmdir -import nacl.utils -from ecdsa import (InvalidCurveError, InvalidSharedSecretError, SigningKey, - VerifyingKey) -from ecdsa.ellipticcurve import INFINITY -from ecdsa.util import sigdecode_der, sigencode_der +from nacl.encoding import Base64Encoder, HexEncoder +from nacl.public import PrivateKey, PublicKey +from nacl.signing import SigningKey, VerifyKey import commons -# Used to deterministally generate keys based on the passphrase, only on the source side -# the class is kind of a hack: python-ecdsa wants a os.urandom() kind of interface -# but nacl.utils does not have an internal 
diff --git a/pki.py b/pki.py
index 5a5d616..cae1dde 100644
--- a/pki.py
+++ b/pki.py
@@ -1,101 +1,82 @@
 from base64 import b64decode
-from hashlib import sha3_256
 from os import mkdir, rmdir
 
-import nacl.utils
-from ecdsa import (InvalidCurveError, InvalidSharedSecretError, SigningKey,
-                   VerifyingKey)
-from ecdsa.ellipticcurve import INFINITY
-from ecdsa.util import sigdecode_der, sigencode_der
+from nacl.encoding import Base64Encoder, HexEncoder
+from nacl.public import PrivateKey, PublicKey
+from nacl.signing import SigningKey, VerifyKey
 
 import commons
 
 
-# Used to deterministally generate keys based on the passphrase, only on the source side
-# the class is kind of a hack: python-ecdsa wants a os.urandom() kind of interface
-# but nacl.utils does not have an internal state even if seeded.
-# Thus we use a seed to generate enough randoness for all the needed calls. Shall the
-# pre-generated randomness end, an exception is forcefully raised.
-class PRNG:
-    def __init__(self, seed):
-        assert (len(seed) == 32)
-        self.total_size = 4096
-        self.seed = seed
-        self.status = 0
-        self.data = nacl.utils.randombytes_deterministic(self.total_size, self.seed)
+# Load a saved PyNaCl key from disk; if private=False, load just the public key
+def load_key(name, keytype='sig', private=False):
 
-    def deterministic_random(self, size):
-        if self.status + size >= self.total_size:
-            raise RuntimeError("Ran out of buffered random values")
-        return_data = self.data[self.status:self.status+size]
-        self.status += size
-        return return_data
-
-
-def get_shared_secret(remote_pubkey, local_privkey):
-    if not (local_privkey.curve == remote_pubkey.curve):
-        raise InvalidCurveError("Curves for public key and private key is not equal.")
-
-    # shared secret = PUBKEYtheirs * PRIVATEKEYours
-    result = (remote_pubkey.pubkey.point * local_privkey.privkey.secret_multiplier)
-    if result == INFINITY:
-        raise InvalidSharedSecretError("Invalid shared secret (INFINITY).")
-
-    return result
-
-
-def public_b642key(b64_verifying_key):
-    return VerifyingKey.from_string(b64decode(b64_verifying_key), curve=commons.CURVE)
+    if keytype == 'sig':
+        pub = VerifyKey
+        priv = SigningKey
+    elif keytype == 'enc':
+        pub = PublicKey
+        priv = PrivateKey
+    else:
+        return False
 
+    if private:
+        with open(f"{commons.DIR}/{name}.key", "r") as f:
+            private_key = priv(f.read(), Base64Encoder)
+        # assert (key.verify_key == verify_key)
+        return private_key
+    else:
+        with open(f"{commons.DIR}/{name}.public", "r") as f:
+            public_key = pub(f.read(), Base64Encoder)
 
-# Loads a saved python ecdsa key from disk, if signing=False, load just the public-key
-def load_key(name, signing=True):
+        return public_key
 
-    with open(f"{commons.DIR}/{name}.pem", "rb") as f:
-        verifying_key = VerifyingKey.from_pem(f.read())
 
-    if signing:
-        with open(f"{commons.DIR}/{name}.key", "rb") as f:
-            key = SigningKey.from_pem(f.read())
-        assert (key.verifying_key == verifying_key)
-        return key
+# Generate an Ed25519 (sig) or X25519 (enc) keypair and save it to disk
+def generate_key(name, keytype='sig'):
+    if keytype == 'sig':
+        generate_obj = SigningKey
+    elif keytype == 'enc':
+        generate_obj = PrivateKey
     else:
-        return verifying_key
+        return False
+    key = generate_obj.generate()
 
-# Generate a python-ecdsa keypair and save it to disk
-def generate_key(name):
-    key = SigningKey.generate(curve=commons.CURVE)
+    with open(f"{commons.DIR}/{name}.key", "w") as f:
+        f.write(key.encode(encoder=Base64Encoder).decode('ascii'))
 
-    with open(f"{commons.DIR}/{name}.key", "wb") as f:
-        f.write(key.to_pem(format="pkcs8"))
-
-    with open(f"{commons.DIR}/{name}.pem", "wb") as f:
-        f.write(key.verifying_key.to_pem())
+    with open(f"{commons.DIR}/{name}.public", "w") as f:
+        if keytype == 'sig':
+            f.write(key.verify_key.encode(encoder=Base64Encoder).decode('ascii'))
+        else:
+            f.write(key.public_key.encode(encoder=Base64Encoder).decode('ascii'))
 
     return key
 
 
 # Sign a given public key with the pubblid private key
 def sign_key(signing_pivate_key, signed_public_key, signature_name):
-    sig = signing_pivate_key.sign_deterministic(
-        signed_public_key.to_string(),
-        hashfunc=sha3_256,
-        sigencode=sigencode_der
-    )
+    sig = signing_pivate_key.sign(signed_public_key.encode(), encoder=Base64Encoder)
+
+    with open(signature_name, "w") as f:
+        f.write(sig.signature.decode('ascii'))
open(signature_name, "wb") as f: - f.write(sig) + # signing_pivate_key.verify_key.verify(sig, encoder=Base64Encoder) + # sooo the message can be base64 but the signature has to be byes, so the encoder + # is applied only to the message apparently + # signing_pivate_key.verify_key.verify(sig.message, b64decode(sig.signature), encoder=Base64Encoder) return sig # Verify a signature -def verify_key(signing_public_key, signed_public_key, signature_name, sig=None): +def verify_key_func(signing_public_key, signed_public_key, signature_name, sig=None): if not sig: - with open(signature_name, "rb") as f: + with open(signature_name, "r") as f: sig = f.read() - signing_public_key.verify(sig, signed_public_key.to_string(), hashfunc=sha3_256, sigdecode=sigdecode_der) + + signing_public_key.verify(signed_public_key.encode(), b64decode(sig)) return sig @@ -107,55 +88,40 @@ def generate_pki(): mkdir(commons.DIR) root_key = generate_key("root") intermediate_key = generate_key("intermediate") - sign_key(root_key, intermediate_key.verifying_key, f"{commons.DIR}intermediate.sig") + sign_key(root_key, intermediate_key.verify_key, f"{commons.DIR}intermediate.sig") journalist_fetching_keys, journalist_keys = generate_journalists(intermediate_key) return root_key, intermediate_key, journalist_fetching_keys, journalist_keys def verify_root_intermediate(): - root_verifying_key = load_key("root", signing=False) - intermediate_verifying_key = load_key("intermediate", signing=False) - verify_key(root_verifying_key, intermediate_verifying_key, f"{commons.DIR}intermediate.sig") + root_verifying_key = load_key("root", keytype='sig', private=False) + intermediate_verifying_key = load_key("intermediate", keytype='sig', private=False) + verify_key_func(root_verifying_key, intermediate_verifying_key, f"{commons.DIR}intermediate.sig") return intermediate_verifying_key -def load_pki(): - root_key = load_key("root") - intermediate_key = load_key("intermediate") - verify_key(root_key.verifying_key, intermediate_key.verifying_key, f"{commons.DIR}intermediate.sig") - journalist_keys = [] - for j in range(commons.JOURNALISTS): - journalist_key = load_key(f"{commons.DIR}journalists/journalist_{j}") - journalist_keys.append(journalist_key) - verify_key(intermediate_key.verifying_key, - journalist_key.verifying_key, - f"{commons.DIR}journalists/journalist_{j}.sig") - return root_key, intermediate_key, journalist_keys - - def load_and_verify_journalist_keypair(journalist_id): intermediate_verifying_key = verify_root_intermediate() - journalist_key = load_key(f"journalists/journalist_{journalist_id}") - journalist_uid = sha3_256(journalist_key.verifying_key.to_string()).hexdigest() - journalist_sig = verify_key(intermediate_verifying_key, - journalist_key.verifying_key, - f"{commons.DIR}journalists/journalist_{journalist_id}.sig") - journalist_fetching_key = load_key(f"journalists/journalist_fetching_{journalist_id}") - journalist_fetching_sig = verify_key(intermediate_verifying_key, - journalist_fetching_key.verifying_key, - f"{commons.DIR}journalists/journalist_fetching_{journalist_id}.sig") + journalist_key = load_key(f"journalists/journalist_{journalist_id}", keytype='sig', private=True) + journalist_sig = verify_key_func(intermediate_verifying_key, + journalist_key.verify_key, + f"{commons.DIR}journalists/journalist_{journalist_id}.sig") + journalist_fetching_key = load_key(f"journalists/journalist_fetching_{journalist_id}", keytype='enc', private=True) + journalist_fetching_sig = verify_key_func(journalist_key.verify_key, + 
+                                              journalist_fetching_key.public_key,
+                                              f"{commons.DIR}journalists/journalist_fetching_{journalist_id}.sig")
 
-    return journalist_uid, journalist_sig, journalist_key, journalist_fetching_sig, journalist_fetching_key
+    return journalist_sig, journalist_key, journalist_fetching_sig, journalist_fetching_key
 
 
 def load_and_verify_journalist_verifying_keys():
     intermediate_verifying_key = verify_root_intermediate()
     journalist_verying_keys = []
     for j in range(commons.JOURNALISTS):
-        journalist_verifying_key = load_key(f"journalists/journalist_{j}", signing=False)
-        verify_key(intermediate_verifying_key,
-                   journalist_verifying_key,
-                   f"{commons.DIR}journalists/journalist_{j}.sig")
+        journalist_verifying_key = load_key(f"journalists/journalist_{j}", private=False)
+        verify_key_func(intermediate_verifying_key,
+                        journalist_verifying_key,
+                        f"{commons.DIR}journalists/journalist_{j}.sig")
         journalist_verying_keys.append(journalist_verifying_key)
     return journalist_verying_keys
@@ -165,31 +131,31 @@ def generate_journalists(intermediate_key):
     journalist_fetching_keys = []
     mkdir(f"{commons.DIR}/journalists/")
     for j in range(commons.JOURNALISTS):
-        journalist_key = generate_key(f"journalists/journalist_{j}")
+        journalist_key = generate_key(f"journalists/journalist_{j}", keytype='sig')
         journalist_keys.append(journalist_key)
-        sign_key(intermediate_key, journalist_key.verifying_key, f"{commons.DIR}journalists/journalist_{j}.sig")
-        journalist_fetching_key = generate_key(f"journalists/journalist_fetching_{j}")
+        sign_key(intermediate_key, journalist_key.verify_key, f"{commons.DIR}journalists/journalist_{j}.sig")
+        journalist_fetching_key = generate_key(f"journalists/journalist_fetching_{j}", keytype='enc')
         journalist_fetching_keys.append(journalist_fetching_key)
-        sign_key(intermediate_key, journalist_fetching_key.verifying_key, f"{commons.DIR}journalists/journalist_fetching_{j}.sig")
+        sign_key(journalist_key, journalist_fetching_key.public_key, f"{commons.DIR}journalists/journalist_fetching_{j}.sig")
 
     return journalist_fetching_keys, journalist_keys
 
 
-def generate_ephemeral(journalist_key, journalist_id, journalist_uid):
+def generate_ephemeral(journalist_key, journalist_id):
     try:
-        mkdir(f"{commons.DIR}/journalists/{journalist_uid}")
+        mkdir(f"{commons.DIR}/journalists/{journalist_key.verify_key.encode(HexEncoder).decode('ascii')}")
     except Exception:
         pass
-    key = SigningKey.generate(curve=commons.CURVE)
-    name = sha3_256(key.verifying_key.to_string()).hexdigest()
+    key = PrivateKey.generate()
+    name = key.public_key.encode(HexEncoder)
 
-    with open(f"{commons.DIR}/journalists/{journalist_uid}/{name}.key", "wb") as f:
-        f.write(key.to_pem(format="pkcs8"))
+    with open(f"{commons.DIR}/journalists/{journalist_key.verify_key.encode(HexEncoder).decode('ascii')}/{name}.key", "w") as f:
+        f.write(key.encode(Base64Encoder).decode('ascii'))
 
-    with open(f"{commons.DIR}/journalists/{journalist_uid}/{name}.pem", "wb") as f:
-        f.write(key.verifying_key.to_pem())
+    with open(f"{commons.DIR}/journalists/{journalist_key.verify_key.encode(HexEncoder).decode('ascii')}/{name}.public", "w") as f:
+        f.write(key.public_key.encode(Base64Encoder).decode('ascii'))
 
-    sig = sign_key(journalist_key, key.verifying_key, f"{commons.DIR}/journalists/{journalist_uid}/{name}.sig")
+    sig = sign_key(journalist_key, key.public_key, f"{commons.DIR}/journalists/{journalist_key.verify_key.encode(HexEncoder).decode('ascii')}/{name}.sig")
 
     return sig, key
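With the `pki.py` changes above, signatures are detached Ed25519 signatures stored as base64 text and verified against the raw public key bytes. A round-trip sketch of that convention (hypothetical in-memory keys, no files involved):

```
from base64 import b64decode

from nacl.encoding import Base64Encoder
from nacl.signing import SigningKey

# e.g. the intermediate key signing a journalist signing key (hypothetical keys)
signing_key = SigningKey.generate()
signed_key = SigningKey.generate().verify_key

# sign_key(): sign the raw public key bytes, keep only the detached signature, base64-encoded
sig = signing_key.sign(signed_key.encode(), encoder=Base64Encoder)
stored_sig = sig.signature.decode("ascii")

# verify_key_func(): verify against the raw key bytes; raises BadSignatureError on failure
signing_key.verify_key.verify(signed_key.encode(), b64decode(stored_sig))
```
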
diff --git a/requirements.txt b/requirements.txt
index d4ec32d..1c42254 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,6 +1,4 @@
 redis
 flask
-ecdsa
 pynacl
-gmpy2
 requests
diff --git a/server.py b/server.py
index 48b130c..01b876d 100644
--- a/server.py
+++ b/server.py
@@ -1,19 +1,21 @@
 import json
 from base64 import b64decode, b64encode
-from hashlib import sha3_256
 from os import mkdir, remove
 from random import uniform
 from secrets import token_bytes, token_hex
-from time import sleep
 
-import nacl.secret
-from ecdsa import ECDH, SigningKey, VerifyingKey
 from flask import Flask, request, send_file
+from nacl.bindings import crypto_scalarmult
+from nacl.encoding import Base64Encoder, HexEncoder
+from nacl.public import Box, PrivateKey, PublicKey
+from nacl.signing import VerifyKey
 from redis import Redis
 
 import commons
 import pki
 
+from time import sleep
+
 # bootstrap keys
 intermediate_verifying_key = pki.verify_root_intermediate()
@@ -22,7 +24,8 @@
 try:
     mkdir(f"{commons.UPLOADS}")
-except Exception:
+except Exception as e:
+    print(e)
     pass
@@ -34,7 +37,7 @@ def index():
 @app.route("/journalists", methods=["POST"])
 def add_journalist():
     content = request.json
-    print(content)
+
     try:
         assert ("journalist_key" in content)
         assert ("journalist_sig" in content)
@@ -43,36 +46,26 @@
     except Exception:
         return {"status": "KO"}, 400
 
-    journalist_verifying_key = pki.public_b642key(content["journalist_key"])
-    journalist_fetching_verifying_key = pki.public_b642key(content["journalist_fetching_key"])
-    try:
-        journalist_sig = pki.verify_key(intermediate_verifying_key,
-                                        journalist_verifying_key,
-                                        None,
-                                        b64decode(content["journalist_sig"]))
-
-        journalist_fetching_sig = pki.verify_key(intermediate_verifying_key,
-                                                 journalist_fetching_verifying_key,
-                                                 None,
-                                                 b64decode(content["journalist_fetching_sig"]))
+    journalist_verifying_key = VerifyKey(content["journalist_key"], Base64Encoder)
+    journalist_fetching_public_key = PublicKey(content["journalist_fetching_key"], Base64Encoder)
+    try:
+        journalist_sig = pki.verify_key_func(intermediate_verifying_key,
+                                             journalist_verifying_key,
+                                             None,
+                                             content["journalist_sig"])
+
+        journalist_fetching_sig = pki.verify_key_func(journalist_verifying_key,
+                                                      journalist_fetching_public_key,
+                                                      None,
+                                                      content["journalist_fetching_sig"])
     except Exception:
         return {"status": "KO"}, 400
-    journalist_uid = sha3_256(journalist_verifying_key.to_string()).hexdigest()
-    redis.sadd("journalists", json.dumps({"journalist_uid": journalist_uid,
-                                          "journalist_key": b64encode(
-                                            journalist_verifying_key.to_string()
-                                          ).decode("ascii"),
-                                          "journalist_sig": b64encode(
-                                            journalist_sig
-                                          ).decode("ascii"),
-                                          "journalist_fetching_key": b64encode(
-                                            journalist_fetching_verifying_key.to_string()
-                                          ).decode("ascii"),
-                                          "journalist_fetching_sig": b64encode(
-                                            journalist_fetching_sig
-                                          ).decode("ascii"),
-                                          }))
+
+    redis.sadd("journalists", json.dumps({"journalist_key": journalist_verifying_key.encode(Base64Encoder).decode("ascii"),
+                                          "journalist_sig": journalist_sig,
+                                          "journalist_fetching_key": journalist_fetching_public_key.encode(Base64Encoder).decode('ascii'),
+                                          "journalist_fetching_sig": journalist_fetching_sig}))
     return {"status": "OK"}, 200
@@ -130,36 +123,30 @@ def add_ephemeral_keys():
     content = request.json
     try:
-        assert ("journalist_uid" in content)
+        assert ("journalist_key" in content)
         assert ("ephemeral_keys" in content)
     except Exception:
         return {"status": "KO"}, 400
 
-    journalist_uid = content["journalist_uid"]
+    journalist_key = content["journalist_key"]
     journalists = redis.smembers("journalists")
     for journalist in journalists:
         journalist_dict = json.loads(journalist.decode("ascii"))
-        if journalist_dict["journalist_uid"] == journalist_uid:
-            journalist_verifying_key = pki.public_b642key(journalist_dict["journalist_key"])
+        if journalist_dict["journalist_key"] == journalist_key:
+            journalist_verifying_key = VerifyKey(journalist_dict["journalist_key"], Base64Encoder)
     ephemeral_keys = content["ephemeral_keys"]
 
     for ephemeral_key_dict in ephemeral_keys:
-        ephemeral_key = b64decode(ephemeral_key_dict["ephemeral_key"])
-        ephemeral_key_verifying_key = VerifyingKey.from_string(ephemeral_key, curve=commons.CURVE)
-        ephemeral_sig = b64decode(ephemeral_key_dict["ephemeral_sig"])
-        ephemeral_sig = pki.verify_key(
+        ephemeral_key_verifying_key = VerifyKey(ephemeral_key_dict["ephemeral_key"], Base64Encoder)
+        ephemeral_sig = pki.verify_key_func(
            journalist_verifying_key,
            ephemeral_key_verifying_key,
            None,
-            ephemeral_sig)
-        redis.sadd(f"journalist:{journalist_uid}",
-                   json.dumps({"ephemeral_key": b64encode(
-                                ephemeral_key_verifying_key.to_string()
-                               ).decode("ascii"),
-                               "ephemeral_sig": b64encode(
-                                ephemeral_sig
-                               ).decode("ascii")}))
+            ephemeral_key_dict["ephemeral_sig"])
+        redis.sadd(f"journalist:{journalist_verifying_key.encode(HexEncoder).decode('ascii')}",
+                   json.dumps({"ephemeral_key": ephemeral_key_verifying_key.encode(Base64Encoder).decode("ascii"),
+                               "ephemeral_sig": ephemeral_sig}))
 
     return {"status": "OK"}, 200
@@ -171,9 +158,8 @@ def get_ephemeral_keys():
 
     for journalist in journalists:
         journalist_dict = json.loads(journalist.decode("ascii"))
-        journalist_uid = journalist_dict["journalist_uid"]
-        ephemeral_key_dict = json.loads(redis.spop(f"journalist:{journalist_uid}").decode("ascii"))
-        ephemeral_key_dict["journalist_uid"] = journalist_uid
+        ephemeral_key_dict = json.loads(redis.spop(f"journalist:{VerifyKey(journalist_dict['journalist_key'], Base64Encoder).encode(HexEncoder).decode('ascii')}").decode("ascii"))
+        ephemeral_key_dict["journalist_key"] = journalist_dict["journalist_key"]
         ephemeral_keys.append(ephemeral_key_dict)
 
     return {"status": "OK", "count": len(ephemeral_keys), "ephemeral_keys": ephemeral_keys}, 200
@@ -187,23 +173,15 @@ def get_fetch():
     message_keys = redis.keys("message:*")
     for message_key in message_keys:
         # SERVER EPHEMERAL CHALLENGE KEY
-        request_ephemeral_key = SigningKey.generate(curve=commons.CURVE)
+        request_ephemeral_key = PrivateKey.generate()
         message_id = message_key.decode('ascii').split(":")[1]
         # retrieve the message and load the json
         message_dict = json.loads(redis.get(message_key).decode('ascii'))
-        message_server_gdh = VerifyingKey.from_public_point(
-            pki.get_shared_secret(
-                VerifyingKey.from_string(b64decode(message_dict["message_public_key"]), curve=commons.CURVE),
-                request_ephemeral_key),
-            curve=commons.CURVE).to_string()
+        message_server_gdh = crypto_scalarmult(request_ephemeral_key.encode(), b64decode(message_dict["message_public_key"]))
 
         # calculate the sared key for message_id encryption
-        ecdh = ECDH(curve=commons.CURVE)
-        ecdh.load_private_key(request_ephemeral_key)
-        ecdh.load_received_public_key_bytes(b64decode(message_dict["message_gdh"]))
-        message_server_shared_secret = ecdh.generate_sharedsecret_bytes()
-        box = nacl.secret.SecretBox(message_server_shared_secret[0:32])
+        box = Box(request_ephemeral_key, PublicKey(message_dict["message_gdh"], encoder=Base64Encoder))
         encrypted_message_id = box.encrypt(message_id.encode('ascii'))
 
         potential_messages.append({"gdh": b64encode(message_server_gdh).decode('ascii'),
@@ -213,14 +191,14 @@
     # TODO: add shuffling of the response dict
     for decoy in range(commons.MAX_MESSAGES - len(potential_messages)):
         potential_messages.append({
-            "gdh": b64encode(SigningKey.generate(curve=commons.CURVE).verifying_key.to_string()).decode('ascii'),
+            "gdh": PrivateKey.generate().encode(Base64Encoder).decode('ascii'),
            # message_id are 32 bytes and encryption overhead is 64 bytes
            "enc": b64encode(token_bytes(32+72)).decode('ascii')
            }
        )
 
     # TODO: add stronger timing attack mitigations (such as a random delay)
-    sleep(uniform(0, 3.0))
+    sleep(uniform(0, 2.0))
 
     assert (len(potential_messages) == commons.MAX_MESSAGES)
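The fetch endpoint above relies on X25519 commutativity: the Box the server builds from the message's `gdh` share and the Box the client builds from the server's share derive the same secret, so only the intended fetcher can recover the message id. A quick standalone check of that property (hypothetical keys):

```
from nacl.bindings import crypto_scalarmult
from nacl.public import Box, PrivateKey, PublicKey

recipient_fetching_key = PrivateKey.generate()   # long-term fetching key of the recipient
message_key = PrivateKey.generate()              # per-message key from build_message()
request_key = PrivateKey.generate()              # server per-request ephemeral key

# What the sender uploads: gdh = message secret * recipient fetching public
message_gdh = crypto_scalarmult(message_key.encode(), recipient_fetching_key.public_key.encode())
# What the server returns: server_gdh = request secret * message public
server_gdh = crypto_scalarmult(request_key.encode(), message_key.public_key.encode())

# Server encrypts with Box(request_key, message_gdh); client decrypts with
# Box(fetching_key, server_gdh). Both reduce to the same shared point.
server_box = Box(request_key, PublicKey(message_gdh))
client_box = Box(recipient_fetching_key, PublicKey(server_gdh))
assert client_box.decrypt(server_box.encrypt(b"message-id")) == b"message-id"
```
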
diff --git a/source.py b/source.py
index eb2533b..731b91a 100644
--- a/source.py
+++ b/source.py
@@ -1,12 +1,12 @@
 import argparse
 import json
-from base64 import b64encode
 from datetime import datetime
-from hashlib import sha3_256
 from secrets import token_bytes
 from time import time
 
-from ecdsa import SigningKey
+from nacl.encoding import Base64Encoder, RawEncoder
+from nacl.hash import blake2b
+from nacl.public import PrivateKey
 
 import commons
 import pki
@@ -19,9 +19,8 @@ def generate_passphrase():
 # this function derives an EC keypair given the passphrase
 # the prefix is useful for isolating key. A hash/kdf is used to generate the actual seeds
 def derive_key(passphrase, key_isolation_prefix):
-    key_seed = sha3_256(key_isolation_prefix.encode("ascii") + passphrase).digest()
-    key_prng = pki.PRNG(key_seed[0:32])
-    key = SigningKey.generate(curve=commons.CURVE, entropy=key_prng.deterministic_random)
+    key_seed = blake2b(passphrase, salt=key_isolation_prefix.encode("ascii"), encoder=RawEncoder)
+    key = PrivateKey(key_seed)
     return key
@@ -38,11 +37,11 @@ def send_submission(intermediate_verifying_key, passphrase, message, attachments
     # Add prefix for key isolation
     # [SOURCE] LONG-TERM MESSAGE KEY
     encryption_key = derive_key(passphrase, "encryption_key-")
-    source_encryption_public_key = b64encode(encryption_key.verifying_key.to_string()).decode("ascii")
+    source_encryption_public_key = encryption_key.public_key.encode(Base64Encoder).decode("ascii")
 
     # [SOURCE] LONG-TERM CHALLENGE KEY
     fetching_key = derive_key(passphrase, "fetching_key-")
-    source_fetching_public_key = b64encode(fetching_key.verifying_key.to_string()).decode("ascii")
+    source_fetching_public_key = fetching_key.public_key.encode(Base64Encoder).decode("ascii")
 
     # For every receiver (journalists), create a message
     for ephemeral_key_dict in ephemeral_keys:
@@ -56,7 +55,7 @@ def send_submission(intermediate_verifying_key, passphrase, message, attachments
                        # do we want to sign messages? how do we attest source authoriship?
                        "source_fetching_public_key": source_fetching_public_key,
                        "source_encryption_public_key": source_encryption_public_key,
-                        "receiver": ephemeral_key_dict["journalist_uid"],
+                        "receiver": ephemeral_key_dict["journalist_key"],
                        # we could list the journalists involved in the conversation here
                        # if the source choose not to pick everybody
                        "group_members": [],
@@ -64,9 +63,8 @@
                        # we can add attachmenet pieces/id here
                        "attachments": attachments}
 
-        message_ciphertext = b64encode(box.encrypt(
-            (json.dumps(message_dict)).ljust(1024).encode('ascii'))
-            ).decode("ascii")
+        message_ciphertext = box.encrypt(
+            (json.dumps(message_dict)).ljust(1024).encode('ascii'), encoder=Base64Encoder).decode("ascii")
 
         # Send the message to the server API using the generic /send endpoint
         commons.send_message(message_ciphertext, message_public_key, message_gdh)
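`derive_key()` in `source.py` now seeds an X25519 key directly from `blake2b(passphrase, salt=prefix)` instead of the removed PRNG shim. A small sketch of the determinism and prefix isolation this gives, assuming the passphrase is raw bytes as produced by the source's passphrase generator:

```
from secrets import token_bytes

from nacl.encoding import RawEncoder
from nacl.hash import blake2b
from nacl.public import PrivateKey


def derive_key(passphrase, prefix):
    # blake2b defaults to a 32-byte digest, which is exactly an X25519 seed
    seed = blake2b(passphrase, salt=prefix.encode("ascii"), encoder=RawEncoder)
    return PrivateKey(seed)


passphrase = token_bytes(32)
# Same passphrase and prefix give the same keypair; different prefixes give isolated keys
assert derive_key(passphrase, "encryption_key-").public_key == derive_key(passphrase, "encryption_key-").public_key
assert derive_key(passphrase, "encryption_key-").public_key != derive_key(passphrase, "fetching_key-").public_key
```
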