diff --git a/.idea/.gitignore b/.idea/.gitignore
new file mode 100644
index 0000000..13566b8
--- /dev/null
+++ b/.idea/.gitignore
@@ -0,0 +1,8 @@
+# Default ignored files
+/shelf/
+/workspace.xml
+# Editor-based HTTP Client requests
+/httpRequests/
+# Datasource local storage ignored files
+/dataSources/
+/dataSources.local.xml
diff --git a/.idea/bucket-common-types.iml b/.idea/bucket-common-types.iml
new file mode 100644
index 0000000..cf84ae4
--- /dev/null
+++ b/.idea/bucket-common-types.iml
@@ -0,0 +1,11 @@
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.idea/modules.xml b/.idea/modules.xml
new file mode 100644
index 0000000..fb04b29
--- /dev/null
+++ b/.idea/modules.xml
@@ -0,0 +1,8 @@
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.idea/remote-targets.xml b/.idea/remote-targets.xml
new file mode 100644
index 0000000..562223f
--- /dev/null
+++ b/.idea/remote-targets.xml
@@ -0,0 +1,21 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.idea/vcs.xml b/.idea/vcs.xml
new file mode 100644
index 0000000..35eb1dd
--- /dev/null
+++ b/.idea/vcs.xml
@@ -0,0 +1,6 @@
+
+
+
+
+
+
\ No newline at end of file
diff --git a/src/bucket/bucket_guid.rs b/src/bucket/bucket_guid.rs
index 3a7f9f9..7499188 100644
--- a/src/bucket/bucket_guid.rs
+++ b/src/bucket/bucket_guid.rs
@@ -47,6 +47,14 @@ impl fmt::Display for BucketGuid {
}
}
impl BucketGuid {
+ /// Returns a 32-byte array representation of the BucketGuid.
+ pub fn to_bytes(&self) -> [u8; 32] {
+ let mut slice = [0u8; 32];
+ slice[0..16].copy_from_slice(self.user_id.as_bytes());
+ slice[16..32].copy_from_slice(self.bucket_id.as_bytes());
+ slice
+ }
+
/// Format the BucketGuid using the specified format.
pub fn fmt_with(&self, f: &mut fmt::Formatter<'_>, format: BucketGuidFormat) -> fmt::Result {
match format {
@@ -63,15 +71,7 @@ impl BucketGuid {
}
}
-//match format {
-// BucketGuidFormat::Hyphenated(uuid_format) => write!(f, "{}-{}", self.user_id, self.bucket_id),
-// BucketGuidFormat::Simple(uuid_format) => write!(
-// f,
-// "{}{}",
-// self.user_id,
-// self.bucket_id
-// ),
-//}
+
impl BucketGuid {
pub fn new(user_id: uuid::Uuid, bucket_id: uuid::Uuid) -> Self {
Self { user_id, bucket_id }
@@ -87,22 +87,10 @@ impl BucketGuid {
// Define the size of a ``BucketGuid`` in bytes.
pub const fn size() -> usize {
// Since each UUID is 16 bytes, the total length is 32 bytes
- let size:usize = 32;
- debug_assert_eq!(size, mem::size_of::());
- size
+ 32
}
}
-impl SlicePattern for BucketGuid {
- type Item = u8;
- /// 8-bit array collection of 32 items.
- fn as_slice(&self) -> &[Self::Item] {
- let mut slice = [0u8; 32];
- slice[0..16].copy_from_slice(self.user_id.as_bytes());
- slice[16..32].copy_from_slice(self.bucket_id.as_bytes());
- &slice
- }
-}
impl FromStr for BucketGuid {
type Err = BucketGuidParseError;
@@ -131,6 +119,11 @@ pub enum BucketGuidParseError {
#[cfg(test)]
mod tests {
use super::*;
+ #[test]
+ // Very important: checks the size to ensure there is no mistake
+ fn test_bucket_guid_size() {
+ debug_assert_eq!(BucketGuid::size(), std::mem::size_of::<BucketGuid>());
+ }
// Test the `new` method to create a new BucketGuid
#[test]
diff --git a/src/bucket/bucket_limits.rs b/src/bucket/bucket_limits.rs
index 6ce6435..b04e6e7 100644
--- a/src/bucket/bucket_limits.rs
+++ b/src/bucket/bucket_limits.rs
@@ -1,3 +1,4 @@
+
pub struct BucketLimits {
pub bucket_size_limit : usize,
pub bucket_file_count_limit: usize,
diff --git a/src/bucket/conditional_requests.rs b/src/bucket/conditional_requests.rs
index e69de29..7865e0b 100644
--- a/src/bucket/conditional_requests.rs
+++ b/src/bucket/conditional_requests.rs
@@ -0,0 +1,48 @@
+use core::range::Range;
+use time::OffsetDateTime;
+
+/// CAS
+/// Compare and swap (CAS) is usually the conditional part of a request that must be met in order for the request to be completed.
+
+pub enum BucketHash {
+ Sha256([u8; 32]),
+ Sha512([u8; 64]),
+ None,
+ // add more..
+}
+
+
+pub struct DataForRange {
+ /// Byte range to compare against.
+ range: Range<u64>,
+ /// You are only able to do 1 Kbyte of compare and swap for data.
+ data: Vec<u8>,
+}
+
+
+
+
+pub enum Condition {
+ BucketMetadataCondition(BucketMetadataCondition),
+ FileCondition(FileCondition),
+}
+
+pub enum BucketMetadataCondition {
+ Hash(BucketHash), /// Will compare the hash to see if it matches.
+ Tag(Vec), /// every tag is an entity in a collection, you are able to check the tags for it.
+ ModifyDate(OffsetDateTime), /// Compared against the last modification date.
+ Name(String),
+ Size(u64),
+}
+
+pub enum FileCondition {
+ Range(DataForRange),
+ /// Compares the entire file hash. Maybe TODO: Remove???
+ Data(BucketHash),
+
+ Size(u64),
+}
+
+pub struct ConditionalRequest {
+
+}
\ No newline at end of file
diff --git a/src/bucket/mod.rs b/src/bucket/mod.rs
index fcf2dca..b7fa5b6 100644
--- a/src/bucket/mod.rs
+++ b/src/bucket/mod.rs
@@ -5,5 +5,6 @@ pub mod bucket_feature_flags;
pub mod bucket_permission;
mod bucket_retention_policy;
pub mod bucket_compression;
-mod storage_operation_behavior_flags;
-mod bucket_limits;
\ No newline at end of file
+pub mod storage_operation_behavior_flags;
+pub mod bucket_limits;
+pub mod conditional_requests;
\ No newline at end of file
diff --git a/src/bucket/storage_operation_behavior_flags.rs b/src/bucket/storage_operation_behavior_flags.rs
index fe32224..948653d 100644
--- a/src/bucket/storage_operation_behavior_flags.rs
+++ b/src/bucket/storage_operation_behavior_flags.rs
@@ -10,7 +10,7 @@ bitflags::bitflags! {
/// Allow operations to overwrite existing data.
const SHOULD_OVERWRITE = 0b0000_0010;
- /// Indicates that the operation can be destructive to storage capacity of the bucket.
+ /// Indicates that the operation can be destructive to the storage capacity of the bucket.
const IS_CAPACITY_DESTRUCTIVE = 0b0000_0100;
}
diff --git a/src/encryption.rs b/src/encryption.rs
index 3159b33..a01b5b4 100644
--- a/src/encryption.rs
+++ b/src/encryption.rs
@@ -11,6 +11,7 @@ use std::fmt;
use std::fmt::Display;
use std::num::ParseIntError;
use std::str::FromStr;
+use pkcs8::ObjectIdentifier;
use serde::{Deserialize, Serialize};
use strum::{Display, EnumString};
@@ -25,6 +26,19 @@ pub enum EncryptionAlgorithm {
Custom(String),
}
+
+impl EncryptionAlgorithm {
+ fn oid() -> Option {
+ match Self {
+ EncryptionAlgorithm::None => { None }
+ EncryptionAlgorithm::AES256 => { Some(ObjectIdentifier::new("2.16.840.1.101.3.4.1.46").unwrap()) }
+ EncryptionAlgorithm::ChaCha20Poly1305 => { None }
+ EncryptionAlgorithm::XChaCha20Poly1305 => { None }
+ EncryptionAlgorithm::Custom(_) => { None }
+ }
+ }
+}
+
#[derive(EnumString, PartialEq, Debug, Serialize, strum::Display, Clone, Eq, Deserialize)]
#[repr(u8)]
pub enum Role {
@@ -47,9 +61,6 @@ pub struct BucketEncryptionScheme {
/// The encryption algorithm used to secure the data in the bucket.
/// This is represented by the `EncryptionAlgorithm` enum.
pub encryption: EncryptionAlgorithm,
- /// Derive function used for the bucket.
- /// Argon2iD is the most secure with PBKDF2 being less so but, docent require much memory.
- pub kdf: KeyDeriveFunction,
}
@@ -130,7 +141,6 @@ impl FromStr for BucketEncryptionScheme
responsible: role,
encryption: EncryptionAlgorithm::from_str(encryption.as_str()).unwrap(),
version,
- kdf: Default::default(),
})
}
}
diff --git a/src/key/derived_key.rs b/src/key/derived_key.rs
index dfae98f..1783e2c 100644
--- a/src/key/derived_key.rs
+++ b/src/key/derived_key.rs
@@ -7,6 +7,7 @@ use secrecy::ExposeSecret;
use sha3::digest;
use sha3::digest::Update;
use sha3::Sha3_256;
+use crate::encryption::EncryptionAlgorithm;
use crate::key::{CryptoHashDerivedKeyType, CryptoMasterKey, SecureGenericArray};
/// 256-bit key
@@ -37,6 +38,8 @@ impl CryptoHashDerivedKeyType for Sha3_256CryptoHashDeri
{
type Error = Infallible;
type CryptoHasher = Sha3_256;
+
+
/// Generates a `HashDerivedKey` from a master key and a nonce.
///
/// # Parameters
@@ -50,11 +53,11 @@ impl CryptoHashDerivedKeyType for Sha3_256CryptoHashDeri
hasher.update(master_key.as_slice());
hasher.update(nonce);
// Create a SecureGenericArray from the finalized hash
- Self {
+ Ok( Self {
secret: SecureGenericArray {
0: GenericArray::from_slice(&hasher.finalize()),
},
- }
+ })
}
}
diff --git a/src/key/master_key.rs b/src/key/master_key.rs
index 3597f02..2cdabec 100644
--- a/src/key/master_key.rs
+++ b/src/key/master_key.rs
@@ -9,7 +9,7 @@ use secrecy::ExposeSecret;
use sha3::{Digest, Sha3_256};
use std::convert::Infallible;
use digest::typenum;
-use pkcs8::PrivateKeyInfo;
+use pkcs8::{ObjectIdentifier, PrivateKeyInfo};
use pkcs8::spki::AlgorithmIdentifier;
use crate::key::{CryptoMasterKey, SecureGenericArray};
@@ -60,6 +60,11 @@ impl CryptoMasterKey for MasterKey256 {
}
+#[derive(thiserror::Error, Debug)]
+pub enum MasterKey256ParseError {
+
+}
+
impl TryFrom<&PasswordHash<'_>> for MasterKey256 {
type Error = Infallible;
@@ -74,7 +79,7 @@ impl TryFrom<&PasswordHash<'_>> for MasterKey256 {
})
}
}
-impl From> for MasterKey256 {
+impl From> for MasterKey256 {
fn from(value: SecureGenericArray) -> Self {
Self {
secrete: value,
@@ -86,7 +91,7 @@ impl From> for MasterKey256
impl TryInto> for MasterKey256 {
type Error = Infallible;
- fn try_into(self) -> Result, Self::Error> {
+ fn try_into(self) -> Result, Self::Error> {
Ok(PrivateKeyInfo {
algorithm: AlgorithmIdentifier {
oid: (),
@@ -98,26 +103,16 @@ impl TryInto> for MasterKey256 {
}
}
-impl TryFrom for MasterKey256 {
- type Error = Infallible;
- fn try_from(value: pkcs8::PrivateKeyInfo) -> Result {
- value.algorithm.oid
- Ok(
- Self {
- secrete: SecureGenericArray::from(value.private_key),
- }
+impl MasterKey256 {
+ pub fn oid() -> Option {
+ // Example OID for the master key (custom or private)
+ Some(
+ ObjectIdentifier::new("1.3.6.1.4.1.99999.1.1").unwrap()
)
}
}
-impl TryFrom for MasterKey256 {
- type Error = Infallible;
- fn try_from(value: pkc1::key) -> Result {
-
- }
-}
-
#[cfg(test)]
mod tests {
use crate::module::encryption::key::master_key::MasterKey256;
diff --git a/src/key/mod.rs b/src/key/mod.rs
index 478f573..fb9eb04 100644
--- a/src/key/mod.rs
+++ b/src/key/mod.rs
@@ -7,6 +7,7 @@ use generic_array::{ArrayLength, GenericArray};
use secrecy::ExposeSecret;
use std::fmt::Debug;
use zeroize::Zeroize;
+use crate::encryption::EncryptionAlgorithm;
pub mod derived_key;
pub mod master_key;
diff --git a/src/key/shard_master_key_generator.rs b/src/key/shard_master_key_generator.rs
index e2d68e9..54e1eaf 100644
--- a/src/key/shard_master_key_generator.rs
+++ b/src/key/shard_master_key_generator.rs
@@ -1,72 +1,101 @@
-use argon2::{Argon2, PasswordHash};
-use vsss_rs::{*, feldman};
-use elliptic_curve::ff::Field;
-use vsss_rs::elliptic_curve::Scalar;
-use vsss_rs::shamir::split_secret;
-use crate::key::CryptoMasterKey;
-use crate::key::master_key::MasterKey256;
+use argon2::password_hash::Salt;
+use argon2::{Argon2, PasswordHash, PasswordHasher, PasswordVerifier};
+use vsss_rs::{combine_shares, shamir::split_secret, Gf256};
+use rand::thread_rng;
+use crate::encryption::Argon2IdParams;
+use crate::key::{CryptoMasterKey, MasterKey256};
-pub enum VerifiableSecretSharingSchemeAlgorithm
-{
- ShamirSecrete,
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub enum VerifiableSecretSharingSchemeAlgorithm {
+ ShamirSecret,
}
-
-/// https://docs.rs/vsss-rs/latest/vsss_rs/
+/// Parameters for Verifiable Secret Sharing Scheme
+#[derive(Clone, PartialEq, Eq, Debug)]
pub struct VerifiableSecretSharingSchemeParams {
pub algorithm: VerifiableSecretSharingSchemeAlgorithm,
- /// How many shares of the secrete is needed in order for it to be reconstructed.
+ /// Number of shares required to reconstruct the secret
pub threshold: u32,
- /// How many secretes to generate.
+ /// Total number of shares to generate
pub limit: u32,
}
impl Default for VerifiableSecretSharingSchemeParams {
fn default() -> Self {
Self {
- algorithm: VerifiableSecretSharingSchemeAlgorithm::ShamirSecrete,
+ algorithm: VerifiableSecretSharingSchemeAlgorithm::ShamirSecret,
threshold: 2,
limit: 3,
}
-}
+ }
}
-/// Creates a series of master keys
-/// We split it into 5 parts,
-/// 1. password
-/// 2. server-secrete
-/// 3. recover-code
-///
-/// We require at-least 2 of the secretes, password is used to create the secretes so it is responsible for creating the keys.
-/// We use the 1 and 2nd for normal devices.
-/// When user loses the password they can use recover-code to gain the secrete back.
-/// If they don't have the recovery code they can if they want to store a backup in devices such as
-pub struct MasterKeyShareGenerator {
+impl VerifiableSecretSharingSchemeParams {
+ pub fn validate(&self) -> bool {
+ self.threshold > 0 && self.threshold <= self.limit
+ }
+}
+/// Struct for building master keys with verifiable secret sharing
+#[derive(Clone)]
+pub struct MasterKeyBuilder<'a> {
+ pub split_secret_params: Option,
+ pub password_hash: Option>,
}
+pub struct SplitSecret {
+ pub shares: Vec,
+}
-/// WE take the master key, split it into multiple parts
-impl MasterKeyShareGenerator {
- pub fn split(&self, password_hash: PasswordHash, params: VerifiableSecretSharingSchemeParams) -> Box<[impl CryptoMasterKey]> {
- let secret = password_hash.to_string();
- let a =
- let g = Gf256::try_from(secret).unwrap();
+pub struct GeneratedMasterKeys {
+ pub master_key: MasterKey256,
+ pub split_keys: Vec,
+}
- let res = split_secret::>(params.threshold, params.limit, g, None, &mut rng);
- assert!(res.is_ok());
- let (shares, verifier) = res.unwrap();
- for s in &shares {
- assert!(verifier.verify_share(s).is_ok());
+impl<'a> MasterKeyBuilder<'a> {
+ /// Add secret sharing parameters
+ pub fn with_split_secret(mut self, params: VerifiableSecretSharingSchemeParams) -> Self {
+ if params.validate() {
+ self.split_secret_params = Some(params);
+ } else {
+ panic!("Invalid secret sharing parameters");
}
- let res = combine_shares(&shares);
- assert!(res.is_ok());
- let secret_1: Scalar = res.unwrap();
- assert_eq!(secret, secret_1);
+ self
}
- pub fn combine() -> MasterKey256 {
+ /// Add the password hash
+ pub fn with_password_hash(mut self, password_hash: PasswordHash<'a>) -> Self {
+ self.password_hash = Some(password_hash);
+ self
+ }
- combine_shares()
+ /// Build and return generated master keys
+ pub fn build(self) -> GeneratedMasterKeys {
+ let params = self.split_secret_params.unwrap_or_default();
+ let password_hash = self.password_hash.expect("Password hash is required");
+
+ // Convert password hash to bytes for secret sharing
+ let secret_bytes = password_hash.to_string().into_bytes();
+ let secret_field = Gf256::try_from(secret_bytes.as_slice()).expect("Invalid secret for Gf256");
+
+ // Split the secret into shares
+ let mut rng = thread_rng();
+ let shares = split_secret(params.threshold as usize, params.limit as usize, &secret_field, &mut rng)
+ .expect("Failed to split secret");
+
+ // Verify and reconstruct the secret to ensure correctness
+ let reconstructed_secret = combine_shares(&shares).expect("Failed to combine shares");
+ assert_eq!(secret_field, reconstructed_secret);
+
+ // Wrap shares into `SplitSecret`
+ let split_keys = shares
+ .into_iter()
+ .map(|share| SplitSecret { shares: vec![share] })
+ .collect();
+
+ GeneratedMasterKeys {
+ master_key: MasterKey256::from(secret_field.to_bytes()),
+ split_keys,
+ }
}
-}
\ No newline at end of file
+}
diff --git a/src/lib.rs b/src/lib.rs
index 5f46acd..8522406 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1,6 +1,7 @@
#![feature(slice_pattern)]
#![feature(slice_split_once)]
#![feature(associated_type_defaults)]
+#![feature(new_range_api)]
extern crate core;
use core::slice::SlicePattern;
diff --git a/src/share/centralized/centralized_share_link.rs b/src/share/centralized/centralized_share_link.rs
index a0e78e0..013fc2a 100644
--- a/src/share/centralized/centralized_share_link.rs
+++ b/src/share/centralized/centralized_share_link.rs
@@ -1,5 +1,4 @@
use crate::region::RegionCluster;
-use crate::share::centralized::centralized_secrete_share_link_token::CentralizedShareLinkToken;
use crate::share::centralized::centralized_share_link_token::CentralizedShareLinkToken;
use crate::share::fully_qualified_domain_name::FullyQualifiedDomainName;
use crate::share::token_path::TokenPath;
@@ -63,7 +62,7 @@ impl TryInto for CentralizedShareLink {
// Create URI, handling errors with `?`
let uri = Uri::builder()
.scheme(Scheme::HTTPS)
- .authority(&self.fqdn)
+ .authority(self.fqdn.to_string())
.path_and_query(self.path.to_string())
.build()?;
Ok(uri)
diff --git a/src/share/decentralized/decentralized_secrete_share_link_url_encoded.rs b/src/share/decentralized/decentralized_secrete_share_link_url_encoded.rs
index bed1c5a..d485b42 100644
--- a/src/share/decentralized/decentralized_secrete_share_link_url_encoded.rs
+++ b/src/share/decentralized/decentralized_secrete_share_link_url_encoded.rs
@@ -6,7 +6,7 @@ use std::str::FromStr;
use aes_gcm;
use aes_gcm::Aes256Gcm;
use base64::{Engine, engine::general_purpose};
-use digest::generic_array::GenericArray;
+use digest::generic_array::{ArrayLength, GenericArray};
use digest::OutputSizeUser;
use ed25519_compact::Noise;
use http::uri::Scheme;
@@ -14,7 +14,8 @@ use sha3::{Digest, Sha3_224};
use time::OffsetDateTime;
use crate::bucket::bucket_guid::BucketGuid;
use crate::bucket::bucket_permission::BucketPermissionFlags;
-use crate::encryption::{BucketEncryptionScheme, EncryptionAlgorithm};
+use crate::encryption::{BucketEncryptionScheme, EncryptionAlgorithm, Role};
+use crate::key::CryptoHashDerivedKeyType;
use crate::key::derived_key::CryptoHashDerivedKeySha3_256;
use crate::region::RegionCluster;
use crate::share::decentralized::decentralized_secrete_share_token::DecentralizedSecretShareToken;
@@ -82,19 +83,23 @@ fn hash_secret_share_link(
impl Display for DecentralizedSecretShareLink {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+ let region_cluster = match self.region_cluster {
+ Some(region_cluster) => { write!()},
+ None => "",
+ };
write!(
f,
"{}{}.{}/{}/{}#{}#{}#{}#{}",
"https://",
- self.region_cluster.to_string(),
+ region_cluster,
DOMAIN_URL,
SECRET_SHARE_PATH_URL,
general_purpose::URL_SAFE_NO_PAD.encode(self.bucket_guid.as_slice()),
- self.bucket_key.as_slice(),
+ general_purpose::URL_SAFE_NO_PAD.encode(self.bucket_key.as_slice()),
general_purpose::URL_SAFE_NO_PAD.encode(self.permission.bits().to_be_bytes()),
general_purpose::URL_SAFE_NO_PAD
.encode(bincode::serialize(&self.expires).unwrap().as_slice()),
- general_purpose::URL_SAFE_NO_PAD.encode(self.signature.as_slice()),
+ general_purpose::URL_SAFE_NO_PAD.encode(self.signature.0.as_slice()),
)
}
}
@@ -192,8 +197,6 @@ pub enum SecretShareLinkVerifySignatureError {
}
impl DecentralizedSecretShareLink {
-
-
// Verify the signature against the signature file with special identifier.
pub fn verify_signature(
&self,
@@ -213,50 +216,30 @@ impl DecentralizedSecretShareLink {
}
const VERSION: SharingApiPath = SharingApiPath::V1;
- pub fn new(
+ pub fn new>(
region_cluster: Option,
bucket_guid: BucketGuid,
- bucket_key: aes_gcm::Key,
+ bucket_key: impl CryptoHashDerivedKeyType,
permission: BucketPermissionFlags,
expires: OffsetDateTime,
+ secrete_key: ed25519_compact::SecretKey,
+ bucket_encryption_scheme: BucketEncryptionScheme,
) -> Self {
- let share_token = DecentralizedSecretShareToken::new(®ion_cluster, &bucket_guid, &bucket_key, &permission, &expires);
- share_token.
-
- let noise = Noise::from_slice(bucket_guid.as_slice()).unwrap(); // Do we even need it?
- let signature = secret_key.sign(hash_output, Some(noise));
+ let token = DecentralizedSecretShareToken::new(®ion_cluster, &bucket_guid, &bucket_key, &permission, &expires);
+ token.sign(&secrete_key, &bucket_guid);
Self {
scheme: Scheme::HTTPS,
- region_cluster: Some(region_cluster),
+ region_cluster,
version: Self::VERSION,
-
bucket_guid,
- bucket_encryption: BucketEncryptionScheme {},
- bucket_key: (),
+ bucket_encryption: bucket_encryption_scheme,
+ bucket_key,
permission,
expires,
+ token,
signature,
}
}
- // TODO: There is no way for the server to invalidate a secret share link.
- /*
- Generate a token that is used by the server to identify the link.
- */
- pub fn get_token(&self) -> [u8; 32] {
- let mut hash_output = GenericArray::default();
- let hash_output = self.compute_hash::(
- self.region_cluster,
- self.bucket_guid,
- self.bucket_key,
- self.permission,
- self.expires,
- &mut hash_output,
- );
-
- let mut output: [u8; 32] = [0; 32];
- output.clone_from_slice(&hash_output);
- output
- }
}
#[derive(Debug, thiserror::Error)]
diff --git a/src/share/decentralized/decentralized_secrete_share_token.rs b/src/share/decentralized/decentralized_secrete_share_token.rs
index 5d9ea30..194cf02 100644
--- a/src/share/decentralized/decentralized_secrete_share_token.rs
+++ b/src/share/decentralized/decentralized_secrete_share_token.rs
@@ -7,8 +7,12 @@ use aes_gcm::aes::cipher::crypto_common::OutputSizeUser;
use core::slice::SlicePattern;
use digest::generic_array::GenericArray;
use digest::Digest;
+use ed25519_compact::{Noise, PublicKey, SecretKey};
+use generic_array::ArrayLength;
use time::OffsetDateTime;
+use crate::share::decentralized::decentralized_share_token::TokenSignature;
+#[derive(Clone, Debug)]
pub struct DecentralizedSecretShareToken {
pub token: SecreteShareLinkToken,
pub region: Option,
@@ -17,7 +21,7 @@ pub struct DecentralizedSecretShareToken {
impl DecentralizedSecretShareToken
{
- pub fn hash(
+ pub fn hash(
region_cluster: &Option,
bucket_guid: &BucketGuid,
bucket_key: &impl CryptoHashDerivedKeyType,
@@ -49,12 +53,23 @@ impl DecentralizedSecretShareToken
expires: &OffsetDateTime) -> Self{
let mut token = Self::hash(®ion_cluster,
&bucket_guid,
- &bucket_key,
+ &bucket_key.as_slice(),
&permission,
&expires);
Self {
- token: <[u8; 32]>::try_from(token.as_slice()).unwrap(),
+ token: SecreteShareLinkToken(<[u8; 32]>::try_from(token.as_slice()).unwrap()),
region: region_cluster.clone(),
}
}
+
+
+ pub fn sign(&self, secrete_key: &SecretKey, bucket_guid: &BucketGuid) -> TokenSignature {
+ //let noise = Noise::from_slice(self.region);
+ let noise = Noise::from_slice(bucket_guid.to_bytes()).unwrap();
+ TokenSignature(secrete_key.sign(&self.token.0.as_slice(),Some(noise)))
+ }
+
+ pub fn verify(&self, public_key: &PublicKey, signature: &TokenSignature) -> Result<(), ed25519_compact::Error> {
+ public_key.verify(self.token.0.as_slice(), &signature.0)
+ }
}
\ No newline at end of file
diff --git a/src/share/decentralized/decentralized_share_token.rs b/src/share/decentralized/decentralized_share_token.rs
index 2ca4853..8b41098 100644
--- a/src/share/decentralized/decentralized_share_token.rs
+++ b/src/share/decentralized/decentralized_share_token.rs
@@ -40,14 +40,14 @@ impl DecentralizedShareToken {
&expires_at);
assert_eq!(token.len(), 32);
Self {
- token: SecreteShareLinkToken(<[u8; 32]>::try_from(token.as_slice()).unwrap()),
- region,
+ token: ShareLinkToken(<[u8; 32]>::try_from(token.as_slice()).unwrap()),
+ region: *region,
}
}
pub fn sign(&self, secrete_key: &SecretKey, bucket_guid: &BucketGuid) -> TokenSignature {
//let noise = Noise::from_slice(self.region);
- let noise = Noise::from_slice(bucket_guid.as_slice()).unwrap();
+ let noise = Noise::from_slice(bucket_guid.to_bytes()).unwrap();
TokenSignature(secrete_key.sign(&self.token.0.as_slice(),Some(noise)))
}
diff --git a/src/share/share_link_token.rs b/src/share/share_link_token.rs
index b983b9d..f678c48 100644
--- a/src/share/share_link_token.rs
+++ b/src/share/share_link_token.rs
@@ -4,7 +4,7 @@ use aes_gcm::aead::rand_core::{CryptoRng, RngCore};
use base64::engine::general_purpose::URL_SAFE_NO_PAD;
use base64::{DecodeError, Engine};
-#[derive(Debug, PartialEq)]
+#[derive(Debug, PartialEq, Clone)]
pub struct SecreteShareLinkToken(pub [u8; 32]);
impl SecreteShareLinkToken {