diff --git a/src/actor.rs b/src/actor.rs
index 98d0225..c8cc7ec 100644
--- a/src/actor.rs
+++ b/src/actor.rs
@@ -32,11 +32,18 @@ use crate::{
 const ACTION_CAP: usize = 1024;
 pub(crate) const MAX_COMMIT_DELAY: Duration = Duration::from_millis(500);
 
+/// Import an author action.
+#[derive(Debug, Serialize, Deserialize)]
+pub(crate) struct ImportAuthorAction {
+    /// The author to import.
+    pub author: Author,
+}
+
 #[derive(derive_more::Debug, derive_more::Display)]
 enum Action {
     #[display("NewAuthor")]
     ImportAuthor {
-        author: Author,
+        action: ImportAuthorAction,
         #[debug("reply")]
         reply: oneshot::Sender<Result<AuthorId>>,
     },
@@ -221,7 +228,7 @@ struct OpenReplica {
 /// waiting for the actor to finish happens in an async context, and therefore that the final
 /// [`SyncHandle::drop`] will not block.
 #[derive(Debug, Clone)]
-pub struct SyncHandle {
+pub(crate) struct SyncHandle {
     tx: async_channel::Sender<Action>,
     join_handle: Arc<Option<JoinHandle<()>>>,
 }
@@ -500,9 +507,20 @@ impl SyncHandle {
         self.send(Action::ListReplicas { reply }).await
     }
 
+    /// Imports the given author.
+    ///
+    /// Warning: The [`Author`] struct contains sensitive data.
     pub async fn import_author(&self, author: Author) -> Result<AuthorId> {
+        self.import_author_action(ImportAuthorAction { author })
+            .await
+    }
+
+    pub(crate) async fn import_author_action(
+        &self,
+        action: ImportAuthorAction,
+    ) -> Result<AuthorId> {
         let (reply, rx) = oneshot::channel();
-        self.send(Action::ImportAuthor { author, reply }).await?;
+        self.send(Action::ImportAuthor { action, reply }).await?;
         rx.await?
     }
 
@@ -663,9 +681,9 @@ impl Actor {
             Action::Shutdown { .. } => {
                 unreachable!("Shutdown is handled in run()")
             }
-            Action::ImportAuthor { author, reply } => {
-                let id = author.id();
-                send_reply(reply, self.store.import_author(author).map(|_| id))
+            Action::ImportAuthor { action, reply } => {
+                let id = action.author.id();
+                send_reply(reply, self.store.import_author(action.author).map(|_| id))
             }
             Action::ExportAuthor { author, reply } => {
                 send_reply(reply, self.store.get_author(&author))
diff --git a/src/engine.rs b/src/engine.rs
index bc98661..779f383 100644
--- a/src/engine.rs
+++ b/src/engine.rs
@@ -2,14 +2,9 @@
 //!
 //! [`crate::Replica`] is also called documents here.
 
-use std::{
-    io,
-    path::PathBuf,
-    str::FromStr,
-    sync::{Arc, RwLock},
-};
+use std::{io, path::PathBuf, str::FromStr, sync::Arc};
 
-use anyhow::{bail, Context, Result};
+use anyhow::{bail, ensure, Context, Result};
 use futures_lite::{Stream, StreamExt};
 use iroh::{key::PublicKey, Endpoint, NodeAddr};
 use iroh_blobs::{
@@ -18,7 +13,7 @@ use iroh_blobs::{
 };
 use iroh_gossip::net::Gossip;
 use serde::{Deserialize, Serialize};
-use tokio::sync::{mpsc, oneshot};
+use tokio::sync::{mpsc, oneshot, RwLock};
 use tokio_util::task::AbortOnDropHandle;
 use tracing::{error, error_span, Instrument};
 
@@ -45,11 +40,11 @@ const SUBSCRIBE_CHANNEL_CAP: usize = 256;
 #[derive(derive_more::Debug, Clone)]
 pub struct Engine {
     /// [`Endpoint`] used by the engine.
-    pub endpoint: Endpoint,
+    pub(crate) endpoint: Endpoint,
     /// Handle to the actor thread.
-    pub sync: SyncHandle,
+    pub(crate) sync: SyncHandle,
     /// The persistent default author for this engine.
-    pub default_author: Arc<DefaultAuthor>,
+    default_author: Arc<DefaultAuthor>,
     to_live_actor: mpsc::Sender<ToLiveActor>,
     #[allow(dead_code)]
     actor_handle: Arc<AbortOnDropHandle<()>>,
@@ -252,6 +247,93 @@ impl Engine {
     pub fn local_pool_handle(&self) -> &LocalPoolHandle {
         &self.local_pool_handle
     }
+
+    /// Authors API.
+    pub fn authors(&self) -> Authors {
+        Authors {
+            sync: self.sync.clone(),
+            default_author: self.default_author.clone(),
+        }
+    }
+}
+
+/// Authors client
+#[derive(Debug, Clone)]
+pub struct Authors {
+    sync: SyncHandle,
+    default_author: Arc<DefaultAuthor>,
+}
+
+impl Authors {
+    /// Creates a new document author.
+    ///
+    /// You likely want to save the returned [`AuthorId`] somewhere so that you can use this author
+    /// again.
+    ///
+    /// If you need only a single author, use [`Self::default`].
+    pub async fn create(&self) -> Result<AuthorId> {
+        let mut rng = rand::rngs::OsRng::default();
+        let author = Author::new(&mut rng);
+        self.sync.import_author(author).await
+    }
+
+    /// Returns the default document author of this node.
+    ///
+    /// On persistent nodes, the author is created on first start and its public key is saved
+    /// in the data directory.
+    ///
+    /// The default author can be set with [`Self::set_default`].
+    pub async fn default(&self) -> AuthorId {
+        self.default_author.get().await
+    }
+
+    /// Sets the node-wide default author.
+    ///
+    /// If the author does not exist, an error is returned.
+    ///
+    /// On a persistent node, the author id will be saved to a file in the data directory and
+    /// reloaded after a restart.
+    pub async fn set_default(&self, author_id: AuthorId) -> Result<()> {
+        self.default_author.set(author_id, &self.sync).await
+    }
+
+    /// Lists document authors for which we have a secret key.
+    ///
+    /// It's only possible to create writes from authors that we have the secret key of.
+    pub async fn list(&self) -> Result<async_channel::Receiver<Result<AuthorId>>> {
+        let (tx, rx) = async_channel::bounded(64);
+        self.sync.list_authors(tx).await?;
+
+        Ok(rx)
+    }
+
+    /// Exports the given author.
+    ///
+    /// Warning: The [`Author`] struct contains sensitive data.
+    pub async fn export(&self, author: AuthorId) -> Result<Option<Author>> {
+        self.sync.export_author(author).await
+    }
+
+    /// Imports the given author.
+    ///
+    /// Warning: The [`Author`] struct contains sensitive data.
+    pub async fn import(&self, author: Author) -> Result<AuthorId> {
+        self.sync.import_author(author).await
+    }
+
+    /// Deletes the given author by id.
+    ///
+    /// Warning: This permanently removes this author.
+    ///
+    /// Returns an error if attempting to delete the default author.
+    pub async fn delete(&self, author: AuthorId) -> Result<()> {
+        let default_author = self.default().await;
+        ensure!(
+            author != default_author,
+            "Deleting the default author is not supported"
+        );
+        self.sync.delete_author(author).await
+    }
 }
 
 /// Converts an [`EntryStatus`] into a ['ContentStatus'].
@@ -359,7 +441,7 @@ impl DefaultAuthorStorage {
     ///
     /// Returns an error if the author can't be parsed or if the uathor does not exist in the docs
     /// store.
-    pub async fn load(&self, docs_store: &SyncHandle) -> anyhow::Result<AuthorId> {
+    pub(crate) async fn load(&self, docs_store: &SyncHandle) -> anyhow::Result<AuthorId> {
         match self {
             Self::Mem => {
                 let author = Author::new(&mut rand::thread_rng());
@@ -432,7 +514,10 @@ impl DefaultAuthor {
     /// Load the default author from storage.
     ///
     /// If the storage is empty creates a new author and persists it.
-    pub async fn load(storage: DefaultAuthorStorage, docs_store: &SyncHandle) -> Result<Self> {
+    pub(crate) async fn load(
+        storage: DefaultAuthorStorage,
+        docs_store: &SyncHandle,
+    ) -> Result<Self> {
         let value = storage.load(docs_store).await?;
         Ok(Self {
             value: RwLock::new(value),
@@ -441,17 +526,17 @@ impl DefaultAuthor {
     }
 
     /// Get the current default author.
- pub fn get(&self) -> AuthorId { - *self.value.read().unwrap() + pub async fn get(&self) -> AuthorId { + *self.value.read().await } /// Set the default author. - pub async fn set(&self, author_id: AuthorId, docs_store: &SyncHandle) -> Result<()> { + pub(crate) async fn set(&self, author_id: AuthorId, docs_store: &SyncHandle) -> Result<()> { if docs_store.export_author(author_id).await?.is_none() { bail!("The author does not exist"); } self.storage.persist(author_id).await?; - *self.value.write().unwrap() = author_id; + *self.value.write().await = author_id; Ok(()) } } diff --git a/src/net.rs b/src/net.rs index 8d15e37..469e78e 100644 --- a/src/net.rs +++ b/src/net.rs @@ -25,7 +25,7 @@ pub const ALPN: &[u8] = b"/iroh-sync/1"; mod codec; /// Connect to a peer and sync a replica -pub async fn connect_and_sync( +pub(crate) async fn connect_and_sync( endpoint: &Endpoint, sync: &SyncHandle, namespace: NamespaceId, @@ -104,7 +104,7 @@ pub enum AcceptOutcome { } /// Handle an iroh-docs connection and sync all shared documents in the replica store. -pub async fn handle_connection( +pub(crate) async fn handle_connection( sync: SyncHandle, connecting: iroh::endpoint::Connecting, accept_cb: F, diff --git a/src/rpc.rs b/src/rpc.rs index d1e974a..b386fdd 100644 --- a/src/rpc.rs +++ b/src/rpc.rs @@ -65,14 +65,6 @@ impl Engine { SetDownloadPolicy(msg) => chan.rpc(msg, this, Self::doc_set_download_policy).await, GetDownloadPolicy(msg) => chan.rpc(msg, this, Self::doc_get_download_policy).await, GetSyncPeers(msg) => chan.rpc(msg, this, Self::doc_get_sync_peers).await, - - AuthorList(msg) => chan.server_streaming(msg, this, Self::author_list).await, - AuthorCreate(msg) => chan.rpc(msg, this, Self::author_create).await, - AuthorImport(msg) => chan.rpc(msg, this, Self::author_import).await, - AuthorExport(msg) => chan.rpc(msg, this, Self::author_export).await, - AuthorDelete(msg) => chan.rpc(msg, this, Self::author_delete).await, - AuthorGetDefault(msg) => chan.rpc(msg, this, Self::author_default).await, - AuthorSetDefault(msg) => chan.rpc(msg, this, Self::author_set_default).await, } } } diff --git a/src/rpc/client.rs b/src/rpc/client.rs index 90a0ad7..83d5fdb 100644 --- a/src/rpc/client.rs +++ b/src/rpc/client.rs @@ -2,7 +2,6 @@ use anyhow::Result; use futures_util::{Stream, StreamExt}; -pub mod authors; pub mod docs; fn flatten( diff --git a/src/rpc/client/authors.rs b/src/rpc/client/authors.rs index 18a154f..8da16ca 100644 --- a/src/rpc/client/authors.rs +++ b/src/rpc/client/authors.rs @@ -9,13 +9,7 @@ use quic_rpc::{client::BoxedConnector, Connector}; use super::flatten; #[doc(inline)] pub use crate::engine::{Origin, SyncEvent, SyncReason}; -use crate::{ - rpc::proto::{ - AuthorCreateRequest, AuthorDeleteRequest, AuthorExportRequest, AuthorGetDefaultRequest, - AuthorImportRequest, AuthorListRequest, AuthorSetDefaultRequest, RpcService, - }, - Author, AuthorId, -}; +use crate::{actor::ImportAuthorAction, rpc::proto::RpcService, Author, AuthorId}; /// Iroh docs client. #[derive(Debug, Clone)] @@ -85,7 +79,7 @@ impl> Client { /// /// Warning: The [`Author`] struct contains sensitive data. 
pub async fn import(&self, author: Author) -> Result<()> { - self.rpc.rpc(AuthorImportRequest { author }).await??; + self.rpc.rpc(ImportAuthorAction { author }).await??; Ok(()) } diff --git a/src/rpc/client/docs.rs b/src/rpc/client/docs.rs index 8562fa5..c21a4d6 100644 --- a/src/rpc/client/docs.rs +++ b/src/rpc/client/docs.rs @@ -20,7 +20,7 @@ use quic_rpc::{ }; use serde::{Deserialize, Serialize}; -use super::{authors, flatten}; +use super::flatten; use crate::{ actor::OpenState, rpc::proto::{ @@ -56,11 +56,6 @@ impl> Client { Self { rpc } } - /// Returns an authors client. - pub fn authors(&self) -> authors::Client { - authors::Client::new(self.rpc.clone()) - } - /// Creates a client. pub async fn create(&self) -> Result> { let res = self.rpc.rpc(CreateRequest {}).await??; diff --git a/src/rpc/docs_handle_request.rs b/src/rpc/docs_handle_request.rs index 2c147ff..1d5c74b 100644 --- a/src/rpc/docs_handle_request.rs +++ b/src/rpc/docs_handle_request.rs @@ -12,124 +12,26 @@ use iroh_blobs::{ use super::{ client::docs::ShareMode, proto::{ - AuthorCreateRequest, AuthorCreateResponse, AuthorDeleteRequest, AuthorDeleteResponse, - AuthorExportRequest, AuthorExportResponse, AuthorGetDefaultRequest, - AuthorGetDefaultResponse, AuthorImportRequest, AuthorImportResponse, AuthorListRequest, - AuthorListResponse, AuthorSetDefaultRequest, AuthorSetDefaultResponse, CloseRequest, - CloseResponse, CreateRequest as DocCreateRequest, CreateResponse as DocCreateResponse, - DelRequest, DelResponse, DocListRequest, DocSubscribeRequest, DocSubscribeResponse, - DropRequest, DropResponse, ExportFileRequest, ExportFileResponse, GetDownloadPolicyRequest, - GetDownloadPolicyResponse, GetExactRequest, GetExactResponse, GetManyRequest, - GetManyResponse, GetSyncPeersRequest, GetSyncPeersResponse, ImportFileRequest, - ImportFileResponse, ImportRequest as DocImportRequest, ImportResponse as DocImportResponse, - LeaveRequest, LeaveResponse, ListResponse as DocListResponse, OpenRequest, OpenResponse, + CloseRequest, CloseResponse, CreateRequest as DocCreateRequest, + CreateResponse as DocCreateResponse, DelRequest, DelResponse, DocListRequest, + DocSubscribeRequest, DocSubscribeResponse, DropRequest, DropResponse, ExportFileRequest, + ExportFileResponse, GetDownloadPolicyRequest, GetDownloadPolicyResponse, GetExactRequest, + GetExactResponse, GetManyRequest, GetManyResponse, GetSyncPeersRequest, + GetSyncPeersResponse, ImportFileRequest, ImportFileResponse, + ImportRequest as DocImportRequest, ImportResponse as DocImportResponse, LeaveRequest, + LeaveResponse, ListResponse as DocListResponse, OpenRequest, OpenResponse, SetDownloadPolicyRequest, SetDownloadPolicyResponse, SetHashRequest, SetHashResponse, SetRequest, SetResponse, ShareRequest, ShareResponse, StartSyncRequest, StartSyncResponse, StatusRequest, StatusResponse, }, RpcError, RpcResult, }; -use crate::{engine::Engine, Author, DocTicket, NamespaceSecret}; +use crate::{engine::Engine, DocTicket, NamespaceSecret}; /// Capacity for the flume channels to forward sync store iterators to async RPC streams. 
const ITER_CHANNEL_CAP: usize = 64; impl Engine { - pub(super) async fn author_create( - self, - _req: AuthorCreateRequest, - ) -> RpcResult { - // TODO: pass rng - let author = Author::new(&mut rand::rngs::OsRng {}); - self.sync - .import_author(author.clone()) - .await - .map_err(|e| RpcError::new(&*e))?; - Ok(AuthorCreateResponse { - author_id: author.id(), - }) - } - - pub(super) async fn author_default( - self, - _req: AuthorGetDefaultRequest, - ) -> RpcResult { - let author_id = self.default_author.get(); - Ok(AuthorGetDefaultResponse { author_id }) - } - - pub(super) async fn author_set_default( - self, - req: AuthorSetDefaultRequest, - ) -> RpcResult { - self.default_author - .set(req.author_id, &self.sync) - .await - .map_err(|e| RpcError::new(&*e))?; - Ok(AuthorSetDefaultResponse) - } - - pub(super) fn author_list( - self, - _req: AuthorListRequest, - ) -> impl Stream> + Unpin { - let (tx, rx) = async_channel::bounded(ITER_CHANNEL_CAP); - let sync = self.sync.clone(); - // we need to spawn a task to send our request to the sync handle, because the method - // itself must be sync. - tokio::task::spawn(async move { - let tx2 = tx.clone(); - if let Err(err) = sync.list_authors(tx).await { - tx2.send(Err(err)).await.ok(); - } - }); - rx.boxed().map(|r| { - r.map(|author_id| AuthorListResponse { author_id }) - .map_err(|e| RpcError::new(&*e)) - }) - } - - pub(super) async fn author_import( - self, - req: AuthorImportRequest, - ) -> RpcResult { - let author_id = self - .sync - .import_author(req.author) - .await - .map_err(|e| RpcError::new(&*e))?; - Ok(AuthorImportResponse { author_id }) - } - - pub(super) async fn author_export( - self, - req: AuthorExportRequest, - ) -> RpcResult { - let author = self - .sync - .export_author(req.author) - .await - .map_err(|e| RpcError::new(&*e))?; - - Ok(AuthorExportResponse { author }) - } - - pub(super) async fn author_delete( - self, - req: AuthorDeleteRequest, - ) -> RpcResult { - if req.author == self.default_author.get() { - return Err(RpcError::new(&*anyhow!( - "Deleting the default author is not supported" - ))); - } - self.sync - .delete_author(req.author) - .await - .map_err(|e| RpcError::new(&*e))?; - Ok(AuthorDeleteResponse) - } - pub(super) async fn doc_create(self, _req: DocCreateRequest) -> RpcResult { let namespace = NamespaceSecret::new(&mut rand::rngs::OsRng {}); let id = namespace.id(); diff --git a/src/rpc/proto.rs b/src/rpc/proto.rs index b1f4fcb..40da09e 100644 --- a/src/rpc/proto.rs +++ b/src/rpc/proto.rs @@ -18,8 +18,7 @@ use crate::{ actor::OpenState, engine::LiveEvent, store::{DownloadPolicy, Query}, - Author, AuthorId, Capability, CapabilityKind, DocTicket, Entry, NamespaceId, PeerIdBytes, - SignedEntry, + AuthorId, Capability, CapabilityKind, DocTicket, Entry, NamespaceId, PeerIdBytes, SignedEntry, }; /// The RPC service type for the docs protocol. 
@@ -78,20 +77,6 @@ pub enum Request { SetDownloadPolicy(SetDownloadPolicyRequest), #[rpc(response = RpcResult)] GetSyncPeers(GetSyncPeersRequest), - #[server_streaming(response = RpcResult)] - AuthorList(AuthorListRequest), - #[rpc(response = RpcResult)] - AuthorCreate(AuthorCreateRequest), - #[rpc(response = RpcResult)] - AuthorGetDefault(AuthorGetDefaultRequest), - #[rpc(response = RpcResult)] - AuthorSetDefault(AuthorSetDefaultRequest), - #[rpc(response = RpcResult)] - AuthorImport(AuthorImportRequest), - #[rpc(response = RpcResult)] - AuthorExport(AuthorExportRequest), - #[rpc(response = RpcResult)] - AuthorDelete(AuthorDeleteRequest), } #[allow(missing_docs)] @@ -120,13 +105,6 @@ pub enum Response { SetDownloadPolicy(RpcResult), GetSyncPeers(RpcResult), StreamCreated(RpcResult), - AuthorList(RpcResult), - AuthorCreate(RpcResult), - AuthorGetDefault(RpcResult), - AuthorSetDefault(RpcResult), - AuthorImport(RpcResult), - AuthorExport(RpcResult), - AuthorDelete(RpcResult), } /// Subscribe to events for a document. @@ -454,86 +432,3 @@ pub struct GetSyncPeersResponse { /// List of peers ids pub peers: Option>, } - -/// List document authors for which we have a secret key. -#[derive(Serialize, Deserialize, Debug)] -pub struct AuthorListRequest {} - -/// Response for [`AuthorListRequest`] -#[derive(Serialize, Deserialize, Debug)] -pub struct AuthorListResponse { - /// The author id - pub author_id: AuthorId, -} - -/// Create a new document author. -#[derive(Serialize, Deserialize, Debug)] -pub struct AuthorCreateRequest; - -/// Response for [`AuthorCreateRequest`] -#[derive(Serialize, Deserialize, Debug)] -pub struct AuthorCreateResponse { - /// The id of the created author - pub author_id: AuthorId, -} - -/// Get the default author. -#[derive(Serialize, Deserialize, Debug)] -pub struct AuthorGetDefaultRequest; - -/// Response for [`AuthorGetDefaultRequest`] -#[derive(Serialize, Deserialize, Debug)] -pub struct AuthorGetDefaultResponse { - /// The id of the author - pub author_id: AuthorId, -} - -/// Set the default author. 
-#[derive(Serialize, Deserialize, Debug)] -pub struct AuthorSetDefaultRequest { - /// The id of the author - pub author_id: AuthorId, -} - -/// Response for [`AuthorSetDefaultRequest`] -#[derive(Serialize, Deserialize, Debug)] -pub struct AuthorSetDefaultResponse; - -/// Delete an author -#[derive(Serialize, Deserialize, Debug)] -pub struct AuthorDeleteRequest { - /// The id of the author to delete - pub author: AuthorId, -} - -/// Response for [`AuthorDeleteRequest`] -#[derive(Serialize, Deserialize, Debug)] -pub struct AuthorDeleteResponse; - -/// Exports an author -#[derive(Serialize, Deserialize, Debug)] -pub struct AuthorExportRequest { - /// The id of the author to delete - pub author: AuthorId, -} - -/// Response for [`AuthorExportRequest`] -#[derive(Serialize, Deserialize, Debug)] -pub struct AuthorExportResponse { - /// The author - pub author: Option, -} - -/// Import author from secret key -#[derive(Serialize, Deserialize, Debug)] -pub struct AuthorImportRequest { - /// The author to import - pub author: Author, -} - -/// Response to [`ImportRequest`] -#[derive(Serialize, Deserialize, Debug)] -pub struct AuthorImportResponse { - /// The author id of the imported author - pub author_id: AuthorId, -} diff --git a/tests/client.rs b/tests/client.rs index 6aacd05..872d659 100644 --- a/tests/client.rs +++ b/tests/client.rs @@ -19,7 +19,7 @@ async fn test_doc_close() -> Result<()> { let _guard = iroh_test::logging::setup(); let node = Node::memory().spawn().await?; - let author = node.authors().default().await?; + let author = node.authors().default().await; // open doc two times let doc1 = node.docs().create().await?; let doc2 = node.docs().open(doc1.id()).await?.expect("doc to exist"); @@ -117,7 +117,7 @@ async fn test_authors() -> Result<()> { // default author always exists let authors: Vec<_> = node.authors().list().await?.try_collect().await?; assert_eq!(authors.len(), 1); - let default_author = node.authors().default().await?; + let default_author = node.authors().default().await; assert_eq!(authors, vec![default_author]); let author_id = node.authors().create().await?; @@ -139,9 +139,9 @@ async fn test_authors() -> Result<()> { let authors: Vec<_> = node.authors().list().await?.try_collect().await?; assert_eq!(authors.len(), 2); - assert!(node.authors().default().await? != author_id); + assert!(node.authors().default().await != author_id); node.authors().set_default(author_id).await?; - assert_eq!(node.authors().default().await?, author_id); + assert_eq!(node.authors().default().await, author_id); Ok(()) } @@ -149,7 +149,7 @@ async fn test_authors() -> Result<()> { #[tokio::test] async fn test_default_author_memory() -> Result<()> { let iroh = Node::memory().spawn().await?; - let author = iroh.authors().default().await?; + let author = iroh.authors().default().await; assert!(iroh.authors().export(author).await?.is_some()); assert!(iroh.authors().delete(author).await.is_err()); Ok(()) @@ -165,7 +165,7 @@ async fn test_default_author_persist() -> TestResult<()> { // check that the default author exists and cannot be deleted. let default_author = { let iroh = Node::persistent(iroh_root).spawn().await?; - let author = iroh.authors().default().await?; + let author = iroh.authors().default().await; assert!(iroh.authors().export(author).await?.is_some()); assert!(iroh.authors().delete(author).await.is_err()); iroh.shutdown().await?; @@ -175,7 +175,7 @@ async fn test_default_author_persist() -> TestResult<()> { // check that the default author is persisted across restarts. 
{ let iroh = Node::persistent(iroh_root).spawn().await?; - let author = iroh.authors().default().await?; + let author = iroh.authors().default().await; assert_eq!(author, default_author); assert!(iroh.authors().export(author).await?.is_some()); assert!(iroh.authors().delete(author).await.is_err()); @@ -187,7 +187,7 @@ async fn test_default_author_persist() -> TestResult<()> { let default_author = { tokio::fs::remove_file(iroh_root.join("default-author")).await?; let iroh = Node::persistent(iroh_root).spawn().await?; - let author = iroh.authors().default().await?; + let author = iroh.authors().default().await; assert!(author != default_author); assert!(iroh.authors().export(author).await?.is_some()); assert!(iroh.authors().delete(author).await.is_err()); @@ -223,13 +223,13 @@ async fn test_default_author_persist() -> TestResult<()> { let iroh = Node::persistent(iroh_root).spawn().await?; let author = iroh.authors().create().await?; iroh.authors().set_default(author).await?; - assert_eq!(iroh.authors().default().await?, author); + assert_eq!(iroh.authors().default().await, author); iroh.shutdown().await?; author }; { let iroh = Node::persistent(iroh_root).spawn().await?; - assert_eq!(iroh.authors().default().await?, default_author); + assert_eq!(iroh.authors().default().await, default_author); iroh.shutdown().await?; } diff --git a/tests/util.rs b/tests/util.rs index 8f92a26..b280143 100644 --- a/tests/util.rs +++ b/tests/util.rs @@ -74,15 +74,15 @@ impl quic_rpc::Service for Service { pub struct Client { blobs: iroh_blobs::rpc::client::blobs::Client, docs: iroh_docs::rpc::client::docs::Client, - authors: iroh_docs::rpc::client::authors::Client, + authors: iroh_docs::engine::Authors, } impl Client { - fn new(client: quic_rpc::RpcClient) -> Self { + fn new(client: quic_rpc::RpcClient, authors: iroh_docs::engine::Authors) -> Self { Self { blobs: iroh_blobs::rpc::client::blobs::Client::new(client.clone().map().boxed()), docs: iroh_docs::rpc::client::docs::Client::new(client.clone().map().boxed()), - authors: iroh_docs::rpc::client::authors::Client::new(client.map().boxed()), + authors, } } @@ -94,7 +94,7 @@ impl Client { &self.docs } - pub fn authors(&self) -> &iroh_docs::rpc::client::authors::Client { + pub fn authors(&self) -> &iroh_docs::engine::Authors { &self.authors } } @@ -235,7 +235,9 @@ impl Builder { })?; } - let client = Client::new(client); + let authors = docs.authors(); + + let client = Client::new(client, authors); Ok(Node { router, client, @@ -355,4 +357,9 @@ impl Node { pub fn client(&self) -> &Client { &self.client } + + /// Returns an author api + pub fn authors(&self) -> &iroh_docs::engine::Authors { + &self.client.authors + } }
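Usage note (not part of the patch): the removed `Author*` RPC requests are replaced by the in-process `Authors` handle returned from `Engine::authors()`. The sketch below is a hypothetical illustration of the resulting call pattern; it assumes an already-spawned `Engine` named `engine` and uses `anyhow` and `futures_lite` only for brevity.

```rust
// Hypothetical usage sketch of the new in-process Authors API (not part of
// this diff). Assumes `engine: iroh_docs::engine::Engine` was spawned
// elsewhere by the embedding application.
use anyhow::Result;
use futures_lite::StreamExt;
use iroh_docs::engine::Engine;

async fn author_roundtrip(engine: &Engine) -> Result<()> {
    let authors = engine.authors();

    // The default author always exists; `default()` is now async (the id sits
    // behind a tokio RwLock) and infallible, unlike the old RPC round-trip.
    let old_default = authors.default().await;

    // Create a new author and promote it to the node-wide default.
    let author_id = authors.create().await?;
    assert_ne!(author_id, old_default);
    authors.set_default(author_id).await?;

    // List every author we hold a secret key for.
    let mut ids = authors.list().await?;
    while let Some(id) = ids.next().await {
        println!("have author {:?}", id?);
    }

    // Export/import round-trip; `Author` carries the secret key.
    if let Some(author) = authors.export(author_id).await? {
        authors.import(author).await?;
    }

    // Deleting the current default author is rejected.
    assert!(authors.delete(author_id).await.is_err());
    Ok(())
}
```

Because `DefaultAuthor` now guards the author id with `tokio::sync::RwLock` instead of `std::sync::RwLock`, `Authors::default()` is async and returns the id directly, which is why the test updates above drop the trailing `?` on `authors().default().await`.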