diff --git a/.vscode/launch.json b/.vscode/launch.json index 62e46f7aaf..db07b39982 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -76,6 +76,138 @@ "OC_SERVICE_ACCOUNT_SECRET": "service-account-secret" } }, + { + "name": "OpenCloud server with Groupware", + "type": "go", + "request": "launch", + "mode": "debug", + "buildFlags": [ + // "-tags", "enable_vips" + ], + "program": "${workspaceFolder}/opencloud/cmd/opencloud", + "args": ["server"], + "env": { + // log settings for human developers + "OC_LOG_LEVEL": "info", + "OC_LOG_PRETTY": "true", + "OC_LOG_COLOR": "true", + // set insecure options because we don't have valid certificates in dev environments + "OC_INSECURE": "true", + // enable basic auth for dev setup so that we can use curl for testing + "PROXY_ENABLE_BASIC_AUTH": "true", + // demo users + "IDM_CREATE_DEMO_USERS": "true", + // OC_RUN_SERVICES allows to start a subset of services even in the supervised mode + //"OC_RUN_SERVICES": "settings,storage-system,graph,idp,idm,ocs,store,thumbnails,web,webdav,frontend,gateway,users,groups,auth-basic,storage-authmachine,storage-users,storage-shares,storage-publiclink,storage-system,app-provider,sharing,proxy,ocdav", + + /* + * Keep secrets and passwords in one block to allow easy uncommenting + */ + // user id of "admin", for user creation and admin role assignement + "OC_ADMIN_USER_ID": "some-admin-user-id-0000-000000000000", // FIXME currently must have the length of a UUID, see reva/pkg/storage/utils/decomposedfs/spaces.go:228 + // admin user default password + "IDM_ADMIN_PASSWORD": "admin", + // system user + "OC_SYSTEM_USER_ID": "some-system-user-id-000-000000000000", // FIXME currently must have the length of a UUID, see reva/pkg/storage/utils/decomposedfs/spaces.go:228 + "OC_SYSTEM_USER_API_KEY": "some-system-user-machine-auth-api-key", + // set some hardcoded secrets + "OC_JWT_SECRET": "some-opencloud-jwt-secret", + "OC_MACHINE_AUTH_API_KEY": "some-opencloud-machine-auth-api-key", + "OC_TRANSFER_SECRET": "some-opencloud-transfer-secret", + // collaboration + "COLLABORATION_WOPIAPP_SECRET": "some-wopi-secret", + // idm ldap + "IDM_SVC_PASSWORD": "some-ldap-idm-password", + "GRAPH_LDAP_BIND_PASSWORD": "some-ldap-idm-password", + // reva ldap + "IDM_REVASVC_PASSWORD": "some-ldap-reva-password", + "GROUPS_LDAP_BIND_PASSWORD": "some-ldap-reva-password", + "USERS_LDAP_BIND_PASSWORD": "some-ldap-reva-password", + "AUTH_BASIC_LDAP_BIND_PASSWORD": "some-ldap-reva-password", + // idp ldap + "IDM_IDPSVC_PASSWORD": "some-ldap-idp-password", + "IDP_LDAP_BIND_PASSWORD": "some-ldap-idp-password", + // storage users mount ID + "GATEWAY_STORAGE_USERS_MOUNT_ID": "storage-users-1", + "STORAGE_USERS_MOUNT_ID": "storage-users-1", + // graph application ID + "GRAPH_APPLICATION_ID": "application-1", + + // service accounts + "OC_SERVICE_ACCOUNT_ID": "service-account-id", + "OC_SERVICE_ACCOUNT_SECRET": "service-account-secret", + + "OC_ADD_RUN_SERVICES": "groupware", + "GROUPWARE_LOG_LEVEL": "trace" + } + }, + { + "name": "OpenCloud server with external services", + "type": "go", + "request": "launch", + "mode": "debug", + "buildFlags": [ + // "-tags", "enable_vips" + ], + "program": "${workspaceFolder}/opencloud/cmd/opencloud", + "args": ["server"], + "env": { + "OC_URL": "https://localhost:9200/", + "PROXY_DEBUG_ADDR": "0.0.0.0:9205", + "OC_BASE_DATA_PATH": "${env:HOME}/.opencloud-with-external", + "OC_CONFIG_DIR": "${env:HOME}/.opencloud-with-external/config", + "GROUPWARE_LOG_LEVEL": "trace", + "OC_LOG_LEVEL": "info", + 
"OC_LOG_PRETTY": "true", + "OC_LOG_COLOR": "true", + "OC_INSECURE": "true", + "PROXY_ENABLE_BASIC_AUTH": "false", + "IDM_CREATE_DEMO_USERS": "false", + "OC_LDAP_URI": "ldaps://localhost:636", + "OC_LDAP_INSECURE": "true", + "OC_LDAP_BIND_DN": "cn=admin,dc=opencloud,dc=eu", + "OC_LDAP_BIND_PASSWORD": "admin", + "OC_LDAP_GROUP_BASE_DN": "ou=groups,dc=opencloud,dc=eu", + "OC_LDAP_GROUP_SCHEMA_ID": "entryUUID", + "OC_LDAP_USER_BASE_DN": "ou=users,dc=opencloud,dc=eu", + "OC_LDAP_USER_FILTER": "(objectclass=inetOrgPerson)", + "OC_LDAP_USER_SCHEMA_ID": "entryUUID", + "OC_LDAP_DISABLE_USER_MECHANISM": "none", + "OC_LDAP_SERVER_WRITE_ENABLED": "false", + "OC_EXCLUDE_RUN_SERVICES": "idm", + "OC_ADD_RUN_SERVICES": "notifications,groupware", + "NATS_NATS_HOST": "0.0.0.0", + "NATS_NATS_PORT": "9233", + "FRONTEND_ARCHIVER_MAX_SIZE": "10000000000", + "MICRO_REGISTRY_ADDRESS": "127.0.0.1:9233", + "NOTIFICATIONS_SMTP_HOST": "localhost", + "NOTIFICATIONS_SMTP_PORT": "2500", + "NOTIFICATIONS_SMTP_SENDER": "OpenCloud notifications ", + "NOTIFICATIONS_SMTP_USERNAME": "notifications@cloud.opencloud.test", + "NOTIFICATIONS_SMTP_INSECURE": "true", + "NOTIFICATIONS_SMTP_PASSWORD": "", + "NOTIFICATIONS_SMTP_AUTHENTICATION": "", + "NOTIFICATIONS_SMTP_ENCRYPTION": "none", + "PROXY_AUTOPROVISION_ACCOUNTS": "false", + "PROXY_ROLE_ASSIGNMENT_DRIVER": "oidc", + "OC_OIDC_ISSUER": "https://keycloak.opencloud.test/realms/openCloud", + "PROXY_OIDC_REWRITE_WELLKNOWN": "true", + "WEB_OIDC_CLIENT_ID": "web", + "PROXY_USER_OIDC_CLAIM": "uuid", + "PROXY_USER_CS3_CLAIM": "userid", + "WEB_OPTION_ACCOUNT_EDIT_LINK_HREF": "https://keycloak.opencloud.test/realms/openCloud/account", + "OC_ADMIN_USER_ID": "", + "SETTINGS_SETUP_DEFAULT_ASSIGNMENTS": "false", + "GRAPH_ASSIGN_DEFAULT_USER_ROLE": "false", + "GRAPH_USERNAME_MATCH": "none", + "KEYCLOAK_DOMAIN": "keycloak.opencloud.test", + "IDM_ADMIN_PASSWORD": "admin", + "GRAPH_LDAP_SERVER_UUID": "true", + "GRAPH_LDAP_GROUP_CREATE_BASE_DN": "ou=custom,ou=groups,dc=opencloud,dc=eu", + "GRAPH_LDAP_REFINT_ENABLED": "true", + "GATEWAY_GRPC_ADDR": "0.0.0.0:9142", + } + }, { "name": "Fed OpenCloud server", "type": "go", diff --git a/Makefile b/Makefile index 22c8924990..bd45d7972c 100644 --- a/Makefile +++ b/Makefile @@ -27,6 +27,7 @@ OC_MODULES = \ services/app-provider \ services/app-registry \ services/audit \ + services/auth-api \ services/auth-app \ services/auth-basic \ services/auth-bearer \ @@ -39,6 +40,7 @@ OC_MODULES = \ services/gateway \ services/graph \ services/groups \ + services/groupware \ services/idm \ services/idp \ services/invitations \ diff --git a/devtools/deployments/opencloud_full/.env b/devtools/deployments/opencloud_full/.env index 75d6d33f55..39e940fab3 100644 --- a/devtools/deployments/opencloud_full/.env +++ b/devtools/deployments/opencloud_full/.env @@ -305,8 +305,15 @@ KEYCLOAK_ADMIN_PASSWORD= # Leaving it default stores data in docker internal volumes. #RADICALE_DATA_DIR=/your/local/radicale/data +### Stalwart Settings ### +# Note: the leading colon is required to enable the service. +#STALWART=:stalwart.yml +# Domain of Stalwart +# Defaults to "stalwart.opencloud.test" +STALWART_DOMAIN= + ## IMPORTANT ## # This MUST be the last line as it assembles the supplemental compose files to be used. # ALL supplemental configs must be added here, whether commented or not. 
# Each var must either be empty or contain :path/file.yml -COMPOSE_FILE=docker-compose.yml${OPENCLOUD:-}${TIKA:-}${DECOMPOSEDS3:-}${DECOMPOSEDS3_MINIO:-}${DECOMPOSED:-}${COLLABORA:-}${MONITORING:-}${IMPORTER:-}${CLAMAV:-}${INBUCKET:-}${EXTENSIONS:-}${UNZIP:-}${DRAWIO:-}${JSONVIEWER:-}${PROGRESSBARS:-}${EXTERNALSITES:-}${KEYCLOAK:-}${LDAP:-}${KEYCLOAK_AUTOPROVISIONING:-}${LDAP_MANAGER:-}${RADICALE:-} \ No newline at end of file +COMPOSE_FILE=docker-compose.yml${OPENCLOUD:-}${TIKA:-}${DECOMPOSEDS3:-}${DECOMPOSEDS3_MINIO:-}${DECOMPOSED:-}${COLLABORA:-}${MONITORING:-}${IMPORTER:-}${CLAMAV:-}${INBUCKET:-}${EXTENSIONS:-}${UNZIP:-}${DRAWIO:-}${JSONVIEWER:-}${PROGRESSBARS:-}${EXTERNALSITES:-}${KEYCLOAK:-}${LDAP:-}${KEYCLOAK_AUTOPROVISIONING:-}${LDAP_MANAGER:-}${RADICALE:-}${STALWART:-} diff --git a/devtools/deployments/opencloud_full/config/keycloak/clients/groupware.json b/devtools/deployments/opencloud_full/config/keycloak/clients/groupware.json new file mode 100644 index 0000000000..775bc03ead --- /dev/null +++ b/devtools/deployments/opencloud_full/config/keycloak/clients/groupware.json @@ -0,0 +1,58 @@ +{ + "clientId": "groupware", + "name": "OpenCloud Groupware", + "description": "Used for authenticating automated HTTP clients of the OpenCloud Groupware API", + "rootUrl": "", + "adminUrl": "", + "baseUrl": "", + "surrogateAuthRequired": false, + "enabled": true, + "alwaysDisplayInConsole": false, + "clientAuthenticatorType": "client-secret", + "redirectUris": [ + "/*" + ], + "webOrigins": [ + "/*" + ], + "notBefore": 0, + "bearerOnly": false, + "consentRequired": false, + "standardFlowEnabled": true, + "implicitFlowEnabled": false, + "directAccessGrantsEnabled": true, + "serviceAccountsEnabled": false, + "publicClient": true, + "frontchannelLogout": true, + "protocol": "openid-connect", + "attributes": { + "oidc.ciba.grant.enabled": "false", + "backchannel.logout.session.required": "true", + "oauth2.device.authorization.grant.enabled": "false", + "backchannel.logout.revoke.offline.tokens": "false" + }, + "authenticationFlowBindingOverrides": {}, + "fullScopeAllowed": true, + "nodeReRegistrationTimeout": -1, + "defaultClientScopes": [ + "web-origins", + "acr", + "profile", + "roles", + "groups", + "OpenCloudUnique_ID", + "basic", + "email" + ], + "optionalClientScopes": [ + "address", + "phone", + "offline_access", + "microprofile-jwt" + ], + "access": { + "view": true, + "configure": true, + "manage": true + } +} diff --git a/devtools/deployments/opencloud_full/config/ldap/ldif/11_ppolicy.ldif b/devtools/deployments/opencloud_full/config/ldap/ldif/11_ppolicy.ldif new file mode 100644 index 0000000000..a9ce3f875c --- /dev/null +++ b/devtools/deployments/opencloud_full/config/ldap/ldif/11_ppolicy.ldif @@ -0,0 +1,26 @@ +dn: ou=policies,dc=opencloud,dc=eu +objectClass: organizationalUnit +objectClass: top +ou: policies + +dn: cn=default,ou=policies,dc=opencloud,dc=eu +cn: default +objectClass: pwdPolicy +objectClass: person +objectClass: top +pwdAllowUserChange: TRUE +pwdAttribute: userPassword +pwdCheckQuality: 0 +pwdExpireWarning: 600 +pwdFailureCountInterval: 30 +pwdGraceAuthNLimit: 5 +pwdInHistory: 5 +pwdLockout: FALSE +pwdLockoutDuration: 0 +pwdMaxAge: 0 +pwdMaxFailure: 5 +pwdMinAge: 0 +pwdMinLength: 1 +pwdMustChange: FALSE +pwdSafeModify: FALSE +sn: default diff --git a/devtools/deployments/opencloud_full/config/stalwart/README.md b/devtools/deployments/opencloud_full/config/stalwart/README.md new file mode 100644 index 0000000000..df25fb0e05 --- /dev/null +++ 
b/devtools/deployments/opencloud_full/config/stalwart/README.md @@ -0,0 +1,21 @@ +# Stalwart Configuration + +The mechanics are currently to mount a different configuration file depending on the environment, as we support two scenarios that are described in [`services/groupware/DEVELOPER.md`](../../../../../services/groupware/DEVELOPER.md): + + * «production» setup, with OpenLDAP and Keycloak containers + * «homelab» setup, with the built-in IDM (LDAP) and IDP that run as part of the `opencloud` container + +The Docker Compose setup (in [`stalwart.yml`](../../stalwart.yml)) mounts either [`idmldap.toml`](./idmldap.toml) or [`ldap.toml`](./ldap.toml) depending on how the variable `STALWART_AUTH_DIRECTORY` is set, which is either `idmldap` for the homelab setup, or `ldap` for the production setup. + +This is thus all done automatically, but whenever changes are performed to Stalwart configuration files, they must be reflected across those two files, to keep them in sync, as the only entry that should differ is this one: + +```ruby +storage.directory = "ldap" +``` + +or this: + +```ruby +storage.directory = "idmldap" +``` + diff --git a/devtools/deployments/opencloud_full/config/stalwart/config.toml b/devtools/deployments/opencloud_full/config/stalwart/config.toml new file mode 100644 index 0000000000..e7599a6655 --- /dev/null +++ b/devtools/deployments/opencloud_full/config/stalwart/config.toml @@ -0,0 +1,110 @@ +authentication.fallback-admin.secret = "$6$4qPYDVhaUHkKcY7s$bB6qhcukb9oFNYRIvaDZgbwxrMa2RvF5dumCjkBFdX19lSNqrgKltf3aPrFMuQQKkZpK2YNuQ83hB1B3NiWzj." +authentication.fallback-admin.user = "mailadmin" +authentication.master.secret = "$6$4qPYDVhaUHkKcY7s$bB6qhcukb9oFNYRIvaDZgbwxrMa2RvF5dumCjkBFdX19lSNqrgKltf3aPrFMuQQKkZpK2YNuQ83hB1B3NiWzj." +authentication.master.user = "master" +directory.idmldap.attributes.class = "objectClass" +directory.idmldap.attributes.description = "displayName" +directory.idmldap.attributes.email = "mail" +directory.idmldap.attributes.groups = "memberOf" +directory.idmldap.attributes.name = "uid" +directory.idmldap.attributes.secret = "userPassword" +directory.idmldap.base-dn = "o=libregraph-idm" +directory.idmldap.bind.auth.method = "default" +directory.idmldap.bind.dn = "uid=reva,ou=sysusers,o=libregraph-idm" +directory.idmldap.bind.secret = "admin" +directory.idmldap.cache.size = 1048576 +directory.idmldap.cache.ttl.negative = "10m" +directory.idmldap.cache.ttl.positive = "1h" +directory.idmldap.filter.email = "(&(|(objectClass=person)(objectClass=groupOfNames))(mail=?))" +directory.idmldap.filter.name = "(&(|(objectClass=person)(objectClass=groupOfNames))(uid=?))" +directory.idmldap.timeout = "15s" +directory.idmldap.tls.allow-invalid-certs = true +directory.idmldap.tls.enable = true +directory.idmldap.type = "ldap" +directory.idmldap.url = "ldaps://opencloud:9235" +directory.keycloak.auth.method = "user-token" +directory.keycloak.cache.size = 1048576 +directory.keycloak.cache.ttl.negative = "10m" +directory.keycloak.cache.ttl.positive = "1h" +directory.keycloak.endpoint.method = "introspect" +directory.keycloak.endpoint.url = "http://keycloak:8080/realms/openCloud/protocol/openid-connect/userinfo" +directory.keycloak.fields.email = "email" +directory.keycloak.fields.full-name = "name" +directory.keycloak.fields.username = "preferred_username" +directory.keycloak.timeout = "15s" +directory.keycloak.type = "oidc" +directory.ldap.attributes.class = "objectClass" +directory.ldap.attributes.description = "displayName" +directory.ldap.attributes.email = "mail" 
+directory.ldap.attributes.email-alias = "mailAlias" +directory.ldap.attributes.groups = "memberOf" +directory.ldap.attributes.name = "uid" +directory.ldap.attributes.secret = "userPassword" +directory.ldap.attributes.secret-changed = "pwdChangedTime" +directory.ldap.base-dn = "dc=opencloud,dc=eu" +directory.ldap.bind.auth.dn = "cn=?,ou=users,dc=opencloud,dc=eu" +directory.ldap.bind.auth.enable = true +directory.ldap.bind.auth.search = true +directory.ldap.bind.dn = "cn=admin,dc=opencloud,dc=eu" +directory.ldap.bind.secret = "admin" +directory.ldap.cache.ttl.negative = "10m" +directory.ldap.cache.ttl.positive = "1h" +directory.ldap.filter.email = "(&(|(objectClass=person)(objectClass=groupOfNames))(|(uid=?)(mail=?)(mailAlias=?)(cn=?)))" +directory.ldap.filter.name = "(&(|(objectClass=person)(objectClass=groupOfNames))(|(uid=?)(cn=?)))" +directory.ldap.timeout = "5s" +directory.ldap.tls.allow-invalid-certs = true +directory.ldap.tls.enable = true +directory.ldap.type = "ldap" +directory.ldap.url = "ldap://ldap-server:1389" +http.allowed-endpoint = 200 +http.hsts = true +http.permissive-cors = false +http.url = "'https://' + config_get('server.hostname')" +http.use-x-forwarded = true +metrics.prometheus.auth.secret = "secret" +metrics.prometheus.auth.username = "metrics" +metrics.prometheus.enable = true +server.listener.http.bind = "0.0.0.0:8080" +server.listener.http.protocol = "http" +server.listener.https.bind = "0.0.0.0:443" +server.listener.https.protocol = "http" +server.listener.https.tls.implicit = true +server.listener.imap.bind = "0.0.0.0:143" +server.listener.imap.protocol = "imap" +server.listener.imaptls.bind = "0.0.0.0:993" +server.listener.imaptls.protocol = "imap" +server.listener.imaptls.tls.implicit = true +server.listener.pop3.bind = "0.0.0.0:110" +server.listener.pop3.protocol = "pop3" +server.listener.pop3s.bind = "0.0.0.0:995" +server.listener.pop3s.protocol = "pop3" +server.listener.pop3s.tls.implicit = true +server.listener.sieve.bind = "0.0.0.0:4190" +server.listener.sieve.protocol = "managesieve" +server.listener.smtp.bind = "0.0.0.0:25" +server.listener.smtp.protocol = "smtp" +server.listener.submission.bind = "0.0.0.0:587" +server.listener.submission.protocol = "smtp" +server.listener.submissions.bind = "0.0.0.0:465" +server.listener.submissions.protocol = "smtp" +server.listener.submissions.tls.implicit = true +server.max-connections = 8192 +server.socket.backlog = 1024 +server.socket.nodelay = true +server.socket.reuse-addr = true +server.socket.reuse-port = true +storage.blob = "rocksdb" +storage.data = "rocksdb" +storage.directory = "%{env:STALWART_AUTH_DIRECTORY}%" +storage.fts = "rocksdb" +storage.lookup = "rocksdb" +store.rocksdb.compression = "lz4" +store.rocksdb.path = "/opt/stalwart/data" +store.rocksdb.type = "rocksdb" +tracer.console.ansi = true +tracer.console.buffered = true +tracer.console.enable = true +tracer.console.level = "trace" +tracer.console.lossy = false +tracer.console.multiline = false +tracer.console.type = "stdout" diff --git a/devtools/deployments/opencloud_full/config/stalwart/idmldap.toml b/devtools/deployments/opencloud_full/config/stalwart/idmldap.toml new file mode 100644 index 0000000000..269071e624 --- /dev/null +++ b/devtools/deployments/opencloud_full/config/stalwart/idmldap.toml @@ -0,0 +1,111 @@ +authentication.fallback-admin.secret = "$6$4qPYDVhaUHkKcY7s$bB6qhcukb9oFNYRIvaDZgbwxrMa2RvF5dumCjkBFdX19lSNqrgKltf3aPrFMuQQKkZpK2YNuQ83hB1B3NiWzj." 
+authentication.fallback-admin.user = "mailadmin" +authentication.master.secret = "$6$4qPYDVhaUHkKcY7s$bB6qhcukb9oFNYRIvaDZgbwxrMa2RvF5dumCjkBFdX19lSNqrgKltf3aPrFMuQQKkZpK2YNuQ83hB1B3NiWzj." +authentication.master.user = "master" +directory.idmldap.attributes.class = "objectClass" +directory.idmldap.attributes.description = "displayName" +directory.idmldap.attributes.email = "mail" +directory.idmldap.attributes.groups = "memberOf" +directory.idmldap.attributes.name = "cn" +directory.idmldap.attributes.secret = "userPassword" +directory.idmldap.base-dn = "o=libregraph-idm" +directory.idmldap.bind.auth.method = "default" +directory.idmldap.bind.dn = "uid=reva,ou=sysusers,o=libregraph-idm" +directory.idmldap.bind.secret = "admin" +directory.idmldap.cache.size = 1048576 +directory.idmldap.cache.ttl.negative = "10m" +directory.idmldap.cache.ttl.positive = "1h" +directory.idmldap.filter.email = "(&(|(objectClass=person)(objectClass=groupOfNames))(mail=?))" +directory.idmldap.filter.name = "(&(|(objectClass=person)(objectClass=groupOfNames))(cn=?))" +directory.idmldap.timeout = "15s" +directory.idmldap.tls.allow-invalid-certs = true +directory.idmldap.tls.enable = true +directory.idmldap.type = "ldap" +directory.idmldap.url = "ldaps://opencloud:9235" +directory.keycloak.auth.method = "user-token" +directory.keycloak.cache.size = 1048576 +directory.keycloak.cache.ttl.negative = "10m" +directory.keycloak.cache.ttl.positive = "1h" +directory.keycloak.endpoint.method = "introspect" +directory.keycloak.endpoint.url = "http://keycloak:8080/realms/openCloud/protocol/openid-connect/userinfo" +directory.keycloak.fields.email = "email" +directory.keycloak.fields.full-name = "name" +directory.keycloak.fields.username = "preferred_username" +directory.keycloak.timeout = "15s" +directory.keycloak.type = "oidc" +directory.ldap.attributes.class = "objectClass" +directory.ldap.attributes.description = "displayName" +directory.ldap.attributes.email = "mail" +directory.ldap.attributes.email-alias = "mailAlias" +directory.ldap.attributes.groups = "memberOf" +directory.ldap.attributes.name = "uid" +directory.ldap.attributes.secret = "userPassword" +directory.ldap.attributes.secret-changed = "pwdChangedTime" +directory.ldap.base-dn = "dc=opencloud,dc=eu" +directory.ldap.bind.auth.dn = "cn=?,ou=users,dc=opencloud,dc=eu" +directory.ldap.bind.auth.enable = true +directory.ldap.bind.auth.search = true +directory.ldap.bind.dn = "cn=admin,dc=opencloud,dc=eu" +directory.ldap.bind.secret = "admin" +directory.ldap.cache.ttl.negative = "10m" +directory.ldap.cache.ttl.positive = "1h" +directory.ldap.filter.email = "(&(|(objectClass=person)(objectClass=groupOfNames))(|(uid=?)(mail=?)(mailAlias=?)(cn=?)))" +directory.ldap.filter.name = "(&(|(objectClass=person)(objectClass=groupOfNames))(|(uid=?)(cn=?)))" +directory.ldap.timeout = "5s" +directory.ldap.tls.allow-invalid-certs = true +directory.ldap.tls.enable = true +directory.ldap.type = "ldap" +directory.ldap.url = "ldap://ldap-server:1389" +http.allowed-endpoint = 200 +http.hsts = true +http.permissive-cors = false +http.url = "'https://' + config_get('server.hostname')" +http.use-x-forwarded = true +metrics.prometheus.auth.secret = "secret" +metrics.prometheus.auth.username = "metrics" +metrics.prometheus.enable = true +server.listener.http.bind = "0.0.0.0:8080" +server.listener.http.protocol = "http" +server.listener.https.bind = "0.0.0.0:443" +server.listener.https.protocol = "http" +server.listener.https.tls.implicit = true +server.listener.imap.bind = "0.0.0.0:143" 
+server.listener.imap.protocol = "imap" +server.listener.imaptls.bind = "0.0.0.0:993" +server.listener.imaptls.protocol = "imap" +server.listener.imaptls.tls.implicit = true +server.listener.pop3.bind = "0.0.0.0:110" +server.listener.pop3.protocol = "pop3" +server.listener.pop3s.bind = "0.0.0.0:995" +server.listener.pop3s.protocol = "pop3" +server.listener.pop3s.tls.implicit = true +server.listener.sieve.bind = "0.0.0.0:4190" +server.listener.sieve.protocol = "managesieve" +server.listener.smtp.bind = "0.0.0.0:25" +server.listener.smtp.protocol = "smtp" +server.listener.submission.bind = "0.0.0.0:587" +server.listener.submission.protocol = "smtp" +server.listener.submissions.bind = "0.0.0.0:465" +server.listener.submissions.protocol = "smtp" +server.listener.submissions.tls.implicit = true +server.max-connections = 8192 +server.socket.backlog = 1024 +server.socket.nodelay = true +server.socket.reuse-addr = true +server.socket.reuse-port = true +storage.blob = "rocksdb" +storage.data = "rocksdb" +storage.directory = "idmldap" +storage.fts = "rocksdb" +storage.lookup = "rocksdb" +store.rocksdb.compression = "lz4" +store.rocksdb.path = "/opt/stalwart/data" +store.rocksdb.type = "rocksdb" +tracer.console.ansi = true +tracer.console.buffered = true +tracer.console.enable = true +tracer.console.level = "trace" +tracer.console.lossy = false +tracer.console.multiline = false +tracer.console.type = "stdout" +sharing.allow-directory-query = false diff --git a/devtools/deployments/opencloud_full/config/stalwart/ldap.toml b/devtools/deployments/opencloud_full/config/stalwart/ldap.toml new file mode 100644 index 0000000000..8b2c465296 --- /dev/null +++ b/devtools/deployments/opencloud_full/config/stalwart/ldap.toml @@ -0,0 +1,110 @@ +authentication.fallback-admin.secret = "$6$4qPYDVhaUHkKcY7s$bB6qhcukb9oFNYRIvaDZgbwxrMa2RvF5dumCjkBFdX19lSNqrgKltf3aPrFMuQQKkZpK2YNuQ83hB1B3NiWzj." +authentication.fallback-admin.user = "mailadmin" +authentication.master.secret = "$6$4qPYDVhaUHkKcY7s$bB6qhcukb9oFNYRIvaDZgbwxrMa2RvF5dumCjkBFdX19lSNqrgKltf3aPrFMuQQKkZpK2YNuQ83hB1B3NiWzj." 
+authentication.master.user = "master" +directory.idmldap.attributes.class = "objectClass" +directory.idmldap.attributes.description = "displayName" +directory.idmldap.attributes.email = "mail" +directory.idmldap.attributes.groups = "memberOf" +directory.idmldap.attributes.name = "uid" +directory.idmldap.attributes.secret = "userPassword" +directory.idmldap.base-dn = "o=libregraph-idm" +directory.idmldap.bind.auth.method = "default" +directory.idmldap.bind.dn = "uid=reva,ou=sysusers,o=libregraph-idm" +directory.idmldap.bind.secret = "admin" +directory.idmldap.cache.size = 1048576 +directory.idmldap.cache.ttl.negative = "10m" +directory.idmldap.cache.ttl.positive = "1h" +directory.idmldap.filter.email = "(&(|(objectClass=person)(objectClass=groupOfNames))(mail=?))" +directory.idmldap.filter.name = "(&(|(objectClass=person)(objectClass=groupOfNames))(uid=?))" +directory.idmldap.timeout = "15s" +directory.idmldap.tls.allow-invalid-certs = true +directory.idmldap.tls.enable = true +directory.idmldap.type = "ldap" +directory.idmldap.url = "ldaps://opencloud:9235" +directory.keycloak.auth.method = "user-token" +directory.keycloak.cache.size = 1048576 +directory.keycloak.cache.ttl.negative = "10m" +directory.keycloak.cache.ttl.positive = "1h" +directory.keycloak.endpoint.method = "introspect" +directory.keycloak.endpoint.url = "http://keycloak:8080/realms/openCloud/protocol/openid-connect/userinfo" +directory.keycloak.fields.email = "email" +directory.keycloak.fields.full-name = "name" +directory.keycloak.fields.username = "preferred_username" +directory.keycloak.timeout = "15s" +directory.keycloak.type = "oidc" +directory.ldap.attributes.class = "objectClass" +directory.ldap.attributes.description = "displayName" +directory.ldap.attributes.email = "mail" +directory.ldap.attributes.email-alias = "mailAlias" +directory.ldap.attributes.groups = "memberOf" +directory.ldap.attributes.name = "uid" +directory.ldap.attributes.secret = "userPassword" +directory.ldap.attributes.secret-changed = "pwdChangedTime" +directory.ldap.base-dn = "dc=opencloud,dc=eu" +directory.ldap.bind.auth.dn = "cn=?,ou=users,dc=opencloud,dc=eu" +directory.ldap.bind.auth.enable = true +directory.ldap.bind.auth.search = true +directory.ldap.bind.dn = "cn=admin,dc=opencloud,dc=eu" +directory.ldap.bind.secret = "admin" +directory.ldap.cache.ttl.negative = "10m" +directory.ldap.cache.ttl.positive = "1h" +directory.ldap.filter.email = "(&(|(objectClass=person)(objectClass=groupOfNames))(|(uid=?)(mail=?)(mailAlias=?)(cn=?)))" +directory.ldap.filter.name = "(&(|(objectClass=person)(objectClass=groupOfNames))(|(uid=?)(cn=?)))" +directory.ldap.timeout = "5s" +directory.ldap.tls.allow-invalid-certs = true +directory.ldap.tls.enable = true +directory.ldap.type = "ldap" +directory.ldap.url = "ldap://ldap-server:1389" +http.allowed-endpoint = 200 +http.hsts = true +http.permissive-cors = false +http.url = "'https://' + config_get('server.hostname')" +http.use-x-forwarded = true +metrics.prometheus.auth.secret = "secret" +metrics.prometheus.auth.username = "metrics" +metrics.prometheus.enable = true +server.listener.http.bind = "0.0.0.0:8080" +server.listener.http.protocol = "http" +server.listener.https.bind = "0.0.0.0:443" +server.listener.https.protocol = "http" +server.listener.https.tls.implicit = true +server.listener.imap.bind = "0.0.0.0:143" +server.listener.imap.protocol = "imap" +server.listener.imaptls.bind = "0.0.0.0:993" +server.listener.imaptls.protocol = "imap" +server.listener.imaptls.tls.implicit = true 
+server.listener.pop3.bind = "0.0.0.0:110" +server.listener.pop3.protocol = "pop3" +server.listener.pop3s.bind = "0.0.0.0:995" +server.listener.pop3s.protocol = "pop3" +server.listener.pop3s.tls.implicit = true +server.listener.sieve.bind = "0.0.0.0:4190" +server.listener.sieve.protocol = "managesieve" +server.listener.smtp.bind = "0.0.0.0:25" +server.listener.smtp.protocol = "smtp" +server.listener.submission.bind = "0.0.0.0:587" +server.listener.submission.protocol = "smtp" +server.listener.submissions.bind = "0.0.0.0:465" +server.listener.submissions.protocol = "smtp" +server.listener.submissions.tls.implicit = true +server.max-connections = 8192 +server.socket.backlog = 1024 +server.socket.nodelay = true +server.socket.reuse-addr = true +server.socket.reuse-port = true +storage.blob = "rocksdb" +storage.data = "rocksdb" +storage.directory = "ldap" +storage.fts = "rocksdb" +storage.lookup = "rocksdb" +store.rocksdb.compression = "lz4" +store.rocksdb.path = "/opt/stalwart/data" +store.rocksdb.type = "rocksdb" +tracer.console.ansi = true +tracer.console.buffered = true +tracer.console.enable = true +tracer.console.level = "trace" +tracer.console.lossy = false +tracer.console.multiline = false +tracer.console.type = "stdout" diff --git a/devtools/deployments/opencloud_full/debug-opencloud-paused.yml b/devtools/deployments/opencloud_full/debug-opencloud-paused.yml new file mode 100644 index 0000000000..acd010de73 --- /dev/null +++ b/devtools/deployments/opencloud_full/debug-opencloud-paused.yml @@ -0,0 +1,7 @@ +--- +services: + + opencloud: + command: [ "-c", "opencloud init || true; dlv --listen=:40000 --headless=true --check-go-version=false --api-version=2 --accept-multiclient exec /usr/bin/opencloud server" ] + ports: + - 40000:40000 diff --git a/devtools/deployments/opencloud_full/docker-compose.yml b/devtools/deployments/opencloud_full/docker-compose.yml index 99179f58aa..52e17195ba 100644 --- a/devtools/deployments/opencloud_full/docker-compose.yml +++ b/devtools/deployments/opencloud_full/docker-compose.yml @@ -31,6 +31,7 @@ services: - "--accessLog=true" - "--accessLog.format=json" - "--accessLog.fields.headers.names.X-Request-Id=keep" + - "--accessLog.fields.headers.names.Trace-Id=keep" ports: - "80:80" - "443:443" diff --git a/devtools/deployments/opencloud_full/keycloak.yml b/devtools/deployments/opencloud_full/keycloak.yml index 3a01dffc38..e61a121562 100644 --- a/devtools/deployments/opencloud_full/keycloak.yml +++ b/devtools/deployments/opencloud_full/keycloak.yml @@ -57,6 +57,8 @@ services: KC_FEATURES: impersonation KEYCLOAK_ADMIN: ${KEYCLOAK_ADMIN_USER:-admin} KEYCLOAK_ADMIN_PASSWORD: ${KEYCLOAK_ADMIN_PASSWORD:-admin} + ports: + - "8080:8080" labels: - "traefik.enable=true" - "traefik.http.routers.keycloak.entrypoints=https" diff --git a/devtools/deployments/opencloud_full/ldap.yml b/devtools/deployments/opencloud_full/ldap.yml index bd30d758d4..a874e0df95 100644 --- a/devtools/deployments/opencloud_full/ldap.yml +++ b/devtools/deployments/opencloud_full/ldap.yml @@ -24,6 +24,7 @@ services: OC_LDAP_SERVER_WRITE_ENABLED: "false" # assuming the external ldap is not writable # OC_RUN_SERVICES specifies to start all services except glauth, idm and accounts. 
These are replaced by external services OC_EXCLUDE_RUN_SERVICES: idm + STALWART_AUTH_DIRECTORY: "ldap" ldap-server: image: bitnamilegacy/openldap:2.6 @@ -39,6 +40,9 @@ services: LDAP_TLS_KEY_FILE: /opt/bitnami/openldap/share/openldap.key LDAP_ROOT: "dc=opencloud,dc=eu" LDAP_ADMIN_PASSWORD: ${LDAP_ADMIN_PASSWORD:-admin} + LDAP_CONFIGURE_PPOLICY: "yes" + LDAP_PPOLICY_USE_LOCKOUT: "no" + LDAP_PPOLICY_HASH_CLEARTEXT: "no" ports: - "127.0.0.1:389:1389" - "127.0.0.1:636:1636" diff --git a/devtools/deployments/opencloud_full/opencloud.yml b/devtools/deployments/opencloud_full/opencloud.yml index afe703140d..a3fa12d599 100644 --- a/devtools/deployments/opencloud_full/opencloud.yml +++ b/devtools/deployments/opencloud_full/opencloud.yml @@ -58,6 +58,11 @@ services: COMPANION_DOMAIN: ${COMPANION_DOMAIN:-companion.opencloud.test} # enable to allow using the banned passwords list OC_PASSWORD_POLICY_BANNED_PASSWORDS_LIST: banned-password-list.txt + IDM_REVASVC_PASSWORD: "admin" + AUTH_BASIC_LDAP_BIND_PASSWORD: "admin" + USERS_LDAP_BIND_PASSWORD: "admin" + GROUPS_LDAP_BIND_PASSWORD: "admin" + IDM_LDAPS_ADDR: 0.0.0.0:9235 volumes: - ./config/opencloud/app-registry.yaml:/etc/opencloud/app-registry.yaml - ./config/opencloud/csp.yaml:/etc/opencloud/csp.yaml diff --git a/devtools/deployments/opencloud_full/stalwart.yml b/devtools/deployments/opencloud_full/stalwart.yml new file mode 100644 index 0000000000..92e796c404 --- /dev/null +++ b/devtools/deployments/opencloud_full/stalwart.yml @@ -0,0 +1,36 @@ +--- +services: + traefik: + networks: + opencloud-net: + aliases: + - ${STALWART_DOMAIN:-stalwart.opencloud.test} + + stalwart: + image: ghcr.io/stalwartlabs/stalwart:v0.15.0-alpine + hostname: ${STALWART_DOMAIN:-stalwart.opencloud.test} + networks: + - opencloud-net + ports: + - "127.0.0.1:143:143" + - "127.0.0.1:993:993" + - "127.0.0.1:1465:465" + volumes: + - /etc/localtime:/etc/localtime:ro + - "./config/stalwart/${STALWART_AUTH_DIRECTORY:-idmldap}.toml:/opt/stalwart/etc/config.toml" + - stalwart-data:/opt/stalwart/data + environment: + STALWART_AUTH_DIRECTORY: "${STALWART_AUTH_DIRECTORY:-idmldap}" + labels: + - "traefik.enable=true" + - "traefik.http.routers.stalwart.entrypoints=https" + - "traefik.http.routers.stalwart.rule=Host(`${STALWART_DOMAIN:-stalwart.opencloud.test}`)" + - "traefik.http.routers.stalwart.tls.certresolver=http" + - "traefik.http.routers.stalwart.service=stalwart" + - "traefik.http.services.stalwart.loadbalancer.server.port=8080" + logging: + driver: ${LOG_DRIVER:-local} + restart: always + +volumes: + stalwart-data: diff --git a/docs/adr/0002-groupware-authentication-with-stalwart.md b/docs/adr/0002-groupware-authentication-with-stalwart.md new file mode 100644 index 0000000000..af93d0fb4a --- /dev/null +++ b/docs/adr/0002-groupware-authentication-with-stalwart.md @@ -0,0 +1,343 @@ +--- +title: "Authentication with Stalwart" +--- + +* Status: draft + +## Context + +In a groupware environment, not every user will always use the OpenCloud UI to read their emails, some will resort to other [MUAs (Mail User Agents)](https://en.wikipedia.org/wiki/Email_client) that support a subset of features, use older protocols (IMAP, POP, SMTP, CalDAV, CardDAV) and lesser authentication methods (basic authentication). 
Those email clients will talk to Stalwart directly, as opposed to the OpenCloud UI, which will make use of the APIs of the OpenCloud Groupware service, since those protocols are provided by Stalwart and implementing them in OpenCloud would offer very little benefit while requiring a great deal of largely unnecessary effort. + +Those protocols and operations that bypass the OpenCloud UI also need to be authenticated, in this case by Stalwart itself, and we need to find the approach that best fulfills most or all of the following constraints: + +### Single Provisioning + +As much as possible, we want to avoid provisioning users, groups, passwords and other resources more than once. +While it is possible to have e.g. OpenCloud's user management also perform [Management API](https://stalw.art/docs/category/management-api/) calls, one still inevitably ends up in situations where users, user passwords, or other resources are not in sync, which becomes complex to debug and fix, and should thus be avoided if possible. + +To do so, we should strive to have a single source of truth regarding users, their passwords, and similar resources and attributes such as groups, roles, application passwords, etc... + +### Attack Detection + +Coordinated attacks such as [denial of service](https://en.wikipedia.org/wiki/Denial-of-service_attack) attempts don't necessarily focus on a single protocol but are commonly multi-pronged, e.g. by brute-forcing the [OIDC API](https://www.keycloak.org/docs/latest/authorization_services/index.html#token-endpoint), the OpenCloud Groupware API, IMAP and SMTP, \*DAV protocols, etc... + +In order to detect those attacks, and to react quickly by blacklisting clients that are identified as attempting them, it is useful to have a single authentication service for all the components of the system, all protocols, and all clients (e.g. [PowerDNS Weakforced](https://github.com/PowerDNS/weakforced), [Nauthilus](https://nauthilus.org/), ...) + +Furthermore, such services typically make use of [DNSBL/RBL services](https://en.wikipedia.org/wiki/Domain_Name_System_blocklist) that allow IP addresses of botnets to be blocked across many services of many providers as a shared defense mechanism. + +As a bonus, a centralized authentication component can also provide metrics and observability capabilities across all those protocols. + +### Custom Authentication Implementations + +Some customers might want custom authentication implementations to integrate with their environment, in which case we would want those to be implemented once and in the technology stack we are all most familiar with (thus as a Go service in the OpenCloud framework, and not e.g. a Lua script in Nauthilus, or a Rust plugin in Stalwart, etc...) + +## Decision Drivers + +TODO + +* + +## Considered Options + +First, here is a brief explanation of each of the scenarios that we potentially or absolutely need to support, and which we will explore for each implementation option: + +* MUAs with basic authentication + * these are external mail clients (Thunderbird, Apple Mail, ...) with which users authenticate using legacy protocols (IMAP, POP3, SMTP) and their primary username and password in clear text (encrypted through the mandatory use of TLS) +* MUAs with application password authentication + * these are external mail clients (Thunderbird, Apple Mail, ...)
with which users authenticate using legacy protocols (IMAP, POP3, SMTP) and one of the application passwords that they created in the OpenCloud UI, which is a useful security mechanism as it reduces the attack surface when one such password is leaked or discovered +* MUAs with SASL bearer token authentication + * these are more modern external mail clients (Thunderbird) with which users authenticate using legacy protocols (IMAP, POP3, SMTP) but more secure OIDC token based authentication (SASL OAUTHBEARER or SASL XOAUTH2), which closely resembles the OIDC authentication used by the OpenCloud UI towards the OpenCloud backends +* JMAP clients with basic authentication + * modern mail clients (Thunderbird) that speak the JMAP protocol over HTTP and authenticate using their primary username and password in clear text (encrypted through the use of HTTPS) +* JMAP clients with bearer token authentication + * modern mail clients (Thunderbird) that speak the JMAP protocol over HTTP and authenticate using an OIDC token (JWT) obtained from an IDP (typically KeyCloak) +* OpenCloud Groupware with master authentication + * the OpenCloud UI client uses APIs from the OpenCloud Groupware backend (and authenticates using OIDC) + * the OpenCloud Groupware backend, in turn, performs JMAP operations with Stalwart, and authenticates using Stalwart's shared secret master authentication protocol +* OpenCloud Groupware with generated token authentication + * the OpenCloud UI client uses APIs from the OpenCloud Groupware backend (and authenticates using OIDC) + * the OpenCloud Groupware backend, in turn, performs JMAP operations with Stalwart, and authenticates against Stalwart using bearer authentication with JWTs that it generates itself + * in the future, that JWT might also be the JWT that the OpenCloud UI used to authenticate against the OpenCloud Groupware in the first place + +### Stalwart with the LDAP Directory + +```mermaid +flowchart LR + c(client) + s(Stalwart) + l(LDAP) + + c -- IMAP/SMTP --> s + c -- JMAP --> s + s -- LDAP --> l +``` + +Clients authenticate directly against Stalwart, that is configured to use an LDAP authentication Directory. +An LDAP server (e.g. OpenLDAP) is needed as part of the infrastructure. +OpenCloud also has to make use of the same LDAP server. 
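+
+For illustration, the development deployment added alongside this ADR configures such a Directory in `devtools/deployments/opencloud_full/config/stalwart/ldap.toml`; the relevant excerpt is:
+
+```toml
+directory.ldap.type = "ldap"
+directory.ldap.url = "ldap://ldap-server:1389"
+directory.ldap.base-dn = "dc=opencloud,dc=eu"
+directory.ldap.attributes.name = "uid"
+directory.ldap.attributes.secret = "userPassword"
+directory.ldap.filter.name = "(&(|(objectClass=person)(objectClass=groupOfNames))(|(uid=?)(cn=?)))"
+```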
+ +* ✅ MUAs with basic authentication + * MUAs authenticate directly against Stalwart + * Stalwart's LDAP Directory plugin supports plain text authentication by looking up the userPassword attribute in the LDAP server +* ❌ MUAs with application password authentication + * MUAs authenticate directly against Stalwart + * Stalwart's LDAP Directory plugin does not support application passwords as it is hardwired to look up the password in the userPassword attribute in the LDAP server + * even if it did support looking up alternative passwords in LDAP, this would hardly be practical as the application passwords are currently created and stored in OpenCloud, which would need to be modified to store them in LDAP in the first place +* ❌ MUAs with SASL bearer token authentication + * MUAs authenticate directly against Stalwart + * Stalwart's LDAP Directory plugin does not support verifying OIDC tokens +* ✅ JMAP clients with basic authentication + * JMAP clients authenticate directly against Stalwart + * Stalwart's LDAP Directory plugin supports plain text authentication by looking up the userPassword attribute in the LDAP server +* ❌ JMAP clients with bearer token authentication + * JMAP clients authenticate directly against Stalwart + * Stalwart's LDAP Directory plugin does not support verifying OIDC tokens +* ✅ OpenCloud Groupware with master authentication + * the OpenCloud Groupware backend authenticates directly against Stalwart + * Stalwart detects and supports clear text password master authentication regardless of the Directory that is being used, and verifies it against the shared secret password that is configured in the server +* ❌ OpenCloud Groupware with generated token authentication + * the OpenCloud Groupware backend authenticates directly against Stalwart + * Stalwart's LDAP Directory plugin does not support verifying OIDC tokens + +### Stalwart with the OIDC Directory + +```mermaid +flowchart LR + c(client) + s(Stalwart) + o(IDP) + + c -- IMAP/SMTP --> s + c -- JMAP --> s + s -- OIDC HTTP --> o +``` + +Clients authenticate directly against Stalwart, which is configured to use an OIDC authentication Directory. +An OIDC IDP (server) is needed as part of the infrastructure, e.g. KeyCloak. +Optionally, an LDAP server (e.g. OpenLDAP) might be used as well, and KeyCloak would look up users and their credentials in LDAP. + +OpenCloud also has to make use of the same LDAP server, or would need to be modified to rely solely on an OIDC IDP (which would include limitations that are yet to be resolved, e.g. the option of using KeyCloak Admin APIs to retrieve groups, group members, ...)
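+
+For reference, the deployment configuration added in this change (`devtools/deployments/opencloud_full/config/stalwart/config.toml`) already declares such a Directory pointing at Keycloak:
+
+```toml
+directory.keycloak.type = "oidc"
+directory.keycloak.auth.method = "user-token"
+directory.keycloak.endpoint.method = "introspect"
+directory.keycloak.endpoint.url = "http://keycloak:8080/realms/openCloud/protocol/openid-connect/userinfo"
+directory.keycloak.fields.username = "preferred_username"
+directory.keycloak.fields.email = "email"
+```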
+ +* ❌ MUAs with basic authentication + * MUAs authenticate directly against Stalwart + * Stalwart's OIDC Directory plugin does not support plain text authentication +* ❌ MUAs with application password authentication + * MUAs authenticate directly against Stalwart + * Stalwart's OIDC Directory plugin does not support application passwords +* ❓ MUAs with SASL bearer token authentication + * MUAs authenticate directly against Stalwart + * Stalwart's OIDC Directory plugin does not currently support external IDPs, but is expected to in future versions + * as of Stalwart 0.12, this would only work if Stalwart itself is used as the IDP when acquiring a token +* ❌ JMAP clients with basic authentication + * JMAP clients authenticate directly against Stalwart + * Stalwart's OIDC Directory plugin does not support plain text authentication +* ❓ JMAP clients with bearer token authentication + * JMAP clients authenticate directly against Stalwart + * Stalwart's OIDC Directory plugin does not currently support external IDPs, but is expected to in future versions + * as of Stalwart 0.12, this would only work if Stalwart itself is used as the IDP when acquiring a token +* ✅ OpenCloud Groupware with master authentication + * the OpenCloud Groupware backend authenticates directly against Stalwart + * Stalwart detects and supports clear text password master authentication regardless of the Directory that is being used, and verifies it against the shared secret password that is configured in the server +* ❓ OpenCloud Groupware with generated token authentication + * the OpenCloud Groupware backend authenticates directly against Stalwart + * Stalwart's OIDC Directory plugin does not currently support external IDPs, but is expected to in future versions + * as of Stalwart 0.12, this would only work if Stalwart itself is used as the IDP when acquiring a token, which is not the case with this approach as the tokens are generated by the Groupware backend itself + +### Stalwart with the Internal Directory + +```mermaid +flowchart LR + c(client) + s(Stalwart) + + c -- IMAP/SMTP --> s + c -- JMAP --> s +``` + +Clients authenticate directly against Stalwart, which is configured to use an Internal authentication Directory. +Neither an OIDC IDP nor an LDAP server is needed as part of the infrastructure, as principal resources (users, groups) and their credentials exist in Stalwart's storage. + +OpenCloud would not be capable of accessing those resources, which means that provisioning of groups, users, and user passwords must be duplicated and kept in sync between Stalwart and OpenCloud.
+ +* ✅ MUAs with basic authentication + * MUAs authenticate directly against Stalwart + * Stalwart's Internal Directory plugin supports plain text authentication +* ✅ MUAs with application password authentication + * MUAs authenticate directly against Stalwart + * Stalwart's Internal Directory plugin supports application passwords + * users are able to create those themselves using the self-service web UI of Stalwart + * they are not shared with the OpenCloud application passwords though and would need to be provisioned into Stalwart when created in OpenCloud to provide a single UI +* ❌ MUAs with SASL bearer token authentication + * MUAs authenticate directly against Stalwart + * Stalwart's Internal Directory plugin does not support OIDC token authentication +* ✅ JMAP clients with basic authentication + * JMAP clients authenticate directly against Stalwart + * Stalwart's Internal Directory plugin supports plain text authentication +* ❌ JMAP clients with bearer token authentication + * JMAP clients authenticate directly against Stalwart + * Stalwart's Internal Directory plugin does not support OIDC token authentication +* ✅ OpenCloud Groupware with master authentication + * the OpenCloud Groupware backend authenticates directly against Stalwart + * Stalwart detects and supports clear text password master authentication regardless of the Directory that is being used, and verifies it against the shared secret password that is configured in the server +* ❌ OpenCloud Groupware with generated token authentication + * the OpenCloud Groupware backend authenticates directly against Stalwart + * Stalwart's Internal Directory plugin does not support OIDC token authentication + +### Stalwart with the OpenCloud Authentication API + +```mermaid +flowchart LR + c(client) + s(Stalwart) + o(OpenCloud) + l(LDAP) + + c -- IMAP/SMTP --> s + c -- JMAP --> s + s -- REST --> o + o -- LDAP --> l +``` + +⚠️ Clients authenticate directly against Stalwart, which is configured to use an "External" authentication Directory **that is yet to be developed**. +Its protocol is not defined yet, but that is not particularly relevant at this time, as long as it accepts basic and bearer authentication in order to verify both username and password credentials as well as OIDC tokens. + +That External Directory implementation forwards the basic or bearer credentials to an endpoint in the OpenCloud backend, which then responds with whether the authentication was successful, as well as with additional information that is needed by Stalwart (email address, display name, groups, roles, ...)
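+
+Purely as an illustration (neither the endpoint nor its field names are defined yet; everything below is hypothetical), the response of such an OpenCloud Authentication API could be modelled along these lines:
+
+```go
+// Hypothetical sketch only: the real protocol of the OpenCloud
+// Authentication API is yet to be designed.
+package authapi
+
+// AuthResult is the kind of answer Stalwart's External Directory would
+// need back from OpenCloud after forwarding basic or bearer credentials.
+type AuthResult struct {
+	Authenticated bool     `json:"authenticated"`      // whether the credentials were accepted
+	UserName      string   `json:"userName,omitempty"`  // resolved principal name
+	DisplayName   string   `json:"displayName,omitempty"`
+	Emails        []string `json:"emails,omitempty"`    // primary address and aliases
+	Groups        []string `json:"groups,omitempty"`
+	Roles         []string `json:"roles,omitempty"`
+}
+```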
+ +* ✅ MUAs with basic authentication + * MUAs authenticate directly against Stalwart + * Stalwart's External Directory supports plain text authentication by relaying the authentication operation to the OpenCloud backend, which can then authenticate users by username and password using an LDAP server + * note that this option requires having an LDAP server in the environment, including having it accessible by OpenCloud + * if that is not the case, then a viable option is also to support OIDC tokens and application passwords + * to clarify: this scenario is only about supporting authentication using the "primary" username and password +* ✅ MUAs with application password authentication + * MUAs authenticate directly against Stalwart + * Stalwart's External Directory supports application password authentication by relaying the authentication operation to the OpenCloud backend, which can then authenticate against its list of application passwords + * this is the ideal scenario for application passwords, since they are already supported by OpenCloud, and can be created and managed using the OpenCloud UI + * relaying the authentication operation to OpenCloud also prevents the need for duplicate provisioning of application passwords +* ✅ MUAs with SASL bearer token authentication + * MUAs authenticate directly against Stalwart + * Stalwart's External Directory supports OIDC token authentication by relaying the authentication operation to the OpenCloud backend, which can then either perform local token inspection and authentication by verifying the token's signature, or use the OIDC IDP's token introspection endpoint +* ✅ JMAP clients with basic authentication + * JMAP clients authenticate directly against Stalwart + * Stalwart's External Directory supports plain text authentication by relaying the authentication operation to the OpenCloud backend, which can then authenticate users by username and password using an LDAP server + * the same limitations/requirements as for the "MUAs with basic authentication" scenario apply here as well +* ✅ JMAP clients with bearer token authentication + * JMAP clients authenticate directly against Stalwart + * Stalwart's External Directory supports OIDC token authentication by relaying the authentication operation to the OpenCloud backend, which can then either perform local token inspection and authentication by verifying the token's signature, or use the OIDC IDP's token introspection endpoint +* ✅ OpenCloud Groupware with master authentication + * the OpenCloud Groupware backend authenticates directly against Stalwart + * Stalwart detects and supports clear text password master authentication regardless of the Directory that is being used, and verifies it against the shared secret password that is configured in the server +* ✅ OpenCloud Groupware with generated token authentication + * the OpenCloud Groupware backend authenticates directly against Stalwart + * in the worst case, the External Directory plugin in Stalwart would also forward the authentication operation to OpenCloud, which would obviously be able to verify a token it has created + * an optimization might be possible here, if the External Directory implementation allows configuring specific issuers whose tokens are then verified against a JWK set directly, whereas the fallback behaviour would be to query the OpenCloud Authentication API + +### Stalwart with Nauthilus and LDAP + +```mermaid +flowchart LR + c(client) + s(Stalwart) + n(Nauthilus) + l(LDAP) + + c -- IMAP/SMTP -->
s + c -- JMAP --> s + s -- REST --> n + n -- LDAP --> l +``` + +In this scenario, we introduce the [Nauthilus authentication service](https://nauthilus.org/), which has its own API but also a KeyCloak integration plugin. +It supports various backends and can also be scripted for more complex combinations. + +⚠️ It would require the implementation of a Stalwart Nauthilus Directory, **that is yet to be developed**. + +We do not make use of any OpenCloud Authentication API but, instead, attempt to have everything go through Nauthilus instead, backed by an LDAP server that then contains the users, groups, and user passwords. + +The upside of using Nauthilus is that it does brute force attack detection and can provide metrics across multiple protocols and clients in a centralized fashion. + +* ✅ MUAs with basic authentication + * MUAs authenticate directly against Stalwart + * Stalwart's Nauthilus Directory supports plain text authentication by relaying the authentication operation to Nauthilus, e.g. using its JSON API + * Nauthilus provides a response that contains user attributes from LDAP (display name, email addresses, ...) +* ❓ MUAs with application password authentication + * Nauthilus has no support for application passwords in itself + * a Lua plugin could potentially be used in Nauthilus to detect whether the clear text password matches a regular expression for application passwords and, if that is the case, first attempt to verify it through an API call (that does not exist yet) to the OpenCloud backend, but that would definitely be more complex and less elegant than having a single API +* ❓ MUAs with SASL bearer token authentication + * it is currently unclear whether Nauthilus supports OIDC token authentication +* ✅ JMAP clients with basic authentication + * Stalwart's Nauthilus Directory supports plain text authentication by relaying the authentication operation to Nauthilus, e.g. using its JSON API + * Nauthilus provides a response that contains user attributes from LDAP (display name, email addresses, ...) 
+* ❓ JMAP clients with bearer token authentication + * it is currently unclear whether Nauthilus supports OIDC token authentication +* ✅ OpenCloud Groupware with master authentication + * the OpenCloud Groupware backend authenticates directly against Stalwart + * Stalwart detects and supports clear text password master authentication regardless of the Directory that is being used, and verifies it against the shared secret password that is configured in the server +* ❓ OpenCloud Groupware with generated token authentication + * the OpenCloud Groupware backend authenticates directly against Stalwart + * it is currently unclear whether Nauthilus supports OIDC token authentication + * an optimization might be possible here, if the Nauthilus Directory implementation permits for the configuration of specific issuers which should then be verifying against a JWK set directly, whereas the fallback behaviour would be to query the Nauthilus API, but that does sound like a stretch to fit into the concept + +### Stalwart with Nauthilus and an OpenCloud Authentication API + +```mermaid +flowchart LR + c(client) + j(client) + s(Stalwart) + n(Nauthilus) + o(OpenCloud) + l(LDAP) + k(Keycloak) + + c -- IMAP/SMTP --> s + j -- JMAP --> s + s -- REST --> n + subgraph internal auth + n -- REST --> o + o -- LDAP --> l + o -- OIDC --> k + end +``` + +This option also makes use of the [Nauthilus authentication service](https://nauthilus.org/), but instead of it using LDAP to resolve users, we would either make use of its Lua scripting abilities to implement a backend that performs HTTP calls to an OpenCloud Authentication API, or implement an additional Nauthilus backend that uses the Nauthilus API to delegate to another instance, which would then be the OpenCloud Authentication API with support for the Nauthilus API. + +⚠️ As with the previous option, it would require the implementation of a Stalwart Nauthilus Directory, **that is yet to be developed**. + +Interestingly, if the OpenCloud Authentication API follows the Nauthilus API, this scenario can easily be degraded by dropping Nauthilus and, instead, having all services talk to the OpenCloud Authentication API directly. + +* ✅ MUAs with basic authentication + * MUAs authenticate directly against Stalwart + * Stalwart's Nauthilus Directory supports plain text authentication by relaying the authentication operation to Nauthilus, e.g. using its JSON API + * Nauthilus provides a response that contains user attributes from LDAP (display name, email addresses, ...) +* ✅ MUAs with application password authentication + * Nauthilus would forward the authentication request to the OpenCloud Authentication API, which would support application passwords +* ❓ MUAs with SASL bearer token authentication + * it is currently unclear whether Nauthilus supports OIDC token authentication and whether it would be able to forward such requests to the OpenCloud Authentication API +* ✅ JMAP clients with basic authentication + * Stalwart's Nauthilus Directory supports plain text authentication by relaying the authentication operation to Nauthilus, e.g. using its JSON API + * Nauthilus then forwards that request to the OpenCloud Authentication API + * the OpenCloud Authentication API, and then Nauthilus, provides a response that contains user attributes from LDAP (display name, email addresses, ...) 
or claims from the JWT +* ❓ JMAP clients with bearer token authentication + * it is currently unclear whether Nauthilus supports OIDC token authentication and whether it would be able to forward such requests to the OpenCloud Authentication API +* ✅ OpenCloud Groupware with master authentication + * the OpenCloud Groupware backend authenticates directly against Stalwart + * Stalwart detects and supports clear text password master authentication regardless of the Directory that is being used, and verifies it against the shared secret password that is configured in the server +* ❓ OpenCloud Groupware with generated token authentication + * the OpenCloud Groupware backend authenticates directly against Stalwart + * it is currently unclear whether Nauthilus supports OIDC token authentication and whether it would be able to forward such requests to the OpenCloud Authentication API + +> [!IMPORTANT] +> We need to clarify whether the Nauthilus API allows for a JWT to be submitted for the authentication request, and not only username and password – not to secure the request in itself, but to forward an OIDC token based authentication attempt as part of the payload. + +### Comparing Options + +| | MUA basic | MUA app password | MUA sasl | JMAP clients with basic auth | JMAP clients with JWT auth | Groupware Middleware with master auth | Groupware Middleware with JWT auth | +| --- | --- | --- | --- | --- | --- | --- | --- | +| Stalwart 0.12 with LDAP Directory | ✅ MUA → Stalwart | ❌ not supported with LDAP | ❌ not supported with LDAP | ✅ | ❌ | ✅ | ❌ | +| Stalwart 0.12 with OIDC Directory | ❌ | ❌ | ❌ | ❌ | ✅ | ✅ | ❌ | +| Stalwart 0.12 with Internal Directory | ✅ MUA → Stalwart, must be provisioned in Stalwart | ✅ MUA → Stalwart, must be provisioned in Stalwart | ❌ | ❌ | ❌ unless using Stalwart as IDP | ✅ | ❌ | +| Stalwart + OpenCloud Authentication API | ✅ MUA → Stalwart → OpenCloud | ✅ MUA → Stalwart → OpenCloud | ✅ MUA → Stalwart → OpenCloud | ✅ MUA → Stalwart → OpenCloud | ✅ MUA → Stalwart → OpenCloud | ✅ | ✅ | +| Stalwart + Nauthilus + LDAP | ✅ MUA → IMAP proxy → Nauthilus → LDAP | ❌ | ❌ | ✅ | ❌ | ✅ | ❌ | +| Stalwart + Nauthilus + OpenCloud Authentication API | ✅ MUA → IMAP proxy → Nauthilus → OpenCloud | ✅ MUA → IMAP proxy → Nauthilus → OpenCloud | ✅ MUA → IMAP proxy → Nauthilus → OpenCloud | ✅ MUA → IMAP proxy → Nauthilus → OpenCloud | ✅ MUA → IMAP proxy → Nauthilus → OpenCloud | ✅ | ✅ | +| Stalwart + Nauthilus-like OpenCloud Authentication API | ✅ MUA → Stalwart → OpenCloud | ✅ MUA → Stalwart → OpenCloud | ✅ MUA → Stalwart → OpenCloud | ✅ MUA → Stalwart → OpenCloud | ✅ MUA → Stalwart → OpenCloud | ✅ | ✅ | diff --git a/docs/adr/0003-groupware-microservice-vs-oc-integration.md b/docs/adr/0003-groupware-microservice-vs-oc-integration.md new file mode 100644 index 0000000000..18dc9ba636 --- /dev/null +++ b/docs/adr/0003-groupware-microservice-vs-oc-integration.md @@ -0,0 +1,64 @@ +--- +status: proposed +date: 2025-06-24 +author: Pascal Bleser +decision-makers: +consulted: +informed: +title: "Implementing Groupware as a separate Microservice vs integrated in the OpenCloud Stack" +template: https://raw.githubusercontent.com/adr/madr/refs/tags/4.0.0/template/adr-template.md +--- + +* Status: draft + +## Context + +Should the Groupware backend be an independent microservice or be part of the OpenCloud single binary framework? 
+ +The OpenCloud backend is built on a framework that + +* implements token based authentication between services +* allows for a "single binary" deployment mode that runs all services within that one binary +* integrates services such as a NATS event bus + +This decision is about whether the Groupware backend service should be implemented within that framework or, instead, be implemented as a standalone backend service. + +## Decision Drivers + +* single binary deployment strategy is potentially important (TODO how important is it really? stakeholders:?) + +## Considered Options + +* have the Groupware Middleware as an independent microservice +* have the Groupware Middleware implemented within the existing OpenCloud framework + +## Decision Outcome + +TODO + +### Consequences + +TODO + +### Confirmation + +TODO + +## Pros and Cons of the Options + +### Independent Microservice + +* (potentially) good: be free from technical decisions made for the existing OpenCloud stack, to avoid carrying potential technical baggage +* (potentially) good: make use of a framework that is more fitting for the tasks the Groupware backend needs to accomplish +* bad: re-implement framework components that already exist, with the need to maintain those in two separate codebases, or the added complexity of a shared library repository +* bad: not have the ability to include the Groupware backend in the single binary deployment +* neutral: a separate code repository and delivery for the Groupware backend, which might or might not be of advantage +* neutral: may be implemented on a completely different technology stack, including the programming language + +### Part of the framework + +* good: fit into the opinionated choices that were made for the OpenCloud framework so far +* good: many aspects are already implemented in the current framework and can be made use of, potentially enhanced for the needs of the Groupware backend +* good: the ability to include the Groupware backend in the single binary deployment +* neutral: be in the same code repository and part of the same delivery as other services in OpenCloud +* neutral: must be implemented in Go on top of the same technology stack diff --git a/docs/adr/0004-groupware-resource-linking.md b/docs/adr/0004-groupware-resource-linking.md new file mode 100644 index 0000000000..12dedf101c --- /dev/null +++ b/docs/adr/0004-groupware-resource-linking.md @@ -0,0 +1,294 @@ +--- +status: proposed +date: 2025-06-24 +author: Pascal Bleser +decision-makers: +consulted: +informed: +title: "Resource Linking" +template: https://raw.githubusercontent.com/adr/madr/refs/tags/4.0.0/template/adr-template.md +--- + +* Status: draft + +## Context + +Which semantic and technical approach to take in order to provide strong integration of the various products and capabilities of OpenCloud, OpenTalk, and potentially other products as well? 
+ +## Decision Drivers + +* a strong integration that allows users to access resources and relationships without having to switch views, which translates into a "mental switch" as well +* an innovative approach that differs from the traditional way groupware applications have been designed in the past +* TODO more decision drivers from PM +* a model that is open and generic enough to integrate many different types of resources and relationships +* a model that allows for independent and incremental upgrades to the resources and relationships that can be contributed by each service + +## Considered Options + +* resource linking +* application launchers +* TODO? can we come up with more ideas? + +## Decision Outcome + +TODO + +### Consequences + +TODO + +### Confirmation + +TODO + +## Pros and Cons of the Options + +### Resource Linking + +This concept primarily resides on the idea of having resources, which have attributes, and relations between them, pretty much as [RDF (Resource Description Framework)](https://www.w3.org/RDF/) does, where the Groupware backend provides services to explore relations of a given resource. + +* good: decoupling of UI, backends as well as other participants, as backends can gradually evolve the relationships and resources they understand and can contribute to over time, as well as for the UI that may just silently ignore resources it does not support yet or does not want to present to the user +* good: potential for an asynchronous architecture that would enable the UI to present some resources early without having to wait for those that require more processing time or are provided by services that happen to be under heavier load +* good: it should provide ammunition for a modern and original UI that is centered around resources and relationships rather than the usual visual paradigms +* bad: it might be a challenge to implement this approach in a performant way with rapid response times, as it could cause additional complexity and storage services (e.g. to denormalize reverse indexes, cache expensive resource graphs, etc...) + +#### URNs + +Each resource has a unique identifier, for which [URNs (Uniform Resource Names)](https://www.rfc-editor.org/rfc/rfc1737) seem the best representation. + +URNs are composed of + +* a namespace identifier +* a namespace-specific string + +As a convention, we will use the following: + + + + + + + + + + + + + + + + + +
| `urn:` | ns | namespace specific string |
| --- | --- | --- |
| `urn:` | `oc:` | `<type>:<unique identifier>` |

##### Examples

| `urn:` | namespace | type | unique id |
| --- | --- | --- | --- |
| `urn:` | `oc:` | `user:` | `camina.drummer` |
| `urn:` | `oc:` | `contact:` | `klaes.ashford` |
| `urn:` | `oc:` | `event:` | `dd4ea520-e414-41e1-b545-b1c7d4ce57e7` |
| `urn:` | `oc:` | `mail:` | `<1e8074e8-cd56-4358-9f9e-f17cb701b950@opa.org>` |
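
To make the convention concrete, the following is a minimal Go sketch of how such URNs could be parsed and validated; the package, type, and function names are illustrative only and not taken from any existing OpenCloud code.

```go
package urn

import (
	"fmt"
	"strings"
)

// Resource is a parsed urn:oc:<type>:<unique identifier> value.
type Resource struct {
	Type string // e.g. "user", "contact", "event", "mail"
	ID   string // namespace-specific unique identifier
}

// Parse splits an urn:oc URN into its type and identifier parts.
func Parse(s string) (Resource, error) {
	// SplitN keeps any colons inside the identifier intact,
	// e.g. Message-IDs in urn:oc:mail:<...> values.
	parts := strings.SplitN(s, ":", 4)
	if len(parts) != 4 || parts[0] != "urn" || parts[1] != "oc" {
		return Resource{}, fmt.Errorf("not an urn:oc URN: %q", s)
	}
	if parts[2] == "" || parts[3] == "" {
		return Resource{}, fmt.Errorf("missing type or identifier: %q", s)
	}
	return Resource{Type: parts[2], ID: parts[3]}, nil
}

// String renders the resource back into its canonical URN form.
func (r Resource) String() string {
	return fmt.Sprintf("urn:oc:%s:%s", r.Type, r.ID)
}
```

For example, `Parse("urn:oc:user:camina.drummer")` would yield a resource with type `user` and identifier `camina.drummer`.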
+ +#### Exploration API + +Whenever the user puts a resource into focus in the OpenCloud Groupware UI (i.e. by selecting/clicking that resource, e.g. the sender of an email), it may send a request to the Groupware service API to inquire about related resources. + +What those related resources are still stands to be determined, but examples could be along the lines of + +* unread emails from the same sender +* emails exchanged with that sender in the last 7 days +* files recently shared with that user +* spaces or groups in common with that user +* OpenTalk meetings planned within the next 3 days + +In order to decouple the Groupware service from which resources and relations are supported, + +* whenever such an exploration request is received, the Groupware service forwards it to all known services, in a "fan-out" model +* each service can understand the focused resource, or not, but if it does it may return related resources that it is capable of providing using its data model (e.g. OpenTalk providing related meeting resources, OpenCloud Groupware providing related calendar events, contacts, mails, etc...) +* ideally, that happens in an asynchronous fashion, using e.g. [SSE (Server Side Events)](https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events) to push results to the OpenCloud UI to avoid having to wait for the slowest contributor, although that pushes the "reduce" part of this ["map-reduce" operation](https://en.wikipedia.org/wiki/MapReduce) to the client + +```mermaid +graph LR + c(client) + subgraph backend + a(opencloud api) + g(groupware) + ot(opentalk) + u(users) + s(stalwart) + k(keycloak) + end + subgraph storage + ots@{ shape: cyl, label: "opentalk\nstorage"} + ss@{ shape: cyl, label: "stalwart\nstorage"} + l@{ shape: cyl, label: "ldap"} + end + c-->|/related/urn:oc:user:camina.drummer|a + a-->|/related/urn:oc:user:camina.drummer|g + g-->s + s-->ss + a-->|/related/urn:oc:user:camina.drummer|ot + ot-->ots + a-->|/related/urn:oc:user:camina.drummer|u + u-->k + k-->l + + ot-.->|urn:oc:meeting:232403bc-b98f-4643-a917-80bdcfc7aaba|a + g-.->|urn:oc:event:e5193ad3-8f1c-4162-8593-69fe659bcc08|a +``` + +This allows a decoupling of all the participants, enabling each service to add, remove or alter relationships that it is able to contribute for a given resource type. + +Obviously, the UI needs to be able to understand resource types to know how to represent them, but if it silently ignores resource types that it does not know of, backends can evolve independently from the UI. + +#### JSON-LD + +[JSON-LD (JSON for Linking Data)](https://json-ld.org/) seems like a potent representation format for those relationships in a REST environment. 
+ +It could look something like this: + +```json +{ + "@context": { + "@user": "https://schema.opencloud.eu/user.jsonld", + "link": "https://schema.opencloud.eu/linked.jsonld" + }, + "@type": "urn:oc:type:user", + "@id": "urn:oc:user:cdrummer", + "name": "Camina Drummer", + "email": "camina@opa.org", + "roles": ["admin", "pirate"], + "link:rooms": [ + { + "@context": { + "@room": "https://meta.opencloud.eu/room.jsonld", + "link": "https://schema.opencloud.eu/linked.jsonld" + }, + "@id": "urn:oc:room:a3f19df6-6c7d-45fa-b16c-6e168e2a2a43", + "name": "OPA Leadership Standup 2355-02-27", + "start": "2355-02-27T10:58:15.918Z", + "end": "2355-02-27T13:52:59.010Z", + "started_by": { + "@context": "https://meta.opencloud.eu/user.jsonld", + "@type": "urn:oc:type:user", + "@id": "urn:oc:user:adawes", + "name": "Anderson Dawes", + "email": "anderson@opa.org" + }, + "link:events": [ + { + "@context": "https://meta.opencloud.eu/event.jsonld", + "@type": "urn:oc:type:event", + "@id": "urn:oc:event:3e041c88-088c-4015-a32e-5560561f6e26", + "start": "2355-02-27T11:09:15.918Z", + "end": "2355-02-27T13:52:59.010Z", + "status": "confirmed", + "invited": [ + { + "@context": "https://meta.opencloud.eu/user.jsonld", + "@type": "urn:oc:type:user", + "@id": "urn:oc:user:adawes", + "name": "Anderson Dawes", + "email": "anderson@opa.org" + }, + { + "@context": "https://meta.opencloud.eu/user.jsonld", + "@type": "urn:oc:type:user", + "@id": "urn:oc:user:kashford", + "name": "Klaes Ashford", + "email": "klaes@opa.org" + } + ] + } + ], + "members": [ + { + "@context": "https://meta.opencloud.eu/contact.jsonld", + "@type": "urn:oc:type:contact", + "@id": "urn:oc:contact:9ccb247d-a728-4d8f-9259-c28cf6cef567", + "name": "Naomi Nagata", + "email": "naomo@opa.org" + }, + { + "@context": "https://meta.opencloud.eu/user.jsonld", + "@type": "urn:oc:type:user", + "@id": "urn:oc:user:adawes", + "name": "Anderson Dawes", + "email": "anderson@opa.org" + }, + { + "@context": "https://meta.opencloud.eu/user.jsonld", + "@type": "urn:oc:type:user", + "@id": "urn:oc:user:kashford", + "name": "Klaes Ashford", + "email": "klaes@opa.org" + } + ], + "chat": { + "@context": "https://meta.opencloud.eu/file.jsonld", + "@type": "urn:oc:type:file", + "@id": "urn:oc:file:OPA:chatlogs/2355/02/27/a3f19df6-6c7d-45fa-b16c-6e168e2a2a43.md", + "href": "https://cloud.opencloud.eu/spaces/OPA/chatlogs/2355/02/27/a3f19df6-6c7d-45fa-b16c-6e168e2a2a43.md" + } + } + ], + "link:mails": [ + { + "@context": "https://meta.opencloud.eu/mail.jsonld", + "@type": "urn:oc:type:mail", + "@id": "583b9b66-c0b3-41ba-bf6c-a02ec5f4a638@smtp-07.opa.org", + "subject": "About bosmang Fred Johnson", + "date": "2355-01-03T09:39:44.919Z" + }, + ... + ], + "link:shares": [ + { + "@context": "https://meta.opencloud.eu/share.jsonld", + "@type": "urn:oc:type:share", + "@id": "841ef259-584d-4ce6-827f-b53f900c988d", + "filename": "remember the cant.jpg" + } + ] +} +``` + +### Application Launchers + +Have a UI that is comprised of multiple more-or-less separate applications, with an application launcher bar, with each application being an icon in itself in that launcher. + +Similar to what e.g. Google does, or Open-Xchange App Suite. 
+ +* bad: does not make for an integrated application paradigm since users still have to context switch between those applications/views to perform tasks + diff --git a/docs/adr/0005-groupware-software-stack.md b/docs/adr/0005-groupware-software-stack.md new file mode 100644 index 0000000000..3e7acd355b --- /dev/null +++ b/docs/adr/0005-groupware-software-stack.md @@ -0,0 +1,62 @@ +--- +status: proposed +date: 2025-06-25 +author: Pascal Bleser +decision-makers: +consulted: +informed: +title: "Groupware Software Stack" +template: https://raw.githubusercontent.com/adr/madr/refs/tags/4.0.0/template/adr-template.md +--- + +* Status: draft + +## Context + +Which software stack to choose for the implementation of the OpenCloud Groupware service? + +## Considered Options + +* [Go](https://go.dev/) with the OpenCloud framework, as it is used in OpenCloud +* Rust, as it is a similarly modern language, with know-how in Opentalk +* Java with an opinionated microservice framework (e.g. [Micronaut](https://micronaut.io/)) + +## Decision Outcome + +The decision was taken to go with the existing Go technology stack used in OpenCloud, since it allows for + +* everyone in the Groupware backend team to contribute +* having a single technology stack across all OpenCloud backend features +* having the option of a single binary deployment + +### Consequences + +TODO + +### Confirmation + +TODO + +## Pros and Cons of the Options + +### Go + +* good: established in the OpenCloud team, with expertise, potentially broadening the team that can contribute to Groupware development +* good: make use of the existing infrastructure and framework, including the single binary deployment option +* bad: less mature and capable technology stack, potentially problematic with regards to lack of asynchronous I/O and streamed HTTP processing + +### Rust + +* good: shared knowledge with the team of developers at OpenTalk +* bad: little to no experience in the current OpenCloud team + +### Java + +* bad: little to no experience in the current OpenCloud team, with exception of the Groupware members +* good: extensive experience with Micronaut with one OpenCloud developer +* good: opinionated and well documented +* good: cloud native +* good: mature technology stack +* good: asynchronous I/O and virtual threads make for efficient resource usage +* potentially bad: likely to not fit well into low resource environments (although native compilation using GraalVM is possible) +* potentially bad: prevents the single binary deployment option from including Groupware diff --git a/docs/adr/0006-groupware-stalwart-as-backend.md b/docs/adr/0006-groupware-stalwart-as-backend.md new file mode 100644 index 0000000000..cdb34cc3e1 --- /dev/null +++ b/docs/adr/0006-groupware-stalwart-as-backend.md @@ -0,0 +1,73 @@ +--- +status: proposed +date: 2025-06-25 +author: Pascal Bleser +decision-makers: +consulted: +informed: +title: "Stalwart as Groupware Backend" +template: https://raw.githubusercontent.com/adr/madr/refs/tags/4.0.0/template/adr-template.md +--- + +* Status: draft + +## Context + +Which Groupware backend should be used? + +## Considered Options + +* [Stalwart](https://stalw.art/), contains not only mail but also collaborative features in an integrated package +* traditional IMAP/POP/SMTP stacks (e.g. 
[Dovecot](https://www.dovecot.org/) + [Postfix](https://www.postfix.org/)) + +## Decision Outcome + +The decision was made to go with Stalwart, as it reduces the implementation effort on our end, allowing us for a much faster time-to-market with a significantly smaller team of developers. + +### Consequences + +We will most probably not need to develop much of a calendar or contact stack ourselves, as Stalwart is planning to implement those as part of the upcoming [JMAP](https://jmap.io/spec-core.html) specifications for [contacts](https://jmap.io/spec-contacts.html) and [calendars](https://jmap.io/spec-calendars.html). + +The Groupware API will largely consist of a translation of high-level operations for the UI into JMAP operations sent to Stalwart. + +#### Risks + +On the flip side, there are a number of risks associated with that decision. + +* Stalwart underdelivers on its promises + * calendaring provides insufficient features for our implementation (e.g. event series handling being too basic) + * not scaling for large deployments + * necessary adaptations (e.g. for authentication integration) are rejected upstream + * etc... + +### Confirmation + +TODO + +## Pros and Cons of the Options + +### Stalwart + +* good: integrated package that contains IMAP/POP, SMTP, anti-spam, AI, encryption at rest and many other features in one +* good: modern stack +* good: capable of fault tolerance in large deployments through its use of [FoundationDB](https://www.foundationdb.org/) +* bad: relatively new project with few to no large scale productive deployments (yet) +* bad: significant human [SPoF](https://en.wikipedia.org/wiki/Single_point_of_failure)/[bus factor](https://en.wikipedia.org/wiki/Bus_factor) issue as the development team currently consists of one +* good: supports and drives the JMAP protocol ([JMAP Core](https://jmap.io/spec-core.html), [JMAP Mail](https://jmap.io/spec-mail.html), [JMAP Contacts](https://jmap.io/spec-contacts.html), [JMAP Calendars](https://jmap.io/spec-calendars.html), [JMAP Tasks](https://jmap.io/spec-tasks.html), ...), + * which provides more high-level operations that we don't need to implement ourselves, + * as well as a much cleaner specification that reduces efforts too, + * and additionally can be implemented with an efficient stateless HTTP I/O stack +* bad: no viable broad JMAP implementation alternatives in case Stalwart does not deliver ([Apache James](https://james.apache.org/) only seems to support a basic subset of JMAP) +* good: implements a lot of Groupware "business logic" on its own, reducing the implementation effort on our end, + * most notably by not having to deal with IMAP extensions and quirks, + * or the complexity of calendar events + +### IMAP/SMTP + +* good: there are a number of alternatives in case a specific implementation does not deliver +* good: the best implementation candidates are well-established, used in large amounts of productive deployments, supported by teams of developers +* bad: more complex stack composed of numerous components as opposed to an all-in-one implementation +* bad: the effort and complexity of having to deal with IMAP, + * its complexity due to its extensions and its many quirks, + * as well as a significantly less efficient I/O stack that requires stateful session handling +* bad: requires the complete implementation of contacts, calendars and tasks in our own stack, as none of those services are provided by IMAP/SMTP backends diff --git a/docs/adr/0007-groupware-webui-api.md 
b/docs/adr/0007-groupware-webui-api.md new file mode 100644 index 0000000000..efe12f70a1 --- /dev/null +++ b/docs/adr/0007-groupware-webui-api.md @@ -0,0 +1,512 @@ +--- +status: accepted +date: 2025-07-22 +author: pbleser-oc +consulted: AlexAndBear, butonic, dragotin, fschade, JammingBen, kulmann, martinherfurth, micbar, rhafer +title: "API for the Groupware Web UI" +--- + + +## Context + +We need a comprehensive HTTP API for the OpenCloud Web UI to provide access to the following (upcoming) modules and Groupware functionalities: + +* Mail +* Contacts +* Calendar +* Tasks +* Chat +* Configuration + +```mermaid +graph LR + subgraph clients + ui(OpenCloud UI) + muas(Other
MUAs)
  end
  subgraph Backend
    subgraph OpenCloud
      direction TB
      groupware("OpenCloud<br>Groupware")
      drive("OpenCloud<br>Drive")
    end
    stalwart(Stalwart)
  end
  subgraph Storage
    drive_storage[(drive<br>storage)]
    stalwart_metadata[(metadata<br>storage)]
    stalwart_storage[(object
storage)] + end + ui x@==>|?|groupware + x@{ animate: true } + ui-->|Graph|drive + muas-->|IMAP,SMTP,*DAV|stalwart + groupware-->drive + groupware-->|JMAP|stalwart + drive-->drive_storage + stalwart-->stalwart_metadata + stalwart-->stalwart_storage +``` + +Additionally, the API must also be able to provide information about related resources and their relationships, as outlined in [the Resource Linking ADR](./0003-groupware-resource-linking.md). + +For the OpenCloud Drive services, the communication between UI client and backend services is performed via the [LibreGraph API](https://github.com/opencloud-eu/libre-graph-api), which is based on [Microsoft Graph](https://developer.microsoft.com/en-us/graph). The goal of this ADR is **not** to question or change that decision, and the choice of an option is merely for the communication with the Groupware backend. + +Communication between the OpenCloud Groupware and Stalwart will make use of the [JMAP (JSON Meta Application Protocol) protocol](https://jmap.io/spec-mail.html). + +The API for the OpenCloud Web UI is **not** supposed to be an abstraction of that and thus may use JMAP data formats. + +Other [MUAs (Mail User Agents)](https://en.wikipedia.org/wiki/Email_client) converse directly with Stalwart using [IMAP](https://en.wikipedia.org/wiki/Internet_Message_Access_Protocol) or [POP3](https://en.wikipedia.org/wiki/Post_Office_Protocol), [SMTP](https://en.wikipedia.org/wiki/Simple_Mail_Transfer_Protocol), [CalDAV](https://en.wikipedia.org/wiki/CalDAV), [CardDAV](https://en.wikipedia.org/wiki/CardDAV), or JMAP itself. + +This ADR concerns the decision regarding which API approach/process/technology/specification to use, not the details of the data model and such, which will need to be fleshed out following the requirements and priorities of the OpenCloud UI Client development, regardless of the selected approach. + +## Decision Drivers + +### UI Driven + +The decision must be significantly driven by the OpenCloud UI Client developers, since they are the primary consumers of the API. + +They will also be the sole consumers for a foreseeable while until the OpenCloud Groupware UI reaches a stable feature-complete milestone, which is the earliest point in time for the APIs to be considered stable as well and potentially be consumed by third parties. + +Backend developers are stakeholders in that aspect as well though, as the choice of API approach has an impact on the complexity, costs and maintainability of the backend services as well. + +### Economic Awareness + +Reduction of complexity and implementation efforts, albeit not at all costs, and not only on the short run. + +It is obviously of advantage when an option requires less implementation, or less complexity in its implementation. + +### Efficiency + +Regarding efficiency, the goal is to design an API that is tailored to providing responsiveness ([pagination](https://apisyouwonthate.com/blog/api-design-basics-pagination/), [SSEs (Server-Side Events)](https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events), ...) and good network performance. + +The latter is achieved by minimizing the number of roundtrips between the client and the servers, which, in turn, is typically achieved through the use of higher level APIs as opposed to a granular API that provides more flexibility but also, by its very nature, requires the combination of multiple request-response roundtrips over the wire. 
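
To illustrate the SSE aspect of this driver, here is a minimal Go sketch of how a Groupware endpoint could push partial results to the Web UI as they become available; the route, types, and data are hypothetical and only meant to show the streaming mechanics, not an actual OpenCloud API.

```go
package main

import (
	"encoding/json"
	"fmt"
	"net/http"
)

// RelatedResource is a hypothetical, minimal payload for one related resource.
type RelatedResource struct {
	URN  string `json:"urn"`
	Name string `json:"name"`
}

// relatedHandler streams related resources as Server-Sent Events so the UI
// can render early results without waiting for the slowest contributor.
func relatedHandler(w http.ResponseWriter, r *http.Request) {
	flusher, ok := w.(http.Flusher)
	if !ok {
		http.Error(w, "streaming unsupported", http.StatusInternalServerError)
		return
	}
	w.Header().Set("Content-Type", "text/event-stream")
	w.Header().Set("Cache-Control", "no-cache")

	// In a real service this channel would be fed asynchronously by the
	// fan-out to the contributing backends; here it holds static examples.
	results := make(chan RelatedResource, 2)
	results <- RelatedResource{URN: "urn:oc:event:dd4ea520-e414-41e1-b545-b1c7d4ce57e7", Name: "OPA Leadership Standup"}
	results <- RelatedResource{URN: "urn:oc:contact:klaes.ashford", Name: "Klaes Ashford"}
	close(results)

	for res := range results {
		payload, err := json.Marshal(res)
		if err != nil {
			continue
		}
		fmt.Fprintf(w, "event: related\ndata: %s\n\n", payload)
		flusher.Flush()
	}
}

func main() {
	http.HandleFunc("/groupware/related", relatedHandler)
	http.ListenAndServe(":8080", nil) // illustration only, no TLS or auth
}
```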

### Third Party Consumption

We are assuming that the APIs are public APIs (not just technically) and may be consumed by SDKs and third parties.

Implications are that care must be put into providing an API that is stable, versioned, has a changelog, and is potentially provided as a product with [LTS (Long-term Support)](https://en.wikipedia.org/wiki/Long-term_support) options.

This also hints at the necessity of a capability exchange/discovery protocol between clients and the Groupware backend, as we will have different versions of clients and servers in the wild, and they need to be able to understand each other. Crucially, if locally running clients are developed, they can go a long time without being updated.

## Considered Options

* [LibreGraph](#libregraph)
* [JMAP](#jmap)
* [custom REST API](#custom-rest-api) (albeit potentially based on standards, at least partially)

## Decision Outcome

The decision was made to go with the custom REST implementation option, mainly due to

* the use of LibreGraph providing few benefits
  * it would provide us with a fleshed-out API for groupware
  * but we would not implement it fully
  * and it is really an API for Outlook and Exchange, not a generic groupware standard
  * furthermore, a significant blocker is that it does not provide for a way to support multiple accounts for a user
  * the experience of implementing and using the LibreGraph API for the Drive components has brought to light some challenges that we would not like to repeat
* using JMAP directly
  * is a very interesting option in terms of standards, as it is an RFC,
  * but we currently see that approach as too risky, given the potential complexity of parsing the payloads of JMAP commands and their backreferences, plugging those across commands that must be forwarded as-is to Stalwart and others that need to be handled by the Groupware middleware itself, but also the potential need to reverse engineer the high-level meaning of chained low-level JMAP commands in order to implement enrichment, caches, reverse indexes, etc...
  * however, it might be a better path forward in the future, especially if JMAP becomes a viable option for replacing the current use of LibreGraph as well

### Consequences

* we will need to design an API on our own, from scratch, albeit maximally making use of JMAP data structures
* that API will need to be maintained as a product, with documentation, versioning, LTS

## Pros and Cons of the Options

* [LibreGraph](#proscons-libregraph)
* [JMAP](#proscons-jmap)
* [Custom REST API](#proscons-custom)

### LibreGraph

[LibreGraph](https://github.com/opencloud-eu/libre-graph-api) is an API specification that is heavily inspired by and based on [Microsoft Graph](https://developer.microsoft.com/en-us/graph), of which it is a partial implementation, but also with modifications where necessary.

Example:

```text
GET /v1.0/me/messages?$select=sender,subject&$count=50&$orderby=received
```

#### Good

* is already in use as the API for OpenCloud Drive operations, with a small stack to use it in the OpenCloud Web UI
* provides an API and data model that has already been thought out and used in production (albeit with only a few different implementations)

#### Neutral

* does not have to follow the Microsoft Graph API and can be customized to our own needs, but in that case it becomes doubtful that there is any benefit in mimicking the Graph API in the first place if we diverge from it
* there is no compatibility benefit
  * the only MUA that uses the Microsoft Graph API is Microsoft Outlook, and it is not a goal to support Microsoft Outlook as a MUA beyond standard IMAP/SMTP/CalDAV/CardDAV services (and that would be Microsoft Graph, not LibreGraph nor any customizations we would require)
  * we will not implement all of the Microsoft Graph API
  * we will not implement parts of the Microsoft Graph API as-is either, but will need to make modifications
* if there is a requirement for considering that API as a public API for third party integrators, then the API also needs to be documented, maintained, versioned, and kept stable as much as possible (this is neutral because it is a requirement that exists with every option)

#### Bad

* not an easy API to implement
  * although we have libraries that take care of some of the more complex parts, such as parsing [OData](https://www.odata.org/) expressions
  * really only easy to use when backed by a relational database and an object relational mapping framework using [ASP.NET](https://dotnet.microsoft.com/en-us/apps/aspnet) or [JPA](https://en.wikipedia.org/wiki/Jakarta_Persistence)/[Hibernate](https://hibernate.org/)
  * its data model and peculiar interpretation of REST are really not [idiomatic](https://en.wikipedia.org/wiki/HATEOAS) at all, and are clearly the result of reverse engineering the capabilities of Microsoft SQL Server and Exchange into a "standard" from the back, and then Microsoft Outlook's features and capabilities from the front
* not tailored to our needs
  * we will most probably have a lot of cases in which we have to twist the Graph API to express what the UI needs
  * will require using complex filters, which then require complex parsing in the backend in order to translate them into JMAP
  * as opposed to directly using an expressive and maximally matching API in the first place
  * we are likely to encounter use-cases that are not covered by the Graph API (especially due to our resource linking approach)
* does not support multiple accounts per user
  * would require the addition of an account parameter, as a query parameter or as part of the path, which would make every URL in the API incompatible with Microsoft Graph
* more implementation effort than JMAP
* the JMAP RFCs already provide a data model, and we would end up converting between them all the time, with incompatibilities (Graph has attributes JMAP doesn't, and the other way around)
* possibly (probably?) more implementation effort than a custom REST API, due to its complexity

#### Decision Drivers

* UI Driven
  * some members of the OpenCloud Web Team strongly prefer not to use LibreGraph due to its complexity and to the fact that we would have to retrofit operations into an existing API that was designed by a third party
  * one upside is that there is already a client stack for performing LibreGraph operations, which could be reused to some degree for the Groupware APIs as well; it does not amount to all that much code though
* Economic Awareness
  * more complexity and more effort than the other options due to the inherent complexity of the specification
  * a data model is already specified in full, which might save us some time on that front
    * although probably not really, since the actual data model we will work with on the backend is prescribed by JMAP, and we will only be looking to map attributes between JMAP and LibreGraph
  * the data model is not necessarily thoroughly documented either, which will leave room for interpretation, also due to incompatibilities between JMAP and Graph
  * there will be attributes that are defined in JMAP and that we will receive from Stalwart that will not have a corresponding attribute in Graph (or be a list of values as opposed to a single value), and those will require either losing some data by squashing it into the Graph data model, or extending the Graph data model, which renders us incompatible with it
* Efficiency
  * since the API is not tailored to our needs, we are much more likely to end up performing multiple roundtrips for single high level operations
* Third Party Consumption
  * for some of the operations, we could point to the Microsoft Graph documentation, although that would not make for a great experience either; we would probably need to replicate it
  * our deviations and extensions will have to be maintained just like the other options
  * LibreGraph doesn't help with API stability either, since
    * we don't implement all of it, and need to document what we implement and what we don't,
    * won't be compatible either due to modifications (additional parameters, unsupported parameters, different interpretations),
    * and will just as equally need to evolve as the other options, requiring the documentation of changes as well
  * will be required to be maintained as a public API
    * documentation
    * LTS
    * versioning

### JMAP

[JMAP (JSON Meta Application Protocol)](https://jmap.io/spec.html) is a set of specifications that are codified in RFCs:

* [RFC 8620](https://tools.ietf.org/html/rfc8620): core JMAP protocol
* [RFC 8621](https://tools.ietf.org/html/rfc8621): JMAP Mail
* [RFC 8887](https://www.rfc-editor.org/rfc/rfc8887.html): JMAP subprotocol for WebSocket
* [RFC 9404](https://www.rfc-editor.org/rfc/rfc9404.html): JMAP Blob Management Extension
* [RFC 9425](https://www.rfc-editor.org/rfc/rfc9425.html): JMAP Quotas
* [RFC 9553](https://www.rfc-editor.org/rfc/rfc9553.html): JSContact, the contact data format used by JMAP Contacts
* [RFC 8984](https://www.rfc-editor.org/rfc/rfc8984.html): JSCalendar, the calendar data format used by JMAP Calendars

of which some are still in development at the time of writing:

* [JMAP Contacts](https://jmap.io/spec-contacts.html)
* [JMAP Calendars](https://jmap.io/spec-calendars.html)
* [JMAP Sharing](https://jmap.io/spec-sharing.html)
* [JMAP Tasks](https://jmap.io/spec-tasks.html)

To exemplify the JMAP protocol, the following code block is a JMAP request that

* fetches the last 30 received emails from a mailbox (folder)
* the
threads of those emails +* email metadata of all of those threads, including a preview + +
+Click here to toggle the display of this example. + +```json +[[ "Email/query", { + "accountId": "ue150411c", + "filter": { + "inMailbox": "fb666a55" + }, + "sort": [{ + "isAscending": false, + "property": "receivedAt" + }], + "collapseThreads": true, + "position": 0, + "limit": 30, + "calculateTotal": true +}, "0" ], +[ "Email/get", { + "accountId": "ue150411c", + "#ids": { + "resultOf": "0", + "name": "Email/query", + "path": "/ids" + }, + "properties": [ + "threadId" + ] +}, "1" ], +[ "Thread/get", { + "accountId": "ue150411c", + "#ids": { + "resultOf": "1", + "name": "Email/get", + "path": "/list/*/threadId" + } +}, "2" ], +[ "Email/get", { + "accountId": "ue150411c", + "#ids": { + "resultOf": "2", + "name": "Thread/get", + "path": "/list/*/emailIds" + }, + "properties": [ + "threadId", + "mailboxIds", + "keywords", + "hasAttachment", + "from", + "subject", + "receivedAt", + "size", + "preview" + ] +}, "3" ]] +``` + +
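
For illustration, a minimal Go sketch of how a client could submit such a request over HTTP; the endpoint URL and bearer token are placeholders, and the method calls are trimmed to a single query for brevity (a real client would first discover the API URL via the JMAP session resource, per RFC 8620).

```go
package main

import (
	"bytes"
	"fmt"
	"io"
	"net/http"
	"os"
)

func main() {
	// A trimmed JMAP request object (RFC 8620); the full chained example
	// above would go into "methodCalls" unchanged.
	body := []byte(`{
	  "using": ["urn:ietf:params:jmap:core", "urn:ietf:params:jmap:mail"],
	  "methodCalls": [
	    ["Email/query", {"accountId": "ue150411c", "limit": 30}, "0"]
	  ]
	}`)

	// Placeholder API URL; normally taken from the JMAP session resource.
	req, err := http.NewRequest(http.MethodPost, "https://mail.example.test/jmap/api", bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Authorization", "Bearer "+os.Getenv("JMAP_TOKEN"))

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	payload, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.Status)
	fmt.Println(string(payload))
}
```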
+ +#### Good + +* flexible protocol that can easily be implemented by clients +* potentially does not require implementation efforts on the backend side +* would obviously support the full potential of JMAP and Stalwart +* we could potentially extend JMAP with our own data models and operations based on the [JMAP Core Protocol](https://jmap.io/spec-core.html), possibly even propose them as RFCs +* we can start with JMAP request objects that contain only a few or even only one JMAP methods (indicated by the [maxCallsInRequest capability](https://datatracker.ietf.org/doc/html/rfc8620#section-2)), allowing more calls as we need + * clients could implement the funtionality they need using multiple requests in the beginning, then we implement missing functionality on the server + * this would allow us to speed up requests that we need while at the same time giving clients the ability to make any necessary individual calls + * probably only a partially useful approach since chaining JMAP requests is necessary for even the most mundane operations, to avoid the inefficiency of multiple roundtrips + +#### Neutral + +* the [existing JMAP specifications](https://jmap.io/spec.html) will not cover 100% of the Web UI API needs (e.g. configuration settings[^config], [resource linking](./0003-groupware-resource-linking.md), ...), but that does not prevent us from implementing additional custom APIs, either as non-JMAP REST APIs, or as extensions of JMAP +* we would need to gauge whether JMAP communication + * should occur directly between the OpenCloud UI and Stalwart, + * or whether an OpenCloud Groupware service should be used as an intermediary and as an [anti-corruption layer](https://ddd-practitioners.com/home/glossary/bounded-context/bounded-context-relationship/anticorruption-layer/) +* if there is a requirement for considering that API as a public API for third party integrators, then the API also needs to be documented, maintained, versioned, and kept stable as much as possible (this is neutral because it is a requirement that exists with every option) + +[^config]: although Stalwart will most likely have a [JMAP API for application configuration settings as well](https://matrix.to/#/!blIcSTIPwfKMtOEWcg:matrix.org/$CD9C6IZN28bbmN0Arb_Y-RapgsS4XqAqnDgf15yJahM?via=matrix.org&via=mozilla.org&via=chat.opencloud.eu) + > Message from [Mauro](https://github.com/mdecimus): + > + > Hi everyone, I'm curious what you think about standardizing a simple protocol/extension for users to easily manage certain account settings directly from their email clients. For instance, such a protocol could handle: + > + > * Passwords, app passwords, and MFA settings + > * Locale preferences + > * Timezone configuration + > * Basic email forwarding (without needing custom Sieve scripts) + > * Vacation/auto-responses + > * Blocking specific email addresses + > * Spam reporting (though not strictly a setting) + > * Calendar-related preferences + > * Encryption-at-rest settings + > * Mail auto-expunge policies + > * ... and potentially more. + > + > My initial thought is to implement this as a JMAP extension rather than inventing another protocol similar to ManageSieve, which feels somewhat like a "Frankenstein" IMAP extension. + > + > Many mailbox providers already offer some or all of these settings through their web interfaces, but a standardized JMAP-based extension could let users adjust these directly within their preferred email clients or via APIs. 

#### Bad

* potentially bad: most probably too flexible for its own good, as that makes it difficult to reverse-engineer the high-level meaning of a set of JMAP requests in order to capture their semantics, e.g. to implement caching or reverse indexes for performance
* since the OpenCloud Drive backends use the LibreGraph API, using a JMAP-based API for Groupware bears the risk of having multiple APIs to do the same thing, which we need to be careful about, and avoid if possible

> [!NOTE]
> This seems like a mild "bad" item, but the risk here is significant: if it turns out that we need to capture the semantics of API requests to perform additional operations (e.g. caching or indexing for performance reasons, or to decorate the data from Stalwart with information from other services), then we would have to re-implement the whole API, as JMAP is too complex to parse to extract semantics from.

#### Two Approaches

There are two approaches as to how to implement our protocol based on JMAP:

* either our clients must split JMAP operations and send some to Stalwart, and others to the Groupware backend (depending on which endpoint is in charge of which API)
* or our clients send all the JMAP operations to the Groupware backend, which is then in charge of relaying to Stalwart those JMAP commands that are to be handled by Stalwart

##### Directly to Stalwart

If the OpenCloud UI Client communicates directly with Stalwart (using JMAP), then

* good: we don't need to implement any sort of "bridge" in the OpenCloud Groupware service (although the implementation effort is likely to be low)
* good: we avoid an additional hop in the network, gaining on performance and potentially on throughput
* bad: the UI client will still have to perform additional API requests to the OpenCloud Groupware service for data and features that are not provided by Stalwart (e.g. [Resource Linking](./0003-groupware-resource-linking.md)), which is likely to lead to an increase in the number of network roundtrips
* bad: would be unable to extend the protocol with OpenCloud Groupware specific models and data
* bad: would be unable to implement caching or similar performance improvements if necessary
* bad: prevents us from implementing infrastructure features that are not present in Stalwart and might never be available in the way we would need them, e.g. sharding across multi-site redundancy

```mermaid
graph LR
  subgraph clients
    ui(OpenCloud UI)
    muas(Other<br>MUAs)
  end
  subgraph Backend
    subgraph OpenCloud
      direction TB
      groupware("OpenCloud<br>Groupware")
      drive("OpenCloud<br>Drive")
    end
    stalwart(Stalwart)
  end
  subgraph Storage
    drive_storage[(drive<br>storage)]
    stalwart_metadata[(metadata<br>storage)]
    stalwart_storage[(object<br>storage)]
  end
  ui x@==>|JMAP|stalwart
  x@{ animate: true }
  ui y@==>|JMAP or REST|groupware
  y@{ animate: true }
  ui-->|Graph|drive
  muas-->|IMAP,SMTP,*DAV|stalwart
  groupware-->drive
  groupware-->|JMAP|stalwart
  drive-->drive_storage
  stalwart-->stalwart_metadata
  stalwart-->stalwart_storage
```

##### Groupware intermediary

Alternatively, if the OpenCloud UI Client exclusively communicates with the OpenCloud Groupware service (using JMAP), then

* good: the OpenCloud Groupware service acts as an anti-corruption layer, which would allow us to
  * implement caching and similar performance improvement measures if necessary (e.g. reverse indexing of costly data)
  * implement infrastructure features that are not present in Stalwart and might never be available in the way we would need them, e.g. sharding across multi-site redundancy
  * extend the JMAP protocol
* good: it enables us to minimize network roundtrips between the OpenCloud UI Client and the OpenCloud Groupware backend, as there is no need to perform additional requests elsewhere
* bad: we have an additional intermediary hop that "just" relays operations to Stalwart most of the time
  * due to Go HTTP stack limitations (lack of zero-copy asynchronous I/O),
  * that might incur the cost of "needlessly" copying data in memory
  * as well as performing blocking I/O (at the very least since JMAP requests first need to be read in full by the OpenCloud Groupware before they can be sent to Stalwart more or less as-is, and the same applies to the responses)

```mermaid
graph LR
  subgraph clients
    ui(OpenCloud UI)
    muas(Other<br>MUAs)
  end
  subgraph Backend
    subgraph OpenCloud
      direction TB
      groupware("OpenCloud<br>Groupware")
      drive("OpenCloud<br>Drive")
    end
    stalwart(Stalwart)
  end
  subgraph Storage
    drive_storage[(drive<br>storage)]
    stalwart_metadata[(metadata<br>storage)]
    stalwart_storage[(object<br>storage)]
  end
  ui y@==>|JMAP|groupware
  y@{ animate: true }
  ui-->|Graph|drive
  muas-->|IMAP,SMTP,*DAV|stalwart
  groupware-->drive
  groupware-->|JMAP|stalwart
  drive-->drive_storage
  stalwart-->stalwart_metadata
  stalwart-->stalwart_storage
```

#### Decision Drivers

* UI Driven
  * the UI team did not express any particular preference for this option, but the JMAP protocol is simple to implement on any client
* Economic Awareness
  * there would be less of a need to develop an API, but that does not weigh much in the balance
  * developing a generic inbound JMAP command processing engine that is capable of resolving backreferences across requests that can be sent out to different backends (Stalwart, Drive, Groupware, OpenTalk, ...) seems risky in terms of complexity, also since Go doesn't have much of a [well-supported Reactive framework](https://github.com/ReactiveX/RxGo)
* Efficiency
  * the ability of the JMAP protocol to chain multiple low-level commands provides for a very efficient way to compose higher-level operations without the need for multiple round-trips
* Third Party Consumption
  * for some of the operations, we could point to the JMAP documentation and RFCs, although that would not make for a great experience either; we would probably need to replicate it
  * our protocol extensions will have to be maintained just like the other options
  * will be required to be maintained as a public API
    * documentation
    * LTS
    * versioning

### Custom REST API

A custom REST API would implement the resources and semantics as they are needed by the UI, and would be strongly if not fully UI-driven.

The data model should remain close or equal to JMAP's, to avoid data loss by converting back and forth.

We might look into existing specifications for formatting JSON payloads, such as [JSON:API](https://jsonapi.org/), or partial ones such as [JSON-LD](https://json-ld.org/) for relationships between resources, but this is currently outside of the scope of this ADR.

```mermaid
graph LR
  subgraph clients
    ui(OpenCloud UI)
    muas(Other<br>MUAs)
  end
  subgraph Backend
    subgraph OpenCloud
      direction TB
      groupware("OpenCloud<br>Groupware")
      drive("OpenCloud<br>Drive")
    end
    stalwart(Stalwart)
  end
  subgraph Storage
    drive_storage[(drive<br>storage)]
    stalwart_metadata[(metadata<br>storage)]
    stalwart_storage[(object
storage)] + end + ui y@==>|REST|groupware + y@{ animate: true } + ui-->|Graph|drive + muas-->|IMAP,SMTP,*DAV|stalwart + groupware-->drive + groupware-->|JMAP|stalwart + drive-->drive_storage + stalwart-->stalwart_metadata + stalwart-->stalwart_storage +``` + +Example: + +```text +GET /groupware/startup/1/?mails=50 +``` + +#### Good + +* completely tailored to the needs of the OpenCloud UI +* a higher-level API allows for easily understanding the semantic of each operation, which enables the potential for keeping track of data in order to implement reverse indexes and caching, if necessary to achieve functional or performance goals, as opposed to using a lower-level API such as JMAP which is maximally flexible and difficult to reverse-engineer the meaning of the operation and data +* can also be tailored to the capabilities of JMAP without exposing all of its flexibility +* provides the potential for expanding upon what JMAP provides +* would support the full potential of JMAP and Stalwart since the API would be designed accordingly +* allows learning how to use and cache individual JMAP method call responses, allowing to make a better decision in the future if JMAP should be used by clients + +#### Neutral + +* if there is a requirement for considering that API as a public API for third party integrators, then the API also needs to be documented, maintained, versioned, and kept stable as much as possible (this is neutral because it is a requirement that exists with every option) + +#### Bad + +* only partially follows any standards (REST, JSON, JMAP for data models) +* requires designing the API from scratch, as opposed to using the Graph API which already prescribes one + * although it probably makes sense to re-use the data model of JMAP, which is prescribed in RFCs, also to avoid data loss and copying things around needlessly +* since the OpenCloud Drive backends use the LibreGraph API, using a custom REST API for Groupware bears the risk of having multiple APIs to do the same thing, which we need to be careful about, and avoid if possible + +#### Decision Drivers + +* UI Driven + * favoured solution for the OpenCloud Web UI team +* Economic Awareness + * designing a new custom API is not much effort since it is UI requirements driven + * maintaining a new custom API or JMAP extensions is not more effort either, since the exact same thing needs to be done with LibreGraph, as it will have numerous exceptions and will require documenting those, as well as which parts of the Microsoft Graph API are implemented and which aren't +* Efficiency + * the most efficient approach since it is tailored to what is actually needed for the OpenCloud UI, which will allow us to reduce the roundtrips to a minimum +* Third Party Consumption + * a custom API will be required to be maintained as a public API + * documentation + * LTS + * versioning diff --git a/docs/adr/0008-configuration-settings.md b/docs/adr/0008-configuration-settings.md new file mode 100644 index 0000000000..b9d25ad9eb --- /dev/null +++ b/docs/adr/0008-configuration-settings.md @@ -0,0 +1,52 @@ +--- +status: proposed +date: 2025-07-07 +author: Pascal Bleser +decision-makers: +consulted: +informed: +title: "Groupware Configuration Settings" +template: https://raw.githubusercontent.com/adr/madr/refs/tags/4.0.0/template/adr-template.md +--- + +* Status: draft + +## Context + +User Preferences need to be configurable through the UI and persisted in a backend service in order to be reliably available and backed up. 
+ +Such configuration options have default values that need to be set on multiple levels: + +* globally +* by tenant +* by sub-tenant +* by group of users +* by user + +Some options might even be client-specific, e.g. differ between the OpenCloud Web UI on desktop and the OpenCloud Web UI on mobile. + +Furthermore, some options might be enforced and may not be overridden on every level (e.g. only globally or by tenant, by not modifiable by users.) + +Ideally, the configuration settings have an architecture that permits pluggable sources. + +This level of necessary complexity has a few drawbacks, the primary one being that it can become difficult to find out why a user sees this or that behavior in their UI, and thus to trace down where a given configuration setting is made (globally, on tenant level, etc...). It is thus critical to include tooling that allows to debug them. + +## Considered Options + +TODO + +## Decision Outcome + +TODO + +### Consequences + +TODO + +### Confirmation + +TODO + +## Pros and Cons of the Options + +TODO diff --git a/go.mod b/go.mod index f4dd437f0a..60b3de7c2e 100644 --- a/go.mod +++ b/go.mod @@ -7,7 +7,9 @@ require ( github.com/CiscoM31/godata v1.0.11 github.com/KimMachineGun/automemlimit v0.7.5 github.com/Masterminds/semver v1.5.0 + github.com/MicahParks/jwkset v0.8.0 github.com/MicahParks/keyfunc/v2 v2.1.0 + github.com/MicahParks/keyfunc/v3 v3.3.11 github.com/Nerzal/gocloak/v13 v13.9.0 github.com/bbalet/stopwords v1.0.0 github.com/beevik/etree v1.6.0 @@ -19,6 +21,7 @@ require ( github.com/dhowden/tag v0.0.0-20240417053706-3d75831295e8 github.com/dutchcoders/go-clamd v0.0.0-20170520113014-b970184f4d9e github.com/gabriel-vasile/mimetype v1.4.11 + github.com/emersion/go-imap/v2 v2.0.0-beta.5 github.com/ggwhite/go-masker v1.1.0 github.com/go-chi/chi/v5 v5.2.3 github.com/go-chi/render v1.0.3 @@ -137,6 +140,7 @@ require ( github.com/antithesishq/antithesis-sdk-go v0.4.3-default-no-op // indirect github.com/armon/go-radix v1.0.0 // indirect github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 // indirect + github.com/aymerick/douceur v0.2.0 // indirect github.com/beorn7/perks v1.0.1 // indirect github.com/bitly/go-simplejson v0.5.0 // indirect github.com/bits-and-blooms/bitset v1.22.0 // indirect @@ -159,9 +163,11 @@ require ( github.com/blevesearch/zapx/v16 v16.2.7 // indirect github.com/bluele/gcache v0.0.2 // indirect github.com/bombsimon/logrusr/v3 v3.1.0 // indirect + github.com/brianvoe/gofakeit/v7 v7.7.3 // indirect github.com/cenkalti/backoff/v4 v4.3.0 // indirect github.com/cenkalti/backoff/v5 v5.0.3 // indirect github.com/ceph/go-ceph v0.36.0 // indirect + github.com/cention-sany/utf7 v0.0.0-20170124080048-26cad61bd60a // indirect github.com/cespare/xxhash/v2 v2.3.0 // indirect github.com/cevaris/ordered_map v0.0.0-20190319150403-3adeae072e73 // indirect github.com/clipperhouse/displaywidth v0.3.1 // indirect @@ -193,8 +199,11 @@ require ( github.com/docker/go-connections v0.6.0 // indirect github.com/docker/go-units v0.5.0 // indirect github.com/dustin/go-humanize v1.0.1 // indirect + github.com/dustinkirkland/golang-petname v0.0.0-20240428194347-eebcea082ee0 // indirect github.com/ebitengine/purego v0.8.4 // indirect github.com/egirna/icap v0.0.0-20181108071049-d5ee18bd70bc // indirect + github.com/emersion/go-message v0.18.1 // indirect + github.com/emersion/go-sasl v0.0.0-20231106173351-e73c9f7bad43 // indirect github.com/emirpasic/gods v1.18.1 // indirect github.com/emvi/iso-639-1 v1.1.1 // indirect github.com/evanphx/json-patch/v5 
v5.5.0 // indirect @@ -204,6 +213,8 @@ require ( github.com/gdexlab/go-render v1.0.1 // indirect github.com/go-acme/lego/v4 v4.4.0 // indirect github.com/go-asn1-ber/asn1-ber v1.5.8-0.20250403174932-29230038a667 // indirect + github.com/go-crypt/crypt v0.4.5 // indirect + github.com/go-crypt/x v0.4.7 // indirect github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 // indirect github.com/go-git/go-billy/v5 v5.6.2 // indirect github.com/go-git/go-git/v5 v5.13.2 // indirect @@ -224,7 +235,7 @@ require ( github.com/go-sql-driver/mysql v1.9.3 // indirect github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572 // indirect github.com/go-task/slim-sprig/v3 v3.0.0 // indirect - github.com/go-test/deep v1.1.0 // indirect + github.com/go-test/deep v1.1.1 // indirect github.com/go-viper/mapstructure/v2 v2.4.0 // indirect github.com/gobwas/glob v0.2.3 // indirect github.com/gobwas/httphead v0.1.0 // indirect @@ -235,6 +246,7 @@ require ( github.com/gofrs/flock v0.13.0 // indirect github.com/gofrs/uuid v4.4.0+incompatible // indirect github.com/gogo/protobuf v1.3.2 // indirect + github.com/gogs/chardet v0.0.0-20211120154057-b7413eaefb8f // indirect github.com/golang-jwt/jwt/v4 v4.5.2 // indirect github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect github.com/golang/snappy v0.0.4 // indirect @@ -244,8 +256,10 @@ require ( github.com/google/pprof v0.0.0-20250403155104-27863c87afa6 // indirect github.com/google/renameio/v2 v2.0.1 // indirect github.com/gookit/goutil v0.7.1 // indirect + github.com/gorilla/css v1.0.1 // indirect github.com/gorilla/handlers v1.5.1 // indirect github.com/gorilla/schema v1.4.1 // indirect + github.com/gorilla/websocket v1.5.3 // indirect github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 // indirect github.com/hashicorp/go-hclog v1.6.3 // indirect github.com/hashicorp/go-plugin v1.7.0 // indirect @@ -253,8 +267,10 @@ require ( github.com/huandu/xstrings v1.5.0 // indirect github.com/iancoleman/strcase v0.3.0 // indirect github.com/imdario/mergo v0.3.15 // indirect + github.com/inbucket/html2text v0.9.0 // indirect github.com/inconshreveable/mousetrap v1.1.0 // indirect github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 // indirect + github.com/jhillyerd/enmime/v2 v2.2.0 // indirect github.com/jonboulle/clockwork v0.5.0 // indirect github.com/json-iterator/go v1.1.12 // indirect github.com/juliangruber/go-intersect v1.1.0 // indirect @@ -285,6 +301,7 @@ require ( github.com/mattn/go-sqlite3 v1.14.32 // indirect github.com/maxymania/go-system v0.0.0-20170110133659-647cc364bf0b // indirect github.com/mendsley/gojwk v0.0.0-20141217222730-4d5ec6e58103 // indirect + github.com/microcosm-cc/bluemonday v1.0.27 // indirect github.com/miekg/dns v1.1.57 // indirect github.com/mileusna/useragent v1.3.5 // indirect github.com/minio/crc64nvme v1.1.0 // indirect @@ -352,6 +369,7 @@ require ( github.com/skeema/knownhosts v1.3.0 // indirect github.com/spacewander/go-suffix-tree v0.0.0-20191010040751-0865e368c784 // indirect github.com/spf13/pflag v1.0.10 // indirect + github.com/ssor/bom v0.0.0-20170718123548-6386211fdfcf // indirect github.com/stretchr/objx v0.5.2 // indirect github.com/studio-b12/gowebdav v0.9.0 // indirect github.com/tchap/go-patricia/v2 v2.3.3 // indirect @@ -391,6 +409,7 @@ require ( google.golang.org/genproto v0.0.0-20250303144028-a0af3efb3deb // indirect google.golang.org/genproto/googleapis/rpc v0.0.0-20251022142026-3a174f9686a8 // indirect gopkg.in/cenkalti/backoff.v1 v1.1.0 // indirect + gopkg.in/loremipsum.v1 
v1.1.2 // indirect gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 // indirect gopkg.in/warnings.v0 v0.1.2 // indirect sigs.k8s.io/yaml v1.6.0 // indirect diff --git a/go.sum b/go.sum index 75e411b265..141dc436c7 100644 --- a/go.sum +++ b/go.sum @@ -82,8 +82,12 @@ github.com/Masterminds/semver/v3 v3.4.0 h1:Zog+i5UMtVoCU8oKka5P7i9q9HgrJeGzI9SA1 github.com/Masterminds/semver/v3 v3.4.0/go.mod h1:4V+yj/TJE1HU9XfppCwVMZq3I84lprf4nC11bSS5beM= github.com/Masterminds/sprig v2.22.0+incompatible h1:z4yfnGrZ7netVz+0EDJ0Wi+5VZCSYp4Z0m2dk6cEM60= github.com/Masterminds/sprig v2.22.0+incompatible/go.mod h1:y6hNFY5UBTIWBxnzTeuNhlNS5hqE0NB0E6fgfo2Br3o= +github.com/MicahParks/jwkset v0.8.0 h1:jHtclI38Gibmu17XMI6+6/UB59srp58pQVxePHRK5o8= +github.com/MicahParks/jwkset v0.8.0/go.mod h1:fVrj6TmG1aKlJEeceAz7JsXGTXEn72zP1px3us53JrA= github.com/MicahParks/keyfunc/v2 v2.1.0 h1:6ZXKb9Rp6qp1bDbJefnG7cTH8yMN1IC/4nf+GVjO99k= github.com/MicahParks/keyfunc/v2 v2.1.0/go.mod h1:rW42fi+xgLJ2FRRXAfNx9ZA8WpD4OeE/yHVMteCkw9k= +github.com/MicahParks/keyfunc/v3 v3.3.11 h1:eA6wNltwdSRX2gtpTwZseBCC9nGeBkI9KxHtTyZbDbo= +github.com/MicahParks/keyfunc/v3 v3.3.11/go.mod h1:y6Ed3dMgNKTcpxbaQHD8mmrYDUZWJAxteddA6OQj+ag= github.com/Microsoft/go-winio v0.5.2/go.mod h1:WpS1mjBmmwHBEWmogvA2mj8546UReBk4v8QkMxJ6pZY= github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY= github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU= @@ -134,6 +138,8 @@ github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5/go.mod h1:wHh0iHkY github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 h1:DklsrG3dyBCFEj5IhUbnKptjxatkF07cF2ak3yi77so= github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2/go.mod h1:WaHUgvxTVq04UNunO+XhnAqY/wQc+bxr74GqbsZ/Jqw= github.com/aws/aws-sdk-go v1.37.27/go.mod h1:hcU610XS61/+aQV88ixoOzUoG7v3b31pl2zKMmprdro= +github.com/aymerick/douceur v0.2.0 h1:Mv+mAeH1Q+n9Fr+oyamOlAkUNPWPlA8PPGR0QAaYuPk= +github.com/aymerick/douceur v0.2.0/go.mod h1:wlT5vV2O3h55X9m7iVYN0TBM0NH/MmbLnd30/FjWUq4= github.com/bbalet/stopwords v1.0.0 h1:0TnGycCtY0zZi4ltKoOGRFIlZHv0WqpoIGUsObjztfo= github.com/bbalet/stopwords v1.0.0/go.mod h1:sAWrQoDMfqARGIn4s6dp7OW7ISrshUD8IP2q3KoqPjc= github.com/beevik/etree v1.6.0 h1:u8Kwy8pp9D9XeITj2Z0XtA5qqZEmtJtuXZRQi+j03eE= @@ -194,6 +200,8 @@ github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869/go.mod h1:Ekp36dR github.com/bombsimon/logrusr/v3 v3.1.0 h1:zORbLM943D+hDMGgyjMhSAz/iDz86ZV72qaak/CA0zQ= github.com/bombsimon/logrusr/v3 v3.1.0/go.mod h1:PksPPgSFEL2I52pla2glgCyyd2OqOHAnFF5E+g8Ixco= github.com/boombuler/barcode v1.0.1-0.20190219062509-6c824513bacc/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8= +github.com/brianvoe/gofakeit/v7 v7.7.3 h1:RWOATEGpJ5EVg2nN8nlaEyaV/aB4d6c3GqYrbqQekss= +github.com/brianvoe/gofakeit/v7 v7.7.3/go.mod h1:QXuPeBw164PJCzCUZVmgpgHJ3Llj49jSLVkKPMtxtxA= github.com/bufbuild/protocompile v0.14.1 h1:iA73zAf/fyljNjQKwYzUHD6AD4R8KMasmwa/FBatYVw= github.com/bufbuild/protocompile v0.14.1/go.mod h1:ppVdAIhbr2H8asPk6k4pY7t9zB1OU5DoEw9xY/FUi1c= github.com/butonic/go-micro/v4 v4.11.1-0.20241115112658-b5d4de5ed9b3 h1:h8Z0hBv5tg/uZMKu8V47+DKWYVQg0lYP8lXDQq7uRpE= @@ -212,6 +220,8 @@ github.com/census-instrumentation/opencensus-proto v0.2.0/go.mod h1:f6KPmirojxKA github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= github.com/ceph/go-ceph v0.36.0 h1:IDE4vEF+4fmjve+CPjD1WStgfQ+Lh6vD+9PMUI712KI= github.com/ceph/go-ceph v0.36.0/go.mod 
h1:fGCbndVDLuHW7q2954d6y+tgPFOBnRLqJRe2YXyngw4= +github.com/cention-sany/utf7 v0.0.0-20170124080048-26cad61bd60a h1:MISbI8sU/PSK/ztvmWKFcI7UGb5/HQT7B+i3a2myKgI= +github.com/cention-sany/utf7 v0.0.0-20170124080048-26cad61bd60a/go.mod h1:2GxOXOlEPAMFPfp014mK1SWq8G8BN8o7/dfYqJrVGn8= github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/cespare/xxhash/v2 v2.1.2/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= @@ -318,6 +328,8 @@ github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4 github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY= github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto= +github.com/dustinkirkland/golang-petname v0.0.0-20240428194347-eebcea082ee0 h1:aYo8nnk3ojoQkP5iErif5Xxv0Mo0Ga/FR5+ffl/7+Nk= +github.com/dustinkirkland/golang-petname v0.0.0-20240428194347-eebcea082ee0/go.mod h1:8AuBTZBRSFqEYBPYULd+NN474/zZBLP+6WeT5S9xlAc= github.com/dutchcoders/go-clamd v0.0.0-20170520113014-b970184f4d9e h1:rcHHSQqzCgvlwP0I/fQ8rQMn/MpHE5gWSLdtpxtP6KQ= github.com/dutchcoders/go-clamd v0.0.0-20170520113014-b970184f4d9e/go.mod h1:Byz7q8MSzSPkouskHJhX0er2mZY/m0Vj5bMeMCkkyY4= github.com/eapache/go-resiliency v1.1.0/go.mod h1:kFI+JgMyC7bLPUVY133qvEBtVayf5mFgVsvEsIPBvNs= @@ -329,6 +341,12 @@ github.com/egirna/icap v0.0.0-20181108071049-d5ee18bd70bc h1:6IxmRbXV8WXVkcYcTzk github.com/egirna/icap v0.0.0-20181108071049-d5ee18bd70bc/go.mod h1:FdVN2WHg7zOHhJ7kZQdDorfFhIfqZaHttjAzDDvAXHE= github.com/elazarl/goproxy v1.4.0 h1:4GyuSbFa+s26+3rmYNSuUVsx+HgPrV1bk1jXI0l9wjM= github.com/elazarl/goproxy v1.4.0/go.mod h1:X/5W/t+gzDyLfHW4DrMdpjqYjpXsURlBt9lpBDxZZZQ= +github.com/emersion/go-imap/v2 v2.0.0-beta.5 h1:H3858DNmBuXyMK1++YrQIRdpKE1MwBc+ywBtg3n+0wA= +github.com/emersion/go-imap/v2 v2.0.0-beta.5/go.mod h1:BZTFHsS1hmgBkFlHqbxGLXk2hnRqTItUgwjSSCsYNAk= +github.com/emersion/go-message v0.18.1 h1:tfTxIoXFSFRwWaZsgnqS1DSZuGpYGzSmCZD8SK3QA2E= +github.com/emersion/go-message v0.18.1/go.mod h1:XpJyL70LwRvq2a8rVbHXikPgKj8+aI0kGdHlg16ibYA= +github.com/emersion/go-sasl v0.0.0-20231106173351-e73c9f7bad43 h1:hH4PQfOndHDlpzYfLAAfl63E8Le6F2+EL/cdhlkyRJY= +github.com/emersion/go-sasl v0.0.0-20231106173351-e73c9f7bad43/go.mod h1:iL2twTeMvZnrg54ZoPDNfJaJaqy0xIQFuBdrLsmspwQ= github.com/emirpasic/gods v1.18.1 h1:FXtiHYKDGKCW2KzwZKx0iC0PQmdlorYgdFG9jPXJ1Bc= github.com/emirpasic/gods v1.18.1/go.mod h1:8tpGGwCnJ5H4r6BWwaV6OrWmMoPhUl5jm/FMNAnJvWQ= github.com/emvi/iso-639-1 v1.1.1 h1:7jrl1Sqw9ZYWmCOaH+cpQotLbGr/khwlLPXlBvE8WXU= @@ -384,6 +402,10 @@ github.com/go-chi/chi/v5 v5.2.3/go.mod h1:L2yAIGWB3H+phAw1NxKwWM+7eUH/lU8pOMm5hH github.com/go-chi/render v1.0.3 h1:AsXqd2a1/INaIfUSKq3G5uA8weYx20FOsM7uSoCyyt4= github.com/go-chi/render v1.0.3/go.mod h1:/gr3hVkmYR0YlEy3LxCuVRFzEu9Ruok+gFqbIofjao0= github.com/go-cmd/cmd v1.0.5/go.mod h1:y8q8qlK5wQibcw63djSl/ntiHUHXHGdCkPk0j4QeW4s= +github.com/go-crypt/crypt v0.4.5 h1:cCR5vVejGk1kurwoGfkLxGORY+Pc9GiE7xKCpyHZ3n4= +github.com/go-crypt/crypt v0.4.5/go.mod h1:cQijpCkqavdF52J1bE0PObWwqKKjQCHASHQ2dtLzOJs= +github.com/go-crypt/x v0.4.7 h1:hObjW67nhq/GI1jaD7XCv5RoiVKzF46XIbULgzH71oU= +github.com/go-crypt/x v0.4.7/go.mod h1:K3q7VmLC0U1QFAPn0SQvXjkAtu6FJuH0rN9LNqobX6k= github.com/go-errors/errors v1.0.1/go.mod h1:f4zRHt4oKfwPJE5k8C9vpYG+aDHdBFUsgrm6/TyX73Q= github.com/go-git/gcfg 
v1.5.1-0.20230307220236-3a3c6141e376 h1:+zs/tPmkDkHx3U66DAb0lQFJrpS6731Oaa12ikc+DiI= github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376/go.mod h1:an3vInlBmSxCcxctByoQdvwPiA7DTK7jaaFDBTtu0ic= @@ -471,6 +493,7 @@ github.com/go-task/slim-sprig/v3 v3.0.0 h1:sUs3vkvUymDpBKi3qH1YSqBQk9+9D/8M2mN1v github.com/go-task/slim-sprig/v3 v3.0.0/go.mod h1:W848ghGpv3Qj3dhTPRyJypKRiqCdHZiAzKg9hl15HA8= github.com/go-test/deep v1.1.0 h1:WOcxcdHcvdgThNXjw0t76K42FXTU7HpNQWHpA2HHNlg= github.com/go-test/deep v1.1.0/go.mod h1:5C2ZWiW0ErCdrYzpqxLbTX7MG14M9iiw8DgHncVwcsE= +github.com/go-test/deep v1.1.1/go.mod h1:5C2ZWiW0ErCdrYzpqxLbTX7MG14M9iiw8DgHncVwcsE= github.com/go-viper/mapstructure/v2 v2.4.0 h1:EBsztssimR/CONLSZZ04E8qAkxNYq4Qp9LvH92wZUgs= github.com/go-viper/mapstructure/v2 v2.4.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM= github.com/gobs/pretty v0.0.0-20180724170744-09732c25a95b/go.mod h1:Xo4aNUOrJnVruqWQJBtW6+bTBDTniY8yZum5rF3b5jw= @@ -499,6 +522,8 @@ github.com/gogo/protobuf v1.2.0/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7a github.com/gogo/protobuf v1.2.1/go.mod h1:hp+jE20tsWTFYpLwKvXlhS1hjn+gTNwPg2I6zVXpSg4= github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= +github.com/gogs/chardet v0.0.0-20211120154057-b7413eaefb8f h1:3BSP1Tbs2djlpprl7wCLuiqMaUh5SJkkzI2gDs+FgLs= +github.com/gogs/chardet v0.0.0-20211120154057-b7413eaefb8f/go.mod h1:Pcatq5tYkCW2Q6yrR2VRHlbHpZ/R4/7qyL1TCF7vl14= github.com/goji/httpauth v0.0.0-20160601135302-2da839ab0f4d/go.mod h1:nnjvkQ9ptGaCkuDUx6wNykzzlUixGxvkme+H/lnzb+A= github.com/golang-jwt/jwt/v4 v4.5.2 h1:YtQM7lnr8iZ+j5q71MGKkNw9Mn7AjHM68uc9g5fXeUI= github.com/golang-jwt/jwt/v4 v4.5.2/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= @@ -606,6 +631,8 @@ github.com/gophercloud/gophercloud v0.16.0/go.mod h1:wRtmUelyIIv3CSSDI47aUwbs075 github.com/gophercloud/utils v0.0.0-20210216074907-f6de111f2eae/go.mod h1:wx8HMD8oQD0Ryhz6+6ykq75PJ79iPyEqYHfwZ4l7OsA= github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= github.com/gorilla/context v1.1.1/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg= +github.com/gorilla/css v1.0.1 h1:ntNaBIghp6JmvWnxbZKANoLyuXTPZ4cAMlo6RyhlbO8= +github.com/gorilla/css v1.0.1/go.mod h1:BvnYkspnSzMmwRK+b8/xgNPLiIuNZr6vbZBTPQ2A3b0= github.com/gorilla/handlers v1.5.1 h1:9lRY6j8DEeeBT10CvO9hGW0gmky0BprnvDI5vfhUHH4= github.com/gorilla/handlers v1.5.1/go.mod h1:t8XrUpc4KVXb7HGyJ4/cEnwQiaxrX/hz1Zv/4g96P1Q= github.com/gorilla/mux v1.6.2/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs= @@ -615,6 +642,8 @@ github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWS github.com/gorilla/schema v1.4.1 h1:jUg5hUjCSDZpNGLuXQOgIWGdlgrIdYvgQ0wZtdK1M3E= github.com/gorilla/schema v1.4.1/go.mod h1:Dg5SSm5PV60mhF2NFaTV1xuYYj8tV8NOPRo4FggUMnM= github.com/gorilla/websocket v1.4.2/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= +github.com/gorilla/websocket v1.5.3 h1:saDtZ6Pbx/0u+bgYQ3q96pZgCzfhKXGPqt7kZ72aNNg= +github.com/gorilla/websocket v1.5.3/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= github.com/grpc-ecosystem/go-grpc-middleware v1.0.0/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs= github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 h1:UH//fgunKIs4JdUbpDl1VZCDaL56wXCB/5+wF6uHfaI= github.com/grpc-ecosystem/go-grpc-middleware v1.4.0/go.mod h1:g5qyo/la0ALbONm6Vbp88Yd8NsDy6rZz+RcrMPxvld8= @@ -664,6 +693,8 @@ 
github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1: github.com/iij/doapi v0.0.0-20190504054126-0bbf12d6d7df/go.mod h1:QMZY7/J/KSQEhKWFeDesPjMj+wCHReeknARU3wqlyN4= github.com/imdario/mergo v0.3.15 h1:M8XP7IuFNsqUx6VPK2P9OSmsYsI/YFaGil0uD21V3dM= github.com/imdario/mergo v0.3.15/go.mod h1:WBLT9ZmE3lPoWsEzCh9LPo3TiwVN+ZKEjmz+hD27ysY= +github.com/inbucket/html2text v0.9.0 h1:ULJmVcBEMAcmLE+/rN815KG1Fx6+a4HhbUxiDiN+qks= +github.com/inbucket/html2text v0.9.0/go.mod h1:QDaumzl+/OzlSVbNohhmg+yAy5pKjUjzCKW2BMvztKE= github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= @@ -690,6 +721,8 @@ github.com/jellydator/ttlcache/v2 v2.11.1/go.mod h1:RtE5Snf0/57e+2cLWFYWCCsLas2H github.com/jellydator/ttlcache/v3 v3.4.0 h1:YS4P125qQS0tNhtL6aeYkheEaB/m8HCqdMMP4mnWdTY= github.com/jellydator/ttlcache/v3 v3.4.0/go.mod h1:Hw9EgjymziQD3yGsQdf1FqFdpp7YjFMd4Srg5EJlgD4= github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI= +github.com/jhillyerd/enmime/v2 v2.2.0 h1:Pe35MB96eZK5Q0XjlvPftOgWypQpd1gcbfJKAt7rsB8= +github.com/jhillyerd/enmime/v2 v2.2.0/go.mod h1:SOBXlCemjhiV2DvHhAKnJiWrtJGS/Ffuw4Iy7NjBTaI= github.com/jhump/protoreflect v1.17.0 h1:qOEr613fac2lOuTgWN4tPAtLL7fUSbuJL5X5XumQh94= github.com/jhump/protoreflect v1.17.0/go.mod h1:h9+vUUL38jiBzck8ck+6G/aeMX8Z4QUY/NiJPwPNi+8= github.com/jinzhu/now v1.1.5 h1:/o9tlHleP7gOFmsnYNz3RGnqzefHA47wQpKrrdTIwXQ= @@ -842,6 +875,8 @@ github.com/mendsley/gojwk v0.0.0-20141217222730-4d5ec6e58103 h1:Z/i1e+gTZrmcGeZy github.com/mendsley/gojwk v0.0.0-20141217222730-4d5ec6e58103/go.mod h1:o9YPB5aGP8ob35Vy6+vyq3P3bWe7NQWzf+JLiXCiMaE= github.com/mfridman/tparse v0.18.0 h1:wh6dzOKaIwkUGyKgOntDW4liXSo37qg5AXbIhkMV3vE= github.com/mfridman/tparse v0.18.0/go.mod h1:gEvqZTuCgEhPbYk/2lS3Kcxg1GmTxxU7kTC8DvP0i/A= +github.com/microcosm-cc/bluemonday v1.0.27 h1:MpEUotklkwCSLeH+Qdx1VJgNqLlpY2KXwXFM08ygZfk= +github.com/microcosm-cc/bluemonday v1.0.27/go.mod h1:jFi9vgW+H7c3V0lb6nR74Ib/DIB5OBs92Dimizgw2cA= github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg= github.com/miekg/dns v1.1.40/go.mod h1:KNUDUusw/aVsxyTYZM1oqvCicbwhgbNgztCETuNZ7xM= github.com/miekg/dns v1.1.57 h1:Jzi7ApEIzwEPLHWRcafCN9LZSBbqQpxjt/wpgvg7wcM= @@ -1163,6 +1198,8 @@ github.com/spf13/pflag v1.0.10 h1:4EBh2KAYBwaONj6b2Ye1GiHfwjqyROoF4RwYO+vPwFk= github.com/spf13/pflag v1.0.10/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= github.com/spf13/viper v1.7.0/go.mod h1:8WkrPz2fc9jxqZNCJI/76HCieCp4Q8HaLFoCha5qpdg= github.com/spf13/viper v1.7.1/go.mod h1:8WkrPz2fc9jxqZNCJI/76HCieCp4Q8HaLFoCha5qpdg= +github.com/ssor/bom v0.0.0-20170718123548-6386211fdfcf h1:pvbZ0lM0XWPBqUKqFU8cmavspvIl9nulOYwdy6IFRRo= +github.com/ssor/bom v0.0.0-20170718123548-6386211fdfcf/go.mod h1:RJID2RhlZKId02nZ62WenDCkgHFerpIOmW0iT7GKmXM= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.3.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE= @@ -1784,6 +1821,8 @@ gopkg.in/ini.v1 v1.51.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= gopkg.in/ini.v1 v1.51.1/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= gopkg.in/ini.v1 v1.57.0/go.mod 
h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= gopkg.in/ini.v1 v1.62.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= +gopkg.in/loremipsum.v1 v1.1.2 h1:12APklfJKuGszqZsrArW5QoQh03/W+qyCCjvnDuS6Tw= +gopkg.in/loremipsum.v1 v1.1.2/go.mod h1:TuRvzFuzuejXj+odBU6Tubp/EPUyGb9wmSvHenyP2Ts= gopkg.in/ns1/ns1-go.v2 v2.4.4/go.mod h1:GMnKY+ZuoJ+lVLL+78uSTjwTz2jMazq6AfGKQOYhsPk= gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo= gopkg.in/square/go-jose.v2 v2.5.1/go.mod h1:M9dMgbHiYLoDGQrXy7OpJDJWiKiU//h+vD76mk0e1AI= diff --git a/opencloud/cmd/opencloud/.gitignore b/opencloud/cmd/opencloud/.gitignore new file mode 100644 index 0000000000..ec09c2e186 --- /dev/null +++ b/opencloud/cmd/opencloud/.gitignore @@ -0,0 +1 @@ +/__debug_bin* diff --git a/opencloud/pkg/command/services.go b/opencloud/pkg/command/services.go index e87548979c..02cdb70ee1 100644 --- a/opencloud/pkg/command/services.go +++ b/opencloud/pkg/command/services.go @@ -13,6 +13,7 @@ import ( appprovider "github.com/opencloud-eu/opencloud/services/app-provider/pkg/command" appregistry "github.com/opencloud-eu/opencloud/services/app-registry/pkg/command" audit "github.com/opencloud-eu/opencloud/services/audit/pkg/command" + authapi "github.com/opencloud-eu/opencloud/services/auth-api/pkg/command" authapp "github.com/opencloud-eu/opencloud/services/auth-app/pkg/command" authbasic "github.com/opencloud-eu/opencloud/services/auth-basic/pkg/command" authbearer "github.com/opencloud-eu/opencloud/services/auth-bearer/pkg/command" @@ -25,6 +26,7 @@ import ( gateway "github.com/opencloud-eu/opencloud/services/gateway/pkg/command" graph "github.com/opencloud-eu/opencloud/services/graph/pkg/command" groups "github.com/opencloud-eu/opencloud/services/groups/pkg/command" + groupware "github.com/opencloud-eu/opencloud/services/groupware/pkg/command" idm "github.com/opencloud-eu/opencloud/services/idm/pkg/command" idp "github.com/opencloud-eu/opencloud/services/idp/pkg/command" invitations "github.com/opencloud-eu/opencloud/services/invitations/pkg/command" @@ -138,6 +140,11 @@ var svccmds = []register.Command{ cfg.Groups.Commons = cfg.Commons }) }, + func(cfg *config.Config) *cli.Command { + return ServiceCommand(cfg, cfg.Groupware.Service.Name, groupware.GetCommands(cfg.Groupware), func(c *config.Config) { + cfg.Groupware.Commons = cfg.Commons + }) + }, func(cfg *config.Config) *cli.Command { return ServiceCommand(cfg, cfg.IDM.Service.Name, idm.GetCommands(cfg.IDM), func(c *config.Config) { cfg.IDM.Commons = cfg.Commons @@ -263,6 +270,11 @@ var svccmds = []register.Command{ cfg.Webfinger.Commons = cfg.Commons }) }, + func(cfg *config.Config) *cli.Command { + return ServiceCommand(cfg, cfg.AuthApi.Service.Name, authapi.GetCommands(cfg.AuthApi), func(c *config.Config) { + cfg.AuthApi.Commons = cfg.Commons + }) + }, } // ServiceCommand is the entry point for the all service commands. 
diff --git a/opencloud/pkg/init/structs.go b/opencloud/pkg/init/structs.go index 89f836441a..1f2a554054 100644 --- a/opencloud/pkg/init/structs.go +++ b/opencloud/pkg/init/structs.go @@ -32,6 +32,7 @@ type OpenCloudConfig struct { AuthBearer AuthbearerService `yaml:"auth_bearer"` Users UsersAndGroupsService `yaml:"users"` Groups UsersAndGroupsService `yaml:"groups"` + Groupware GroupwareService `yaml:"groupware"` Ocdav InsecureService `yaml:"ocdav"` Ocm OcmService `yaml:"ocm"` Thumbnails ThumbnailService `yaml:"thumbnails"` @@ -126,6 +127,17 @@ type GraphService struct { ServiceAccount ServiceAccount `yaml:"service_account"` } +// GroupwareSettings is the configuration for the groupware settings +type GroupwareSettings struct { + WebdavAllowInsecure bool `yaml:"webdav_allow_insecure"` + Cs3AllowInsecure bool `yaml:"cs3_allow_insecure"` +} + +// GroupwareService is the configuration for the groupware service +type GroupwareService struct { + Groupware GroupwareSettings +} + // IdmService is the configuration for the IDM service type IdmService struct { ServiceUserPasswords ServiceUserPasswordsSettings `yaml:"service_user_passwords"` diff --git a/opencloud/pkg/runtime/service/service.go b/opencloud/pkg/runtime/service/service.go index a02bab58cf..c5ea45a3c3 100644 --- a/opencloud/pkg/runtime/service/service.go +++ b/opencloud/pkg/runtime/service/service.go @@ -24,6 +24,7 @@ import ( appProvider "github.com/opencloud-eu/opencloud/services/app-provider/pkg/command" appRegistry "github.com/opencloud-eu/opencloud/services/app-registry/pkg/command" audit "github.com/opencloud-eu/opencloud/services/audit/pkg/command" + authapi "github.com/opencloud-eu/opencloud/services/auth-api/pkg/command" authapp "github.com/opencloud-eu/opencloud/services/auth-app/pkg/command" authbasic "github.com/opencloud-eu/opencloud/services/auth-basic/pkg/command" authmachine "github.com/opencloud-eu/opencloud/services/auth-machine/pkg/command" @@ -35,6 +36,7 @@ import ( gateway "github.com/opencloud-eu/opencloud/services/gateway/pkg/command" graph "github.com/opencloud-eu/opencloud/services/graph/pkg/command" groups "github.com/opencloud-eu/opencloud/services/groups/pkg/command" + groupware "github.com/opencloud-eu/opencloud/services/groupware/pkg/command" idm "github.com/opencloud-eu/opencloud/services/idm/pkg/command" idp "github.com/opencloud-eu/opencloud/services/idp/pkg/command" invitations "github.com/opencloud-eu/opencloud/services/invitations/pkg/command" @@ -348,6 +350,16 @@ func NewService(ctx context.Context, options ...Option) (*Service, error) { cfg.Notifications.Commons = cfg.Commons return notifications.Execute(cfg.Notifications) }) + areg(opts.Config.AuthApi.Service.Name, func(ctx context.Context, cfg *occfg.Config) error { + cfg.AuthApi.Context = ctx + cfg.AuthApi.Commons = cfg.Commons + return authapi.Execute(cfg.AuthApi) + }) + areg(opts.Config.Groupware.Service.Name, func(ctx context.Context, cfg *occfg.Config) error { + cfg.Groupware.Context = ctx + cfg.Groupware.Commons = cfg.Commons + return groupware.Execute(cfg.Groupware) + }) return s, nil } diff --git a/pkg/config/config.go b/pkg/config/config.go index 1e0c68a41a..31a3d2aa2d 100644 --- a/pkg/config/config.go +++ b/pkg/config/config.go @@ -7,6 +7,7 @@ import ( appProvider "github.com/opencloud-eu/opencloud/services/app-provider/pkg/config" appRegistry "github.com/opencloud-eu/opencloud/services/app-registry/pkg/config" audit "github.com/opencloud-eu/opencloud/services/audit/pkg/config" + authapi 
"github.com/opencloud-eu/opencloud/services/auth-api/pkg/config" authapp "github.com/opencloud-eu/opencloud/services/auth-app/pkg/config" authbasic "github.com/opencloud-eu/opencloud/services/auth-basic/pkg/config" authbearer "github.com/opencloud-eu/opencloud/services/auth-bearer/pkg/config" @@ -19,6 +20,7 @@ import ( gateway "github.com/opencloud-eu/opencloud/services/gateway/pkg/config" graph "github.com/opencloud-eu/opencloud/services/graph/pkg/config" groups "github.com/opencloud-eu/opencloud/services/groups/pkg/config" + groupware "github.com/opencloud-eu/opencloud/services/groupware/pkg/config" idm "github.com/opencloud-eu/opencloud/services/idm/pkg/config" idp "github.com/opencloud-eu/opencloud/services/idp/pkg/config" invitations "github.com/opencloud-eu/opencloud/services/invitations/pkg/config" @@ -100,6 +102,7 @@ type Config struct { Gateway *gateway.Config `yaml:"gateway"` Graph *graph.Config `yaml:"graph"` Groups *groups.Config `yaml:"groups"` + Groupware *groupware.Config `yaml:"groupware"` IDM *idm.Config `yaml:"idm"` IDP *idp.Config `yaml:"idp"` Invitations *invitations.Config `yaml:"invitations"` @@ -125,4 +128,5 @@ type Config struct { WebDAV *webdav.Config `yaml:"webdav"` Webfinger *webfinger.Config `yaml:"webfinger"` Search *search.Config `yaml:"search"` + AuthApi *authapi.Config `yaml:"authapi"` } diff --git a/pkg/config/defaultconfig.go b/pkg/config/defaultconfig.go index cc94e0bec2..cd13c131ae 100644 --- a/pkg/config/defaultconfig.go +++ b/pkg/config/defaultconfig.go @@ -7,6 +7,7 @@ import ( appProvider "github.com/opencloud-eu/opencloud/services/app-provider/pkg/config/defaults" appRegistry "github.com/opencloud-eu/opencloud/services/app-registry/pkg/config/defaults" audit "github.com/opencloud-eu/opencloud/services/audit/pkg/config/defaults" + authapi "github.com/opencloud-eu/opencloud/services/auth-api/pkg/config/defaults" authapp "github.com/opencloud-eu/opencloud/services/auth-app/pkg/config/defaults" authbasic "github.com/opencloud-eu/opencloud/services/auth-basic/pkg/config/defaults" authbearer "github.com/opencloud-eu/opencloud/services/auth-bearer/pkg/config/defaults" @@ -19,6 +20,7 @@ import ( gateway "github.com/opencloud-eu/opencloud/services/gateway/pkg/config/defaults" graph "github.com/opencloud-eu/opencloud/services/graph/pkg/config/defaults" groups "github.com/opencloud-eu/opencloud/services/groups/pkg/config/defaults" + groupware "github.com/opencloud-eu/opencloud/services/groupware/pkg/config/defaults" idm "github.com/opencloud-eu/opencloud/services/idm/pkg/config/defaults" idp "github.com/opencloud-eu/opencloud/services/idp/pkg/config/defaults" invitations "github.com/opencloud-eu/opencloud/services/invitations/pkg/config/defaults" @@ -63,6 +65,7 @@ func DefaultConfig() *Config { AppProvider: appProvider.DefaultConfig(), AppRegistry: appRegistry.DefaultConfig(), Audit: audit.DefaultConfig(), + AuthApi: authapi.DefaultConfig(), AuthApp: authapp.DefaultConfig(), AuthBasic: authbasic.DefaultConfig(), AuthBearer: authbearer.DefaultConfig(), @@ -75,6 +78,7 @@ func DefaultConfig() *Config { Gateway: gateway.DefaultConfig(), Graph: graph.DefaultConfig(), Groups: groups.DefaultConfig(), + Groupware: groupware.DefaultConfig(), IDM: idm.DefaultConfig(), IDP: idp.DefaultConfig(), Invitations: invitations.DefaultConfig(), diff --git a/pkg/jmap/jmap_api.go b/pkg/jmap/jmap_api.go new file mode 100644 index 0000000000..c5578b28fc --- /dev/null +++ b/pkg/jmap/jmap_api.go @@ -0,0 +1,49 @@ +package jmap + +import ( + "context" + "io" + "net/url" + + 
"github.com/opencloud-eu/opencloud/pkg/log" +) + +type ApiClient interface { + Command(ctx context.Context, logger *log.Logger, session *Session, request Request, acceptLanguage string) ([]byte, Language, Error) + io.Closer +} + +type WsPushListener interface { + OnNotification(username string, stateChange StateChange) +} + +type WsClient interface { + DisableNotifications() Error + io.Closer +} + +type WsClientFactory interface { + EnableNotifications(pushState State, sessionProvider func() (*Session, error), listener WsPushListener) (WsClient, Error) + io.Closer +} + +type SessionClient interface { + GetSession(baseurl *url.URL, username string, logger *log.Logger) (SessionResponse, Error) + io.Closer +} + +type BlobClient interface { + UploadBinary(ctx context.Context, logger *log.Logger, session *Session, uploadUrl string, endpoint string, contentType string, acceptLanguage string, content io.Reader) (UploadedBlob, Language, Error) + DownloadBinary(ctx context.Context, logger *log.Logger, session *Session, downloadUrl string, endpoint string, acceptLanguage string) (*BlobDownload, Language, Error) + io.Closer +} + +const ( + logOperation = "operation" + logFetchBodies = "fetch-bodies" + logOffset = "offset" + logLimit = "limit" + logDownloadUrl = "download-url" + logBlobId = "blob-id" + logSinceState = "since-state" +) diff --git a/pkg/jmap/jmap_api_blob.go b/pkg/jmap/jmap_api_blob.go new file mode 100644 index 0000000000..bb0b0a6751 --- /dev/null +++ b/pkg/jmap/jmap_api_blob.go @@ -0,0 +1,137 @@ +package jmap + +import ( + "context" + "encoding/base64" + "io" + "strings" + + "github.com/opencloud-eu/opencloud/pkg/log" +) + +func (j *Client) GetBlobMetadata(accountId string, session *Session, ctx context.Context, logger *log.Logger, acceptLanguage string, id string) (*Blob, SessionState, State, Language, Error) { + cmd, jerr := j.request(session, logger, + invocation(CommandBlobGet, BlobGetCommand{ + AccountId: accountId, + Ids: []string{id}, + // add BlobPropertyData to retrieve the data + Properties: []string{BlobPropertyDigestSha256, BlobPropertyDigestSha512, BlobPropertySize}, + }, "0"), + ) + if jerr != nil { + return nil, "", "", "", jerr + } + + return command(j.api, logger, ctx, session, j.onSessionOutdated, cmd, acceptLanguage, func(body *Response) (*Blob, State, Error) { + var response BlobGetResponse + err := retrieveResponseMatchParameters(logger, body, CommandBlobGet, "0", &response) + if err != nil { + return nil, "", err + } + + if len(response.List) != 1 { + logger.Error().Msgf("%T.List has %v entries instead of 1", response, len(response.List)) + return nil, "", simpleError(err, JmapErrorInvalidJmapResponsePayload) + } + get := response.List[0] + return &get, response.State, nil + }) +} + +type UploadedBlobWithHash struct { + BlobId string `json:"blobId"` + Size int `json:"size,omitzero"` + Type string `json:"type,omitempty"` + Sha512 string `json:"sha:512,omitempty"` +} + +func (j *Client) UploadBlobStream(accountId string, session *Session, ctx context.Context, logger *log.Logger, acceptLanguage string, contentType string, body io.Reader) (UploadedBlob, Language, Error) { + logger = log.From(logger.With().Str(logEndpoint, session.UploadEndpoint)) + // TODO(pbleser-oc) use a library for proper URL template parsing + uploadUrl := strings.ReplaceAll(session.UploadUrlTemplate, "{accountId}", accountId) + return j.blob.UploadBinary(ctx, logger, session, uploadUrl, session.UploadEndpoint, contentType, acceptLanguage, body) +} + +func (j *Client) 
DownloadBlobStream(accountId string, blobId string, name string, typ string, session *Session, ctx context.Context, logger *log.Logger, acceptLanguage string) (*BlobDownload, Language, Error) { + logger = log.From(logger.With().Str(logEndpoint, session.DownloadEndpoint)) + // TODO(pbleser-oc) use a library for proper URL template parsing + downloadUrl := session.DownloadUrlTemplate + downloadUrl = strings.ReplaceAll(downloadUrl, "{accountId}", accountId) + downloadUrl = strings.ReplaceAll(downloadUrl, "{blobId}", blobId) + downloadUrl = strings.ReplaceAll(downloadUrl, "{name}", name) + downloadUrl = strings.ReplaceAll(downloadUrl, "{type}", typ) + logger = log.From(logger.With().Str(logDownloadUrl, downloadUrl).Str(logBlobId, blobId)) + return j.blob.DownloadBinary(ctx, logger, session, downloadUrl, session.DownloadEndpoint, acceptLanguage) +} + +func (j *Client) UploadBlob(accountId string, session *Session, ctx context.Context, logger *log.Logger, acceptLanguage string, data []byte, contentType string) (UploadedBlobWithHash, SessionState, State, Language, Error) { + encoded := base64.StdEncoding.EncodeToString(data) + + upload := BlobUploadCommand{ + AccountId: accountId, + Create: map[string]UploadObject{ + "0": { + Data: []DataSourceObject{{ + DataAsBase64: encoded, + }}, + Type: contentType, + }, + }, + } + + getHash := BlobGetRefCommand{ + AccountId: accountId, + IdRef: &ResultReference{ + ResultOf: "0", + Name: CommandBlobUpload, + Path: "/ids", + }, + Properties: []string{BlobPropertyDigestSha512}, + } + + cmd, jerr := j.request(session, logger, + invocation(CommandBlobUpload, upload, "0"), + invocation(CommandBlobGet, getHash, "1"), + ) + if jerr != nil { + return UploadedBlobWithHash{}, "", "", "", jerr + } + + return command(j.api, logger, ctx, session, j.onSessionOutdated, cmd, acceptLanguage, func(body *Response) (UploadedBlobWithHash, State, Error) { + var uploadResponse BlobUploadResponse + err := retrieveResponseMatchParameters(logger, body, CommandBlobUpload, "0", &uploadResponse) + if err != nil { + return UploadedBlobWithHash{}, "", err + } + + var getResponse BlobGetResponse + err = retrieveResponseMatchParameters(logger, body, CommandBlobGet, "1", &getResponse) + if err != nil { + return UploadedBlobWithHash{}, "", err + } + + if len(uploadResponse.Created) != 1 { + logger.Error().Msgf("%T.Created has %v entries instead of 1", uploadResponse, len(uploadResponse.Created)) + return UploadedBlobWithHash{}, "", simpleError(err, JmapErrorInvalidJmapResponsePayload) + } + upload, ok := uploadResponse.Created["0"] + if !ok { + logger.Error().Msgf("%T.Created has no item '0'", uploadResponse) + return UploadedBlobWithHash{}, "", simpleError(err, JmapErrorInvalidJmapResponsePayload) + } + + if len(getResponse.List) != 1 { + logger.Error().Msgf("%T.List has %v entries instead of 1", getResponse, len(getResponse.List)) + return UploadedBlobWithHash{}, "", simpleError(err, JmapErrorInvalidJmapResponsePayload) + } + get := getResponse.List[0] + + return UploadedBlobWithHash{ + BlobId: upload.Id, + Size: upload.Size, + Type: upload.Type, + Sha512: get.DigestSha512, + }, getResponse.State, nil + }) + +} diff --git a/pkg/jmap/jmap_api_bootstrap.go b/pkg/jmap/jmap_api_bootstrap.go new file mode 100644 index 0000000000..2635bf273a --- /dev/null +++ b/pkg/jmap/jmap_api_bootstrap.go @@ -0,0 +1,75 @@ +package jmap + +import ( + "context" + + "github.com/opencloud-eu/opencloud/pkg/log" + "github.com/opencloud-eu/opencloud/pkg/structs" +) + +type AccountBootstrapResult struct { + Identities 
[]Identity `json:"identities,omitempty"` + Quotas []Quota `json:"quotas,omitempty"` +} + +func (j *Client) GetBootstrap(accountIds []string, session *Session, ctx context.Context, logger *log.Logger, acceptLanguage string) (map[string]AccountBootstrapResult, SessionState, State, Language, Error) { + uniqueAccountIds := structs.Uniq(accountIds) + + logger = j.logger("GetBootstrap", session, logger) + + calls := make([]Invocation, len(uniqueAccountIds)*2) + for i, accountId := range uniqueAccountIds { + calls[i*2+0] = invocation(CommandIdentityGet, IdentityGetCommand{AccountId: accountId}, mcid(accountId, "I")) + calls[i*2+1] = invocation(CommandQuotaGet, QuotaGetCommand{AccountId: accountId}, mcid(accountId, "Q")) + } + + cmd, err := j.request(session, logger, calls...) + if err != nil { + return nil, "", "", "", err + } + return command(j.api, logger, ctx, session, j.onSessionOutdated, cmd, acceptLanguage, func(body *Response) (map[string]AccountBootstrapResult, State, Error) { + identityPerAccount := map[string][]Identity{} + quotaPerAccount := map[string][]Quota{} + identityStatesPerAccount := map[string]State{} + quotaStatesPerAccount := map[string]State{} + for _, accountId := range uniqueAccountIds { + var identityResponse IdentityGetResponse + err = retrieveResponseMatchParameters(logger, body, CommandIdentityGet, mcid(accountId, "I"), &identityResponse) + if err != nil { + return nil, "", err + } else { + identityPerAccount[accountId] = identityResponse.List + identityStatesPerAccount[accountId] = identityResponse.State + } + + var quotaResponse QuotaGetResponse + err = retrieveResponseMatchParameters(logger, body, CommandQuotaGet, mcid(accountId, "Q"), &quotaResponse) + if err != nil { + return nil, "", err + } else { + quotaPerAccount[accountId] = quotaResponse.List + quotaStatesPerAccount[accountId] = quotaResponse.State + } + } + + result := map[string]AccountBootstrapResult{} + for accountId, value := range identityPerAccount { + r, ok := result[accountId] + if !ok { + r = AccountBootstrapResult{} + } + r.Identities = value + result[accountId] = r + } + for accountId, value := range quotaPerAccount { + r, ok := result[accountId] + if !ok { + r = AccountBootstrapResult{} + } + r.Quotas = value + result[accountId] = r + } + + return result, squashStateMaps(identityStatesPerAccount, quotaStatesPerAccount), nil + }) +} diff --git a/pkg/jmap/jmap_api_calendar.go b/pkg/jmap/jmap_api_calendar.go new file mode 100644 index 0000000000..1e78dcb9a2 --- /dev/null +++ b/pkg/jmap/jmap_api_calendar.go @@ -0,0 +1,250 @@ +package jmap + +import ( + "context" + "fmt" + + "github.com/opencloud-eu/opencloud/pkg/log" + "github.com/opencloud-eu/opencloud/pkg/structs" +) + +func (j *Client) ParseICalendarBlob(accountId string, session *Session, ctx context.Context, logger *log.Logger, acceptLanguage string, blobIds []string) (CalendarEventParseResponse, SessionState, State, Language, Error) { + logger = j.logger("ParseICalendarBlob", session, logger) + + cmd, err := j.request(session, logger, + invocation(CommandCalendarEventParse, CalendarEventParseCommand{AccountId: accountId, BlobIds: blobIds}, "0"), + ) + if err != nil { + return CalendarEventParseResponse{}, "", "", "", err + } + + return command(j.api, logger, ctx, session, j.onSessionOutdated, cmd, acceptLanguage, func(body *Response) (CalendarEventParseResponse, State, Error) { + var response CalendarEventParseResponse + err = retrieveResponseMatchParameters(logger, body, CommandCalendarEventParse, "0", &response) + if err != nil { + return
CalendarEventParseResponse{}, "", err + } + return response, "", nil + }) +} + +type CalendarsResponse struct { + Calendars []Calendar `json:"calendars"` + NotFound []string `json:"notFound,omitempty"` +} + +func (j *Client) GetCalendars(accountId string, session *Session, ctx context.Context, logger *log.Logger, acceptLanguage string, ids []string) (CalendarsResponse, SessionState, State, Language, Error) { + return getTemplate(j, "GetCalendars", CommandCalendarGet, + func(accountId string, ids []string) CalendarGetCommand { + return CalendarGetCommand{AccountId: accountId, Ids: ids} + }, + func(resp CalendarGetResponse) CalendarsResponse { + return CalendarsResponse{Calendars: resp.List, NotFound: resp.NotFound} + }, + func(resp CalendarGetResponse) State { return resp.State }, + accountId, session, ctx, logger, acceptLanguage, ids, + ) +} + +func (j *Client) QueryCalendarEvents(accountIds []string, session *Session, ctx context.Context, logger *log.Logger, acceptLanguage string, + filter CalendarEventFilterElement, sortBy []CalendarEventComparator, + position uint, limit uint) (map[string][]CalendarEvent, SessionState, State, Language, Error) { + logger = j.logger("QueryCalendarEvents", session, logger) + + uniqueAccountIds := structs.Uniq(accountIds) + + if sortBy == nil { + sortBy = []CalendarEventComparator{{Property: CalendarEventPropertyStart, IsAscending: false}} + } + + invocations := make([]Invocation, len(uniqueAccountIds)*2) + for i, accountId := range uniqueAccountIds { + query := CalendarEventQueryCommand{ + AccountId: accountId, + Filter: filter, + Sort: sortBy, + } + if limit > 0 { + query.Limit = limit + } + if position > 0 { + query.Position = position + } + invocations[i*2+0] = invocation(CommandCalendarEventQuery, query, mcid(accountId, "0")) + invocations[i*2+1] = invocation(CommandCalendarEventGet, CalendarEventGetRefCommand{ + AccountId: accountId, + IdsRef: &ResultReference{ + Name: CommandCalendarEventQuery, + Path: "/ids/*", + ResultOf: mcid(accountId, "0"), + }, + // Properties: CalendarEventProperties, // to also retrieve UTCStart and UTCEnd + }, mcid(accountId, "1")) + } + cmd, err := j.request(session, logger, invocations...) + if err != nil { + return nil, "", "", "", err + } + + return command(j.api, logger, ctx, session, j.onSessionOutdated, cmd, acceptLanguage, func(body *Response) (map[string][]CalendarEvent, State, Error) { + resp := map[string][]CalendarEvent{} + stateByAccountId := map[string]State{} + for _, accountId := range uniqueAccountIds { + var response CalendarEventGetResponse + err = retrieveResponseMatchParameters(logger, body, CommandCalendarEventGet, mcid(accountId, "1"), &response) + if err != nil { + return nil, "", err + } + if len(response.NotFound) > 0 { + // TODO what to do when there are not-found emails here? potentially nothing, they could have been deleted between query and get? 
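+ // For now, ids reported in notFound are dropped: the per-account result below only contains the events that were actually returned.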
+ } + resp[accountId] = response.List + stateByAccountId[accountId] = response.State + } + return resp, squashState(stateByAccountId), nil + }) +} + +func (j *Client) CreateCalendarEvent(accountId string, session *Session, ctx context.Context, logger *log.Logger, acceptLanguage string, create CalendarEvent) (*CalendarEvent, SessionState, State, Language, Error) { + return createTemplate(j, "CreateCalendarEvent", CalendarEventType, CommandCalendarEventSet, CommandCalendarEventGet, + func(accountId string, create map[string]CalendarEvent) CalendarEventSetCommand { + return CalendarEventSetCommand{AccountId: accountId, Create: create} + }, + func(accountId string, ref string) CalendarEventGetCommand { + return CalendarEventGetCommand{AccountId: accountId, Ids: []string{ref}} + }, + func(resp CalendarEventSetResponse) map[string]*CalendarEvent { + return resp.Created + }, + func(resp CalendarEventSetResponse) map[string]SetError { + return resp.NotCreated + }, + func(resp CalendarEventGetResponse) []CalendarEvent { + return resp.List + }, + func(resp CalendarEventSetResponse) State { + return resp.NewState + }, + accountId, session, ctx, logger, acceptLanguage, create) +} + +func (j *Client) DeleteCalendarEvent(accountId string, destroy []string, session *Session, ctx context.Context, logger *log.Logger, acceptLanguage string) (map[string]SetError, SessionState, State, Language, Error) { + return deleteTemplate(j, "DeleteCalendarEvent", CommandCalendarEventSet, + func(accountId string, destroy []string) CalendarEventSetCommand { + return CalendarEventSetCommand{AccountId: accountId, Destroy: destroy} + }, + func(resp CalendarEventSetResponse) map[string]SetError { return resp.NotDestroyed }, + func(resp CalendarEventSetResponse) State { return resp.NewState }, + accountId, destroy, session, ctx, logger, acceptLanguage) +} + +func getTemplate[GETREQ any, GETRESP any, RESP any]( + client *Client, name string, getCommand Command, + getCommandFactory func(string, []string) GETREQ, + mapper func(GETRESP) RESP, + stateMapper func(GETRESP) State, + accountId string, session *Session, ctx context.Context, logger *log.Logger, acceptLanguage string, ids []string) (RESP, SessionState, State, Language, Error) { + logger = client.logger(name, session, logger) + + var zero RESP + + cmd, err := client.request(session, logger, + invocation(getCommand, getCommandFactory(accountId, ids), "0"), + ) + if err != nil { + return zero, "", "", "", err + } + + return command(client.api, logger, ctx, session, client.onSessionOutdated, cmd, acceptLanguage, func(body *Response) (RESP, State, Error) { + var response GETRESP + err = retrieveResponseMatchParameters(logger, body, getCommand, "0", &response) + if err != nil { + return zero, "", err + } + + return mapper(response), stateMapper(response), nil + }) +} + +func createTemplate[T any, SETREQ any, GETREQ any, SETRESP any, GETRESP any]( + client *Client, name string, t ObjectType, setCommand Command, getCommand Command, + setCommandFactory func(string, map[string]T) SETREQ, + getCommandFactory func(string, string) GETREQ, + createdMapper func(SETRESP) map[string]*T, + notCreatedMapper func(SETRESP) map[string]SetError, + listMapper func(GETRESP) []T, + stateMapper func(SETRESP) State, + accountId string, session *Session, ctx context.Context, logger *log.Logger, acceptLanguage string, create T) (*T, SessionState, State, Language, Error) { + logger = client.logger(name, session, logger) + + createMap := map[string]T{"c": create} + cmd, err := client.request(session, 
logger, + invocation(setCommand, setCommandFactory(accountId, createMap), "0"), + invocation(getCommand, getCommandFactory(accountId, "#c"), "1"), + ) + if err != nil { + return nil, "", "", "", err + } + + return command(client.api, logger, ctx, session, client.onSessionOutdated, cmd, acceptLanguage, func(body *Response) (*T, State, Error) { + var setResponse SETRESP + err = retrieveResponseMatchParameters(logger, body, setCommand, "0", &setResponse) + if err != nil { + return nil, "", err + } + + notCreatedMap := notCreatedMapper(setResponse) + setErr, notok := notCreatedMap["c"] + if notok { + logger.Error().Msgf("%T.NotCreated returned an error %v", setResponse, setErr) + return nil, "", setErrorError(setErr, t) + } + + createdMap := createdMapper(setResponse) + if created, ok := createdMap["c"]; !ok || created == nil { + berr := fmt.Errorf("failed to find %s in %s response", string(t), string(setCommand)) + logger.Error().Err(berr) + return nil, "", simpleError(berr, JmapErrorInvalidJmapResponsePayload) + } + + var getResponse GETRESP + err = retrieveResponseMatchParameters(logger, body, getCommand, "1", &getResponse) + if err != nil { + return nil, "", err + } + + list := listMapper(getResponse) + + if len(list) < 1 { + berr := fmt.Errorf("failed to find %s in %s response", string(t), string(getCommand)) + logger.Error().Err(berr) + return nil, "", simpleError(berr, JmapErrorInvalidJmapResponsePayload) + } + + return &list[0], stateMapper(setResponse), nil + }) +} + +func deleteTemplate[REQ any, RESP any](client *Client, name string, c Command, + commandFactory func(string, []string) REQ, + notDestroyedMapper func(RESP) map[string]SetError, + stateMapper func(RESP) State, + accountId string, destroy []string, session *Session, ctx context.Context, logger *log.Logger, acceptLanguage string) (map[string]SetError, SessionState, State, Language, Error) { + logger = client.logger(name, session, logger) + + cmd, err := client.request(session, logger, + invocation(c, commandFactory(accountId, destroy), "0"), + ) + if err != nil { + return nil, "", "", "", err + } + + return command(client.api, logger, ctx, session, client.onSessionOutdated, cmd, acceptLanguage, func(body *Response) (map[string]SetError, State, Error) { + var setResponse RESP + err = retrieveResponseMatchParameters(logger, body, c, "0", &setResponse) + if err != nil { + return nil, "", err + } + return notDestroyedMapper(setResponse), stateMapper(setResponse), nil + }) +} diff --git a/pkg/jmap/jmap_api_contact.go b/pkg/jmap/jmap_api_contact.go new file mode 100644 index 0000000000..a09584fbf4 --- /dev/null +++ b/pkg/jmap/jmap_api_contact.go @@ -0,0 +1,199 @@ +package jmap + +import ( + "context" + "fmt" + + "github.com/opencloud-eu/opencloud/pkg/jscontact" + "github.com/opencloud-eu/opencloud/pkg/log" + "github.com/opencloud-eu/opencloud/pkg/structs" +) + +type AddressBooksResponse struct { + AddressBooks []AddressBook `json:"addressbooks"` + NotFound []string `json:"notFound,omitempty"` +} + +func (j *Client) GetAddressbooks(accountId string, session *Session, ctx context.Context, logger *log.Logger, acceptLanguage string, ids []string) (AddressBooksResponse, SessionState, State, Language, Error) { + logger = j.logger("GetAddressbooks", session, logger) + + cmd, err := j.request(session, logger, + invocation(CommandAddressBookGet, AddressBookGetCommand{AccountId: accountId, Ids: ids}, "0"), + ) + if err != nil { + return AddressBooksResponse{}, "", "", "", err + } + + return command(j.api, logger, ctx, session, 
j.onSessionOutdated, cmd, acceptLanguage, func(body *Response) (AddressBooksResponse, State, Error) { + var response AddressBookGetResponse + err = retrieveResponseMatchParameters(logger, body, CommandAddressBookGet, "0", &response) + if err != nil { + return AddressBooksResponse{}, response.State, err + } + return AddressBooksResponse{ + AddressBooks: response.List, + NotFound: response.NotFound, + }, response.State, nil + }) +} + +func (j *Client) GetContactCardsById(accountId string, session *Session, ctx context.Context, logger *log.Logger, + acceptLanguage string, contactIds []string) (map[string]jscontact.ContactCard, SessionState, State, Language, Error) { + logger = j.logger("GetContactCardsById", session, logger) + + cmd, err := j.request(session, logger, invocation(CommandContactCardGet, ContactCardGetCommand{ + Ids: contactIds, + AccountId: accountId, + }, "0")) + if err != nil { + return nil, "", "", "", err + } + + return command(j.api, logger, ctx, session, j.onSessionOutdated, cmd, acceptLanguage, func(body *Response) (map[string]jscontact.ContactCard, State, Error) { + var response ContactCardGetResponse + err = retrieveResponseMatchParameters(logger, body, CommandContactCardGet, "0", &response) + if err != nil { + return nil, "", err + } + m := map[string]jscontact.ContactCard{} + for _, contact := range response.List { + m[contact.Id] = contact + } + return m, response.State, nil + }) +} + +func (j *Client) QueryContactCards(accountIds []string, session *Session, ctx context.Context, logger *log.Logger, acceptLanguage string, + filter ContactCardFilterElement, sortBy []ContactCardComparator, + position uint, limit uint) (map[string][]jscontact.ContactCard, SessionState, State, Language, Error) { + logger = j.logger("QueryContactCards", session, logger) + + uniqueAccountIds := structs.Uniq(accountIds) + + if sortBy == nil { + sortBy = []ContactCardComparator{{Property: jscontact.ContactCardPropertyUpdated, IsAscending: false}} + } + + invocations := make([]Invocation, len(uniqueAccountIds)*2) + for i, accountId := range uniqueAccountIds { + query := ContactCardQueryCommand{ + AccountId: accountId, + Filter: filter, + Sort: sortBy, + } + if limit > 0 { + query.Limit = limit + } + if position > 0 { + query.Position = position + } + invocations[i*2+0] = invocation(CommandContactCardQuery, query, mcid(accountId, "0")) + invocations[i*2+1] = invocation(CommandContactCardGet, ContactCardGetRefCommand{ + AccountId: accountId, + IdsRef: &ResultReference{ + Name: CommandContactCardQuery, + Path: "/ids/*", + ResultOf: mcid(accountId, "0"), + }, + }, mcid(accountId, "1")) + } + cmd, err := j.request(session, logger, invocations...) + if err != nil { + return nil, "", "", "", err + } + + return command(j.api, logger, ctx, session, j.onSessionOutdated, cmd, acceptLanguage, func(body *Response) (map[string][]jscontact.ContactCard, State, Error) { + resp := map[string][]jscontact.ContactCard{} + stateByAccountId := map[string]State{} + for _, accountId := range uniqueAccountIds { + var response ContactCardGetResponse + err = retrieveResponseMatchParameters(logger, body, CommandContactCardGet, mcid(accountId, "1"), &response) + if err != nil { + return nil, "", err + } + if len(response.NotFound) > 0 { + // TODO what to do when there are not-found emails here? potentially nothing, they could have been deleted between query and get? 
+ } + resp[accountId] = response.List + stateByAccountId[accountId] = response.State + } + return resp, squashState(stateByAccountId), nil + }) +} + +func (j *Client) CreateContactCard(accountId string, session *Session, ctx context.Context, logger *log.Logger, acceptLanguage string, create jscontact.ContactCard) (*jscontact.ContactCard, SessionState, State, Language, Error) { + logger = j.logger("CreateContactCard", session, logger) + + cmd, err := j.request(session, logger, + invocation(CommandContactCardSet, ContactCardSetCommand{ + AccountId: accountId, + Create: map[string]jscontact.ContactCard{ + "c": create, + }, + }, "0"), + invocation(CommandContactCardGet, ContactCardGetCommand{ + AccountId: accountId, + Ids: []string{"#c"}, + }, "1"), + ) + if err != nil { + return nil, "", "", "", err + } + + return command(j.api, logger, ctx, session, j.onSessionOutdated, cmd, acceptLanguage, func(body *Response) (*jscontact.ContactCard, State, Error) { + var setResponse ContactCardSetResponse + err = retrieveResponseMatchParameters(logger, body, CommandContactCardSet, "0", &setResponse) + if err != nil { + return nil, "", err + } + + setErr, notok := setResponse.NotCreated["c"] + if notok { + logger.Error().Msgf("%T.NotCreated returned an error %v", setResponse, setErr) + return nil, "", setErrorError(setErr, EmailType) + } + + if created, ok := setResponse.Created["c"]; !ok || created == nil { + berr := fmt.Errorf("failed to find %s in %s response", string(ContactCardType), string(CommandContactCardSet)) + logger.Error().Err(berr) + return nil, "", simpleError(berr, JmapErrorInvalidJmapResponsePayload) + } + + var getResponse ContactCardGetResponse + err = retrieveResponseMatchParameters(logger, body, CommandContactCardGet, "1", &getResponse) + if err != nil { + return nil, "", err + } + + if len(getResponse.List) < 1 { + berr := fmt.Errorf("failed to find %s in %s response", string(ContactCardType), string(CommandContactCardSet)) + logger.Error().Err(berr) + return nil, "", simpleError(berr, JmapErrorInvalidJmapResponsePayload) + } + + return &getResponse.List[0], setResponse.NewState, nil + }) +} + +func (j *Client) DeleteContactCard(accountId string, destroy []string, session *Session, ctx context.Context, logger *log.Logger, acceptLanguage string) (map[string]SetError, SessionState, State, Language, Error) { + logger = j.logger("DeleteContactCard", session, logger) + + cmd, err := j.request(session, logger, + invocation(CommandContactCardSet, ContactCardSetCommand{ + AccountId: accountId, + Destroy: destroy, + }, "0"), + ) + if err != nil { + return nil, "", "", "", err + } + + return command(j.api, logger, ctx, session, j.onSessionOutdated, cmd, acceptLanguage, func(body *Response) (map[string]SetError, State, Error) { + var setResponse ContactCardSetResponse + err = retrieveResponseMatchParameters(logger, body, CommandContactCardSet, "0", &setResponse) + if err != nil { + return nil, "", err + } + return setResponse.NotDestroyed, setResponse.NewState, nil + }) +} diff --git a/pkg/jmap/jmap_api_email.go b/pkg/jmap/jmap_api_email.go new file mode 100644 index 0000000000..c728dead77 --- /dev/null +++ b/pkg/jmap/jmap_api_email.go @@ -0,0 +1,1105 @@ +package jmap + +import ( + "context" + "encoding/base64" + "fmt" + "time" + + "github.com/opencloud-eu/opencloud/pkg/log" + "github.com/opencloud-eu/opencloud/pkg/structs" + "github.com/rs/zerolog" +) + +type Emails struct { + Emails []Email `json:"emails,omitempty"` + Total uint `json:"total,omitzero"` + Limit uint `json:"limit,omitzero"` + 
Offset uint `json:"offset,omitzero"` +} + +type getEmailsResult struct { + emails []Email + notFound []string +} + +// Retrieve specific Emails by their id. +func (j *Client) GetEmails(accountId string, session *Session, ctx context.Context, logger *log.Logger, acceptLanguage string, ids []string, fetchBodies bool, maxBodyValueBytes uint, markAsSeen bool, withThreads bool) ([]Email, []string, SessionState, State, Language, Error) { + logger = j.logger("GetEmails", session, logger) + + get := EmailGetCommand{AccountId: accountId, Ids: ids, FetchAllBodyValues: fetchBodies} + if maxBodyValueBytes > 0 { + get.MaxBodyValueBytes = maxBodyValueBytes + } + invokeGet := invocation(CommandEmailGet, get, "1") + + methodCalls := []Invocation{invokeGet} + if markAsSeen { + updates := make(map[string]EmailUpdate, len(ids)) + for _, id := range ids { + updates[id] = EmailUpdate{EmailPropertyKeywords + "/" + JmapKeywordSeen: true} + } + mark := EmailSetCommand{AccountId: accountId, Update: updates} + methodCalls = []Invocation{invocation(CommandEmailSet, mark, "0"), invokeGet} + } + if withThreads { + threads := ThreadGetRefCommand{ + AccountId: accountId, + IdsRef: &ResultReference{ + ResultOf: "1", + Name: CommandEmailGet, + Path: "/list/*/" + EmailPropertyThreadId, + }, + } + methodCalls = append(methodCalls, invocation(CommandThreadGet, threads, "2")) + } + + cmd, err := j.request(session, logger, methodCalls...) + if err != nil { + logger.Error().Err(err).Send() + return nil, nil, "", "", "", simpleError(err, JmapErrorInvalidJmapRequestPayload) + } + result, sessionState, state, language, gwerr := command(j.api, logger, ctx, session, j.onSessionOutdated, cmd, acceptLanguage, func(body *Response) (getEmailsResult, State, Error) { + if markAsSeen { + var markResponse EmailSetResponse + err = retrieveResponseMatchParameters(logger, body, CommandEmailSet, "0", &markResponse) + if err != nil { + return getEmailsResult{}, "", err + } + for _, seterr := range markResponse.NotUpdated { + // TODO we don't have a way to compose multiple set errors yet + return getEmailsResult{}, "", setErrorError(seterr, EmailType) + } + } + var response EmailGetResponse + err = retrieveResponseMatchParameters(logger, body, CommandEmailGet, "1", &response) + if err != nil { + return getEmailsResult{}, "", err + } + if withThreads { + var threads ThreadGetResponse + err = retrieveResponseMatchParameters(logger, body, CommandThreadGet, "2", &threads) + if err != nil { + return getEmailsResult{}, "", err + } + setThreadSize(&threads, response.List) + } + return getEmailsResult{emails: response.List, notFound: response.NotFound}, response.State, nil + }) + return result.emails, result.notFound, sessionState, state, language, gwerr +} + +func (j *Client) GetEmailBlobId(accountId string, session *Session, ctx context.Context, logger *log.Logger, acceptLanguage string, id string) (string, SessionState, State, Language, Error) { + logger = j.logger("GetEmailBlobId", session, logger) + + get := EmailGetCommand{AccountId: accountId, Ids: []string{id}, FetchAllBodyValues: false, Properties: []string{"blobId"}} + cmd, err := j.request(session, logger, invocation(CommandEmailGet, get, "0")) + if err != nil { + logger.Error().Err(err).Send() + return "", "", "", "", simpleError(err, JmapErrorInvalidJmapRequestPayload) + } + return command(j.api, logger, ctx, session, j.onSessionOutdated, cmd, acceptLanguage, func(body *Response) (string, State, Error) { + var response EmailGetResponse + err = retrieveResponseMatchParameters(logger, body, 
CommandEmailGet, "0", &response) + if err != nil { + return "", "", err + } + if len(response.List) != 1 { + return "", "", nil + } + email := response.List[0] + return email.BlobId, response.State, nil + }) +} + +// Retrieve all the Emails in a given Mailbox by its id. +func (j *Client) GetAllEmailsInMailbox(accountId string, session *Session, ctx context.Context, logger *log.Logger, acceptLanguage string, mailboxId string, offset int, limit uint, collapseThreads bool, fetchBodies bool, maxBodyValueBytes uint, withThreads bool) (Emails, SessionState, State, Language, Error) { + logger = j.loggerParams("GetAllEmailsInMailbox", session, logger, func(z zerolog.Context) zerolog.Context { + return z.Bool(logFetchBodies, fetchBodies).Int(logOffset, offset).Uint(logLimit, limit) + }) + + query := EmailQueryCommand{ + AccountId: accountId, + Filter: &EmailFilterCondition{InMailbox: mailboxId}, + Sort: []EmailComparator{{Property: EmailPropertyReceivedAt, IsAscending: false}}, + CollapseThreads: collapseThreads, + CalculateTotal: true, + } + if offset > 0 { + query.Position = offset + } + if limit > 0 { + query.Limit = &limit + } + + get := EmailGetRefCommand{ + AccountId: accountId, + FetchAllBodyValues: fetchBodies, + IdsRef: &ResultReference{Name: CommandEmailQuery, Path: "/ids/*", ResultOf: "0"}, + } + if maxBodyValueBytes > 0 { + get.MaxBodyValueBytes = maxBodyValueBytes + } + + invocations := []Invocation{ + invocation(CommandEmailQuery, query, "0"), + invocation(CommandEmailGet, get, "1"), + } + + if withThreads { + threads := ThreadGetRefCommand{ + AccountId: accountId, + IdsRef: &ResultReference{ + ResultOf: "1", + Name: CommandEmailGet, + Path: "/list/*/" + EmailPropertyThreadId, + }, + } + invocations = append(invocations, invocation(CommandThreadGet, threads, "2")) + } + + cmd, err := j.request(session, logger, invocations...) 
+ if err != nil { + return Emails{}, "", "", "", err + } + + return command(j.api, logger, ctx, session, j.onSessionOutdated, cmd, acceptLanguage, func(body *Response) (Emails, State, Error) { + var queryResponse EmailQueryResponse + err = retrieveResponseMatchParameters(logger, body, CommandEmailQuery, "0", &queryResponse) + if err != nil { + return Emails{}, "", err + } + var getResponse EmailGetResponse + err = retrieveResponseMatchParameters(logger, body, CommandEmailGet, "1", &getResponse) + if err != nil { + logger.Error().Err(err).Send() + return Emails{}, "", err + } + + if withThreads { + var thread ThreadGetResponse + err = retrieveResponseMatchParameters(logger, body, CommandThreadGet, "2", &thread) + if err != nil { + return Emails{}, "", err + } + setThreadSize(&thread, getResponse.List) + } + + return Emails{ + Emails: getResponse.List, + Total: queryResponse.Total, + Limit: queryResponse.Limit, + Offset: queryResponse.Position, + }, queryResponse.QueryState, nil + }) +} + +func (j *Client) GetEmailChanges(accountId string, session *Session, ctx context.Context, logger *log.Logger, acceptLanguage string, sinceState State, maxChanges uint) (EmailChangesResponse, SessionState, State, Language, Error) { + logger = j.loggerParams("GetEmailChanges", session, logger, func(z zerolog.Context) zerolog.Context { + return z.Str(logSinceState, string(sinceState)) + }) + + changes := EmailChangesCommand{ + AccountId: accountId, + SinceState: sinceState, + } + if maxChanges > 0 { + changes.MaxChanges = maxChanges + } + + cmd, err := j.request(session, logger, invocation(CommandEmailChanges, changes, "0")) + if err != nil { + return EmailChangesResponse{}, "", "", "", simpleError(err, JmapErrorInvalidJmapRequestPayload) + } + + return command(j.api, logger, ctx, session, j.onSessionOutdated, cmd, acceptLanguage, func(body *Response) (EmailChangesResponse, State, Error) { + var changesResponse EmailChangesResponse + err = retrieveResponseMatchParameters(logger, body, CommandEmailChanges, "0", &changesResponse) + if err != nil { + return EmailChangesResponse{}, "", err + } + + return changesResponse, changesResponse.NewState, nil + }) +} + +// Get all the Emails that have been created, updated or deleted since a given state. 
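+// The lookup is done in a single JMAP request: an Email/changes call with the given sinceState is chained with two Email/get calls whose ids back-reference the "/created" and "/updated" arrays of the changes response.
+// Illustrative wire-level sketch (assuming the ResultReference fields serialize to the standard "#ids" back-reference of RFC 8620):
+//
+//	["Email/changes", {"accountId": ..., "sinceState": ...}, "0"]
+//	["Email/get", {"accountId": ..., "#ids": {"resultOf": "0", "name": "Email/changes", "path": "/created"}}, "1"]
+//	["Email/get", {"accountId": ..., "#ids": {"resultOf": "0", "name": "Email/changes", "path": "/updated"}}, "2"]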
+func (j *Client) GetEmailsSince(accountId string, session *Session, ctx context.Context, logger *log.Logger, acceptLanguage string, sinceState State, fetchBodies bool, maxBodyValueBytes uint, maxChanges uint) (MailboxChanges, SessionState, State, Language, Error) { + logger = j.loggerParams("GetEmailsSince", session, logger, func(z zerolog.Context) zerolog.Context { + return z.Bool(logFetchBodies, fetchBodies).Str(logSinceState, string(sinceState)) + }) + + changes := EmailChangesCommand{ + AccountId: accountId, + SinceState: sinceState, + } + if maxChanges > 0 { + changes.MaxChanges = maxChanges + } + + getCreated := EmailGetRefCommand{ + AccountId: accountId, + FetchAllBodyValues: fetchBodies, + IdsRef: &ResultReference{Name: CommandEmailChanges, Path: "/created", ResultOf: "0"}, + } + if maxBodyValueBytes > 0 { + getCreated.MaxBodyValueBytes = maxBodyValueBytes + } + getUpdated := EmailGetRefCommand{ + AccountId: accountId, + FetchAllBodyValues: fetchBodies, + IdsRef: &ResultReference{Name: CommandEmailChanges, Path: "/updated", ResultOf: "0"}, + } + if maxBodyValueBytes > 0 { + getUpdated.MaxBodyValueBytes = maxBodyValueBytes + } + + cmd, err := j.request(session, logger, + invocation(CommandEmailChanges, changes, "0"), + invocation(CommandEmailGet, getCreated, "1"), + invocation(CommandEmailGet, getUpdated, "2"), + ) + if err != nil { + return MailboxChanges{}, "", "", "", simpleError(err, JmapErrorInvalidJmapRequestPayload) + } + + return command(j.api, logger, ctx, session, j.onSessionOutdated, cmd, acceptLanguage, func(body *Response) (MailboxChanges, State, Error) { + var changesResponse EmailChangesResponse + err = retrieveResponseMatchParameters(logger, body, CommandEmailChanges, "0", &changesResponse) + if err != nil { + return MailboxChanges{}, "", err + } + + var createdResponse EmailGetResponse + err = retrieveResponseMatchParameters(logger, body, CommandEmailGet, "1", &createdResponse) + if err != nil { + logger.Error().Err(err).Send() + return MailboxChanges{}, "", err + } + + var updatedResponse EmailGetResponse + err = retrieveResponseMatchParameters(logger, body, CommandEmailGet, "2", &updatedResponse) + if err != nil { + logger.Error().Err(err).Send() + return MailboxChanges{}, "", err + } + + return MailboxChanges{ + Destroyed: changesResponse.Destroyed, + HasMoreChanges: changesResponse.HasMoreChanges, + NewState: changesResponse.NewState, + Created: createdResponse.List, + Updated: updatedResponse.List, + }, updatedResponse.State, nil + }) +} + +type SearchSnippetWithMeta struct { + ReceivedAt time.Time `json:"receivedAt,omitzero"` + EmailId string `json:"emailId,omitempty"` + SearchSnippet +} + +type EmailSnippetQueryResult struct { + Snippets []SearchSnippetWithMeta `json:"snippets,omitempty"` + Total uint `json:"total"` + Limit uint `json:"limit,omitzero"` + Position uint `json:"position,omitzero"` + QueryState State `json:"queryState"` +} + +func (j *Client) QueryEmailSnippets(accountIds []string, filter EmailFilterElement, session *Session, ctx context.Context, logger *log.Logger, acceptLanguage string, offset int, limit uint) (map[string]EmailSnippetQueryResult, SessionState, State, Language, Error) { + logger = j.loggerParams("QueryEmailSnippets", session, logger, func(z zerolog.Context) zerolog.Context { + return z.Uint(logLimit, limit).Int(logOffset, offset) + }) + + uniqueAccountIds := structs.Uniq(accountIds) + invocations := make([]Invocation, len(uniqueAccountIds)*3) + for i, accountId := range uniqueAccountIds { + query := EmailQueryCommand{ + 
AccountId: accountId, + Filter: filter, + Sort: []EmailComparator{{Property: EmailPropertyReceivedAt, IsAscending: false}}, + CollapseThreads: true, + CalculateTotal: true, + } + if offset > 0 { + query.Position = offset + } + if limit > 0 { + query.Limit = &limit + } + + mails := EmailGetRefCommand{ + AccountId: accountId, + IdsRef: &ResultReference{ + ResultOf: mcid(accountId, "0"), + Name: CommandEmailQuery, + Path: "/ids/*", + }, + FetchAllBodyValues: false, + MaxBodyValueBytes: 0, + Properties: []string{EmailPropertyId, EmailPropertyReceivedAt, EmailPropertySentAt}, + } + + snippet := SearchSnippetGetRefCommand{ + AccountId: accountId, + Filter: filter, + EmailIdRef: &ResultReference{ + ResultOf: mcid(accountId, "0"), + Name: CommandEmailQuery, + Path: "/ids/*", + }, + } + + invocations[i*3+0] = invocation(CommandEmailQuery, query, mcid(accountId, "0")) + invocations[i*3+1] = invocation(CommandEmailGet, mails, mcid(accountId, "1")) + invocations[i*3+2] = invocation(CommandSearchSnippetGet, snippet, mcid(accountId, "2")) + } + + cmd, err := j.request(session, logger, invocations...) + if err != nil { + return nil, "", "", "", err + } + + return command(j.api, logger, ctx, session, j.onSessionOutdated, cmd, acceptLanguage, func(body *Response) (map[string]EmailSnippetQueryResult, State, Error) { + results := make(map[string]EmailSnippetQueryResult, len(uniqueAccountIds)) + for _, accountId := range uniqueAccountIds { + var queryResponse EmailQueryResponse + err = retrieveResponseMatchParameters(logger, body, CommandEmailQuery, mcid(accountId, "0"), &queryResponse) + if err != nil { + return nil, "", err + } + + var mailResponse EmailGetResponse + err = retrieveResponseMatchParameters(logger, body, CommandEmailGet, mcid(accountId, "1"), &mailResponse) + if err != nil { + return nil, "", err + } + + var snippetResponse SearchSnippetGetResponse + err = retrieveResponseMatchParameters(logger, body, CommandSearchSnippetGet, mcid(accountId, "2"), &snippetResponse) + if err != nil { + return nil, "", err + } + + mailResponseById := structs.Index(mailResponse.List, func(e Email) string { return e.Id }) + + snippets := make([]SearchSnippetWithMeta, len(queryResponse.Ids)) + if len(queryResponse.Ids) > len(snippetResponse.List) { + // TODO how do we handle this, if there are more email IDs than snippets? + } + + i := 0 + for _, id := range queryResponse.Ids { + if mail, ok := mailResponseById[id]; ok { + snippets[i] = SearchSnippetWithMeta{ + EmailId: id, + ReceivedAt: mail.ReceivedAt, + SearchSnippet: snippetResponse.List[i], + } + } else { + // TODO how do we handle this, if there is no email result for that id? 
+ } + i++ + } + + results[accountId] = EmailSnippetQueryResult{ + Snippets: snippets, + Total: queryResponse.Total, + Limit: queryResponse.Limit, + Position: queryResponse.Position, + QueryState: queryResponse.QueryState, + } + } + return results, squashStateFunc(results, func(r EmailSnippetQueryResult) State { return r.QueryState }), nil + }) +} + +type EmailQueryResult struct { + Emails []Email `json:"emails"` + Total uint `json:"total"` + Limit uint `json:"limit,omitzero"` + Position uint `json:"position,omitzero"` + QueryState State `json:"queryState"` +} + +func (j *Client) QueryEmails(accountIds []string, filter EmailFilterElement, session *Session, ctx context.Context, logger *log.Logger, acceptLanguage string, offset int, limit uint, fetchBodies bool, maxBodyValueBytes uint) (map[string]EmailQueryResult, SessionState, State, Language, Error) { + logger = j.loggerParams("QueryEmails", session, logger, func(z zerolog.Context) zerolog.Context { + return z.Bool(logFetchBodies, fetchBodies) + }) + + uniqueAccountIds := structs.Uniq(accountIds) + invocations := make([]Invocation, len(uniqueAccountIds)*2) + for i, accountId := range uniqueAccountIds { + query := EmailQueryCommand{ + AccountId: accountId, + Filter: filter, + Sort: []EmailComparator{{Property: EmailPropertyReceivedAt, IsAscending: false}}, + CollapseThreads: true, + CalculateTotal: true, + } + if offset > 0 { + query.Position = offset + } + if limit > 0 { + query.Limit = &limit + } + + mails := EmailGetRefCommand{ + AccountId: accountId, + IdsRef: &ResultReference{ + ResultOf: mcid(accountId, "0"), + Name: CommandEmailQuery, + Path: "/ids/*", + }, + FetchAllBodyValues: fetchBodies, + MaxBodyValueBytes: maxBodyValueBytes, + } + + invocations[i*2+0] = invocation(CommandEmailQuery, query, mcid(accountId, "0")) + invocations[i*2+1] = invocation(CommandEmailGet, mails, mcid(accountId, "1")) + } + + cmd, err := j.request(session, logger, invocations...) 
+ if err != nil { + return nil, "", "", "", err + } + + return command(j.api, logger, ctx, session, j.onSessionOutdated, cmd, acceptLanguage, func(body *Response) (map[string]EmailQueryResult, State, Error) { + results := make(map[string]EmailQueryResult, len(uniqueAccountIds)) + for _, accountId := range uniqueAccountIds { + var queryResponse EmailQueryResponse + err = retrieveResponseMatchParameters(logger, body, CommandEmailQuery, mcid(accountId, "0"), &queryResponse) + if err != nil { + return nil, "", err + } + + var emailsResponse EmailGetResponse + err = retrieveResponseMatchParameters(logger, body, CommandEmailGet, mcid(accountId, "1"), &emailsResponse) + if err != nil { + return nil, "", err + } + + results[accountId] = EmailQueryResult{ + Emails: emailsResponse.List, + Total: queryResponse.Total, + Limit: queryResponse.Limit, + Position: queryResponse.Position, + QueryState: queryResponse.QueryState, + } + } + return results, squashStateFunc(results, func(r EmailQueryResult) State { return r.QueryState }), nil + }) +} + +type EmailWithSnippets struct { + Email Email `json:"email"` + Snippets []SearchSnippet `json:"snippets,omitempty"` +} + +type EmailQueryWithSnippetsResult struct { + Results []EmailWithSnippets `json:"results"` + Total uint `json:"total"` + Limit uint `json:"limit,omitzero"` + Position uint `json:"position,omitzero"` + QueryState State `json:"queryState"` +} + +func (j *Client) QueryEmailsWithSnippets(accountIds []string, filter EmailFilterElement, session *Session, ctx context.Context, logger *log.Logger, acceptLanguage string, offset int, limit uint, fetchBodies bool, maxBodyValueBytes uint) (map[string]EmailQueryWithSnippetsResult, SessionState, State, Language, Error) { + logger = j.loggerParams("QueryEmailsWithSnippets", session, logger, func(z zerolog.Context) zerolog.Context { + return z.Bool(logFetchBodies, fetchBodies) + }) + + uniqueAccountIds := structs.Uniq(accountIds) + invocations := make([]Invocation, len(uniqueAccountIds)*3) + for i, accountId := range uniqueAccountIds { + query := EmailQueryCommand{ + AccountId: accountId, + Filter: filter, + Sort: []EmailComparator{{Property: EmailPropertyReceivedAt, IsAscending: false}}, + CollapseThreads: false, + CalculateTotal: true, + } + if offset > 0 { + query.Position = offset + } + if limit > 0 { + query.Limit = &limit + } + + snippet := SearchSnippetGetRefCommand{ + AccountId: accountId, + Filter: filter, + EmailIdRef: &ResultReference{ + ResultOf: mcid(accountId, "0"), + Name: CommandEmailQuery, + Path: "/ids/*", + }, + } + + mails := EmailGetRefCommand{ + AccountId: accountId, + IdsRef: &ResultReference{ + ResultOf: mcid(accountId, "0"), + Name: CommandEmailQuery, + Path: "/ids/*", + }, + FetchAllBodyValues: fetchBodies, + MaxBodyValueBytes: maxBodyValueBytes, + } + invocations[i*3+0] = invocation(CommandEmailQuery, query, mcid(accountId, "0")) + invocations[i*3+1] = invocation(CommandSearchSnippetGet, snippet, mcid(accountId, "1")) + invocations[i*3+2] = invocation(CommandEmailGet, mails, mcid(accountId, "2")) + } + + cmd, err := j.request(session, logger, invocations...) 
+ if err != nil { + logger.Error().Err(err).Send() + return nil, "", "", "", simpleError(err, JmapErrorInvalidJmapRequestPayload) + } + + return command(j.api, logger, ctx, session, j.onSessionOutdated, cmd, acceptLanguage, func(body *Response) (map[string]EmailQueryWithSnippetsResult, State, Error) { + result := make(map[string]EmailQueryWithSnippetsResult, len(uniqueAccountIds)) + for _, accountId := range uniqueAccountIds { + var queryResponse EmailQueryResponse + err = retrieveResponseMatchParameters(logger, body, CommandEmailQuery, mcid(accountId, "0"), &queryResponse) + if err != nil { + return nil, "", err + } + + var snippetResponse SearchSnippetGetResponse + err = retrieveResponseMatchParameters(logger, body, CommandSearchSnippetGet, mcid(accountId, "1"), &snippetResponse) + if err != nil { + return nil, "", err + } + + var emailsResponse EmailGetResponse + err = retrieveResponseMatchParameters(logger, body, CommandEmailGet, mcid(accountId, "2"), &emailsResponse) + if err != nil { + return nil, "", err + } + + snippetsById := map[string][]SearchSnippet{} + for _, snippet := range snippetResponse.List { + list, ok := snippetsById[snippet.EmailId] + if !ok { + list = []SearchSnippet{} + } + snippetsById[snippet.EmailId] = append(list, snippet) + } + + results := []EmailWithSnippets{} + for _, email := range emailsResponse.List { + snippets, ok := snippetsById[email.Id] + if !ok { + snippets = []SearchSnippet{} + } + results = append(results, EmailWithSnippets{ + Email: email, + Snippets: snippets, + }) + } + + result[accountId] = EmailQueryWithSnippetsResult{ + Results: results, + Total: queryResponse.Total, + Limit: queryResponse.Limit, + Position: queryResponse.Position, + QueryState: queryResponse.QueryState, + } + } + return result, squashStateFunc(result, func(r EmailQueryWithSnippetsResult) State { return r.QueryState }), nil + }) +} + +type UploadedEmail struct { + Id string `json:"id"` + Size int `json:"size"` + Type string `json:"type"` + Sha512 string `json:"sha:512"` +} + +func (j *Client) ImportEmail(accountId string, session *Session, ctx context.Context, logger *log.Logger, acceptLanguage string, data []byte) (UploadedEmail, SessionState, State, Language, Error) { + encoded := base64.StdEncoding.EncodeToString(data) + + upload := BlobUploadCommand{ + AccountId: accountId, + Create: map[string]UploadObject{ + "0": { + Data: []DataSourceObject{{ + DataAsBase64: encoded, + }}, + Type: EmailMimeType, + }, + }, + } + + getHash := BlobGetRefCommand{ + AccountId: accountId, + IdRef: &ResultReference{ + ResultOf: "0", + Name: CommandBlobUpload, + Path: "/ids", + }, + Properties: []string{BlobPropertyDigestSha512}, + } + + cmd, err := j.request(session, logger, + invocation(CommandBlobUpload, upload, "0"), + invocation(CommandBlobGet, getHash, "1"), + ) + if err != nil { + return UploadedEmail{}, "", "", "", simpleError(err, JmapErrorInvalidJmapRequestPayload) + } + + return command(j.api, logger, ctx, session, j.onSessionOutdated, cmd, acceptLanguage, func(body *Response) (UploadedEmail, State, Error) { + var uploadResponse BlobUploadResponse + err = retrieveResponseMatchParameters(logger, body, CommandBlobUpload, "0", &uploadResponse) + if err != nil { + return UploadedEmail{}, "", err + } + + var getResponse BlobGetResponse + err = retrieveResponseMatchParameters(logger, body, CommandBlobGet, "1", &getResponse) + if err != nil { + logger.Error().Err(err).Send() + return UploadedEmail{}, "", err + } + + if len(uploadResponse.Created) != 1 { + logger.Error().Msgf("%T.Created 
has %v elements instead of 1", uploadResponse, len(uploadResponse.Created))
+			return UploadedEmail{}, "", simpleError(fmt.Errorf("%T.Created has %v elements instead of 1", uploadResponse, len(uploadResponse.Created)), JmapErrorInvalidJmapResponsePayload)
+		}
+		upload, ok := uploadResponse.Created["0"]
+		if !ok {
+			logger.Error().Msgf("%T.Created has no element '0'", uploadResponse)
+			return UploadedEmail{}, "", simpleError(fmt.Errorf("%T.Created has no element '0'", uploadResponse), JmapErrorInvalidJmapResponsePayload)
+		}
+
+		if len(getResponse.List) != 1 {
+			logger.Error().Msgf("%T.List has %v elements instead of 1", getResponse, len(getResponse.List))
+			return UploadedEmail{}, "", simpleError(fmt.Errorf("%T.List has %v elements instead of 1", getResponse, len(getResponse.List)), JmapErrorInvalidJmapResponsePayload)
+		}
+		get := getResponse.List[0]
+
+		return UploadedEmail{
+			Id:     upload.Id,
+			Size:   upload.Size,
+			Type:   upload.Type,
+			Sha512: get.DigestSha512,
+		}, State(get.DigestSha256), nil
+	})
+
+}
+
+func (j *Client) CreateEmail(accountId string, email EmailCreate, replaceId string, session *Session, ctx context.Context, logger *log.Logger, acceptLanguage string) (*Email, SessionState, State, Language, Error) {
+	set := EmailSetCommand{
+		AccountId: accountId,
+		Create: map[string]EmailCreate{
+			"c": email,
+		},
+	}
+	if replaceId != "" {
+		set.Destroy = []string{replaceId}
+	}
+
+	cmd, err := j.request(session, logger,
+		invocation(CommandEmailSet, set, "0"),
+	)
+	if err != nil {
+		return nil, "", "", "", err
+	}
+
+	return command(j.api, logger, ctx, session, j.onSessionOutdated, cmd, acceptLanguage, func(body *Response) (*Email, State, Error) {
+		var setResponse EmailSetResponse
+		err = retrieveResponseMatchParameters(logger, body, CommandEmailSet, "0", &setResponse)
+		if err != nil {
+			return nil, "", err
+		}
+
+		if len(setResponse.NotCreated) > 0 {
+			// error occurred
+			// TODO(pbleser-oc) handle submission errors
+		}
+
+		setErr, notok := setResponse.NotCreated["c"]
+		if notok {
+			logger.Error().Msgf("%T.NotCreated returned an error %v", setResponse, setErr)
+			return nil, "", setErrorError(setErr, EmailType)
+		}
+
+		created, ok := setResponse.Created["c"]
+		if !ok {
+			berr := fmt.Errorf("failed to find %s in %s response", string(EmailType), string(CommandEmailSet))
+			logger.Error().Err(berr).Send()
+			return nil, "", simpleError(berr, JmapErrorInvalidJmapResponsePayload)
+		}
+
+		return created, setResponse.NewState, nil
+	})
+}
+
+// The Email/set method encompasses:
+// - Changing the keywords of an Email (e.g., unread/flagged status)
+// - Adding/removing an Email to/from Mailboxes (moving a message)
+// - Deleting Emails
+//
+// To create drafts, use the CreateEmail function instead.
+//
+// To delete mails, use the DeleteEmails function instead.
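+//
+// A minimal call sketch (hypothetical ids; the EmailUpdate value "patch" is assumed to be
+// built elsewhere, e.g. as a keyword or mailbox patch for the message):
+//
+//	updated, _, newState, _, uerr := client.UpdateEmails(
+//		accountId, map[string]EmailUpdate{"M123": patch}, session, ctx, logger, "en")
+//	if uerr != nil {
+//		// handle the error
+//	}
+//	_ = updated  // map keyed by email id, as returned by Email/set
+//	_ = newState // new Email state, useful for subsequent delta requests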
+func (j *Client) UpdateEmails(accountId string, updates map[string]EmailUpdate, session *Session, ctx context.Context, logger *log.Logger, acceptLanguage string) (map[string]*Email, SessionState, State, Language, Error) { + cmd, err := j.request(session, logger, + invocation(CommandEmailSet, EmailSetCommand{ + AccountId: accountId, + Update: updates, + }, "0"), + ) + if err != nil { + return nil, "", "", "", err + } + + return command(j.api, logger, ctx, session, j.onSessionOutdated, cmd, acceptLanguage, func(body *Response) (map[string]*Email, State, Error) { + var setResponse EmailSetResponse + err = retrieveResponseMatchParameters(logger, body, CommandEmailSet, "0", &setResponse) + if err != nil { + return nil, "", err + } + if len(setResponse.NotUpdated) > 0 { + // TODO we don't have composite errors + for _, notUpdated := range setResponse.NotUpdated { + return nil, "", setErrorError(notUpdated, EmailType) + } + } + return setResponse.Updated, setResponse.NewState, nil + }) +} + +func (j *Client) DeleteEmails(accountId string, destroy []string, session *Session, ctx context.Context, logger *log.Logger, acceptLanguage string) (map[string]SetError, SessionState, State, Language, Error) { + cmd, err := j.request(session, logger, + invocation(CommandEmailSet, EmailSetCommand{ + AccountId: accountId, + Destroy: destroy, + }, "0"), + ) + if err != nil { + return nil, "", "", "", err + } + + return command(j.api, logger, ctx, session, j.onSessionOutdated, cmd, acceptLanguage, func(body *Response) (map[string]SetError, State, Error) { + var setResponse EmailSetResponse + err = retrieveResponseMatchParameters(logger, body, CommandEmailSet, "0", &setResponse) + if err != nil { + return nil, "", err + } + return setResponse.NotDestroyed, setResponse.NewState, nil + }) +} + +type SubmittedEmail struct { + Id string `json:"id"` + SendAt time.Time `json:"sendAt,omitzero"` + ThreadId string `json:"threadId,omitempty"` + UndoStatus EmailSubmissionUndoStatus `json:"undoStatus,omitempty"` + Envelope *Envelope `json:"envelope,omitempty"` + + // A list of blob ids for DSNs [RFC3464] received for this submission, + // in order of receipt, oldest first. + // + // The blob is the whole MIME message (with a top-level content-type of multipart/report), as received. + // + // [RFC3464]: https://datatracker.ietf.org/doc/html/rfc3464 + DsnBlobIds []string `json:"dsnBlobIds,omitempty"` + + // A list of blob ids for MDNs [RFC8098] received for this submission, + // in order of receipt, oldest first. + // + // The blob is the whole MIME message (with a top-level content-type of multipart/report), as received. 
+ // + // [RFC8098]: https://datatracker.ietf.org/doc/html/rfc8098 + MdnBlobIds []string `json:"mdnBlobIds,omitempty"` +} + +type MoveMail struct { + FromMailboxId string + ToMailboxId string +} + +func (j *Client) SubmitEmail(accountId string, identityId string, emailId string, move *MoveMail, session *Session, ctx context.Context, logger *log.Logger, acceptLanguage string) (EmailSubmission, SessionState, State, Language, Error) { + logger = j.logger("SubmitEmail", session, logger) + + update := map[string]any{ + EmailPropertyKeywords + "/" + JmapKeywordDraft: nil, // unmark as draft + EmailPropertyKeywords + "/" + JmapKeywordSeen: true, // mark as seen (read) + } + if move != nil && move.FromMailboxId != "" && move.ToMailboxId != "" && move.FromMailboxId != move.ToMailboxId { + update[EmailPropertyMailboxIds+"/"+move.FromMailboxId] = nil + update[EmailPropertyMailboxIds+"/"+move.ToMailboxId] = true + } + + id := "s0" + + set := EmailSubmissionSetCommand{ + AccountId: accountId, + Create: map[string]EmailSubmissionCreate{ + id: { + IdentityId: identityId, + EmailId: emailId, + Envelope: nil, + }, + }, + OnSuccessUpdateEmail: map[string]PatchObject{ + "#" + id: update, + }, + } + + get := EmailSubmissionGetCommand{ + AccountId: accountId, + Ids: []string{"#" + id}, + } + + cmd, err := j.request(session, logger, + invocation(CommandEmailSubmissionSet, set, "0"), + invocation(CommandEmailSubmissionGet, get, "1"), + ) + if err != nil { + return EmailSubmission{}, "", "", "", err + } + + return command(j.api, logger, ctx, session, j.onSessionOutdated, cmd, acceptLanguage, func(body *Response) (EmailSubmission, State, Error) { + var submissionResponse EmailSubmissionSetResponse + err = retrieveResponseMatchParameters(logger, body, CommandEmailSubmissionSet, "0", &submissionResponse) + if err != nil { + return EmailSubmission{}, "", err + } + + if len(submissionResponse.NotCreated) > 0 { + // error occured + // TODO(pbleser-oc) handle submission errors + } + + // there is an implicit Email/set response: + // "After all create/update/destroy items in the EmailSubmission/set invocation have been processed, + // a single implicit Email/set call MUST be made to perform any changes requested in these two arguments. + // The response to this MUST be returned after the EmailSubmission/set response." + // from an example in the spec, it has the same tag as the EmailSubmission/set command ("0" in this case) + var setResponse EmailSetResponse + err = retrieveResponseMatchParameters(logger, body, CommandEmailSet, "0", &setResponse) + if err != nil { + return EmailSubmission{}, "", err + } + + if len(setResponse.Updated) == 1 { + var getResponse EmailSubmissionGetResponse + err = retrieveResponseMatchParameters(logger, body, CommandEmailSubmissionGet, "1", &getResponse) + if err != nil { + return EmailSubmission{}, "", err + } + + if len(getResponse.List) != 1 { + // for some reason (error?)... 
+ // TODO(pbleser-oc) handle absence of emailsubmission + } + + submission := getResponse.List[0] + + return submission, setResponse.NewState, nil + } else { + err = simpleError(fmt.Errorf("failed to submit email: updated is empty"), 0) // TODO proper error handling + return EmailSubmission{}, "", err + } + }) +} + +type emailSubmissionResult struct { + submissions map[string]EmailSubmission + notFound []string +} + +func (j *Client) GetEmailSubmissionStatus(accountId string, submissionIds []string, session *Session, ctx context.Context, logger *log.Logger, acceptLanguage string) (map[string]EmailSubmission, []string, SessionState, State, Language, Error) { + logger = j.logger("GetEmailSubmissionStatus", session, logger) + + cmd, err := j.request(session, logger, invocation(CommandEmailSubmissionGet, EmailSubmissionGetCommand{ + AccountId: accountId, + Ids: submissionIds, + }, "0")) + if err != nil { + return nil, nil, "", "", "", err + } + + result, sessionState, state, lang, err := command(j.api, logger, ctx, session, j.onSessionOutdated, cmd, acceptLanguage, func(body *Response) (emailSubmissionResult, State, Error) { + var response EmailSubmissionGetResponse + err = retrieveResponseMatchParameters(logger, body, CommandEmailSubmissionGet, "0", &response) + if err != nil { + return emailSubmissionResult{}, "", err + } + m := make(map[string]EmailSubmission, len(response.List)) + for _, s := range response.List { + m[s.Id] = s + } + return emailSubmissionResult{submissions: m, notFound: response.NotFound}, response.State, nil + }) + + return result.submissions, result.notFound, sessionState, state, lang, err +} + +func (j *Client) EmailsInThread(accountId string, threadId string, session *Session, ctx context.Context, logger *log.Logger, acceptLanguage string, fetchBodies bool, maxBodyValueBytes uint) ([]Email, SessionState, State, Language, Error) { + logger = j.loggerParams("EmailsInThread", session, logger, func(z zerolog.Context) zerolog.Context { + return z.Bool(logFetchBodies, fetchBodies).Str("threadId", log.SafeString(threadId)) + }) + + cmd, err := j.request(session, logger, + invocation(CommandThreadGet, ThreadGetCommand{ + AccountId: accountId, + Ids: []string{threadId}, + }, "0"), + invocation(CommandEmailGet, EmailGetRefCommand{ + AccountId: accountId, + IdsRef: &ResultReference{ + ResultOf: "0", + Name: CommandThreadGet, + Path: "/list/*/emailIds", + }, + FetchAllBodyValues: fetchBodies, + MaxBodyValueBytes: maxBodyValueBytes, + }, "1"), + ) + if err != nil { + return nil, "", "", "", err + } + + return command(j.api, logger, ctx, session, j.onSessionOutdated, cmd, acceptLanguage, func(body *Response) ([]Email, State, Error) { + var emailsResponse EmailGetResponse + err = retrieveResponseMatchParameters(logger, body, CommandEmailGet, "1", &emailsResponse) + if err != nil { + return nil, "", err + } + return emailsResponse.List, emailsResponse.State, nil + }) +} + +type EmailsSummary struct { + Emails []Email `json:"emails"` + Total uint `json:"total"` + Limit uint `json:"limit"` + Offset uint `json:"offset"` + State State `json:"state"` +} + +var EmailSummaryProperties = []string{ + EmailPropertyId, + EmailPropertyThreadId, + EmailPropertyMailboxIds, + EmailPropertyKeywords, + EmailPropertySize, + EmailPropertyReceivedAt, + EmailPropertySender, + EmailPropertyFrom, + EmailPropertyTo, + EmailPropertyCc, + EmailPropertyBcc, + EmailPropertySubject, + EmailPropertySentAt, + EmailPropertyHasAttachment, + EmailPropertyAttachments, + EmailPropertyPreview, +} + +func (j *Client) 
QueryEmailSummaries(accountIds []string, session *Session, ctx context.Context, logger *log.Logger, acceptLanguage string, filter EmailFilterElement, limit uint, withThreads bool) (map[string]EmailsSummary, SessionState, State, Language, Error) { + logger = j.logger("QueryEmailSummaries", session, logger) + + uniqueAccountIds := structs.Uniq(accountIds) + + factor := 2 + if withThreads { + factor++ + } + + invocations := make([]Invocation, len(uniqueAccountIds)*factor) + for i, accountId := range uniqueAccountIds { + get := EmailQueryCommand{ + AccountId: accountId, + Filter: filter, + Sort: []EmailComparator{{Property: EmailPropertyReceivedAt, IsAscending: false}}, + } + if limit > 0 { + get.Limit = &limit + } + invocations[i*factor+0] = invocation(CommandEmailQuery, get, mcid(accountId, "0")) + + invocations[i*factor+1] = invocation(CommandEmailGet, EmailGetRefCommand{ + AccountId: accountId, + IdsRef: &ResultReference{ + Name: CommandEmailQuery, + Path: "/ids/*", + ResultOf: mcid(accountId, "0"), + }, + Properties: EmailSummaryProperties, + }, mcid(accountId, "1")) + if withThreads { + invocations[i*factor+2] = invocation(CommandThreadGet, ThreadGetRefCommand{ + AccountId: accountId, + IdsRef: &ResultReference{ + Name: CommandEmailGet, + Path: "/list/*/" + EmailPropertyThreadId, + ResultOf: mcid(accountId, "1"), + }, + }, mcid(accountId, "2")) + } + } + cmd, err := j.request(session, logger, invocations...) + if err != nil { + return nil, "", "", "", err + } + + return command(j.api, logger, ctx, session, j.onSessionOutdated, cmd, acceptLanguage, func(body *Response) (map[string]EmailsSummary, State, Error) { + resp := map[string]EmailsSummary{} + for _, accountId := range uniqueAccountIds { + var queryResponse EmailQueryResponse + err = retrieveResponseMatchParameters(logger, body, CommandEmailQuery, mcid(accountId, "0"), &queryResponse) + if err != nil { + return nil, "", err + } + + var response EmailGetResponse + err = retrieveResponseMatchParameters(logger, body, CommandEmailGet, mcid(accountId, "1"), &response) + if err != nil { + return nil, "", err + } + if len(response.NotFound) > 0 { + // TODO what to do when there are not-found emails here? potentially nothing, they could have been deleted between query and get? 
+ } + if withThreads { + var thread ThreadGetResponse + err = retrieveResponseMatchParameters(logger, body, CommandThreadGet, mcid(accountId, "2"), &thread) + if err != nil { + return nil, "", err + } + setThreadSize(&thread, response.List) + } + + resp[accountId] = EmailsSummary{ + Emails: response.List, + Total: queryResponse.Total, + Limit: queryResponse.Limit, + Offset: queryResponse.Position, + State: response.State, + } + } + return resp, squashStateFunc(resp, func(s EmailsSummary) State { return s.State }), nil + }) +} + +func setThreadSize(threads *ThreadGetResponse, emails []Email) { + threadSizeById := make(map[string]int, len(threads.List)) + for _, thread := range threads.List { + threadSizeById[thread.Id] = len(thread.EmailIds) + } + for i := range len(emails) { + ts, ok := threadSizeById[emails[i].ThreadId] + if !ok { + ts = 1 + } + emails[i].ThreadSize = ts + } +} diff --git a/pkg/jmap/jmap_api_identity.go b/pkg/jmap/jmap_api_identity.go new file mode 100644 index 0000000000..3eabf7abf5 --- /dev/null +++ b/pkg/jmap/jmap_api_identity.go @@ -0,0 +1,208 @@ +package jmap + +import ( + "context" + "strconv" + + "github.com/opencloud-eu/opencloud/pkg/log" + "github.com/opencloud-eu/opencloud/pkg/structs" +) + +func (j *Client) GetAllIdentities(accountId string, session *Session, ctx context.Context, logger *log.Logger, acceptLanguage string) ([]Identity, SessionState, State, Language, Error) { + logger = j.logger("GetAllIdentities", session, logger) + cmd, err := j.request(session, logger, invocation(CommandIdentityGet, IdentityGetCommand{AccountId: accountId}, "0")) + if err != nil { + return nil, "", "", "", err + } + return command(j.api, logger, ctx, session, j.onSessionOutdated, cmd, acceptLanguage, func(body *Response) ([]Identity, State, Error) { + var response IdentityGetResponse + err = retrieveResponseMatchParameters(logger, body, CommandIdentityGet, "0", &response) + if err != nil { + return nil, "", err + } + return response.List, response.State, nil + }) +} + +func (j *Client) GetIdentities(accountId string, session *Session, ctx context.Context, logger *log.Logger, acceptLanguage string, identityIds []string) ([]Identity, SessionState, State, Language, Error) { + logger = j.logger("GetIdentities", session, logger) + cmd, err := j.request(session, logger, invocation(CommandIdentityGet, IdentityGetCommand{AccountId: accountId, Ids: identityIds}, "0")) + if err != nil { + return nil, "", "", "", err + } + return command(j.api, logger, ctx, session, j.onSessionOutdated, cmd, acceptLanguage, func(body *Response) ([]Identity, State, Error) { + var response IdentityGetResponse + err = retrieveResponseMatchParameters(logger, body, CommandIdentityGet, "0", &response) + if err != nil { + return nil, "", err + } + return response.List, response.State, nil + }) +} + +type IdentitiesGetResponse struct { + Identities map[string][]Identity `json:"identities,omitempty"` + NotFound []string `json:"notFound,omitempty"` +} + +func (j *Client) GetIdentitiesForAllAccounts(accountIds []string, session *Session, ctx context.Context, logger *log.Logger, acceptLanguage string) (IdentitiesGetResponse, SessionState, State, Language, Error) { + logger = j.logger("GetIdentitiesForAllAccounts", session, logger) + uniqueAccountIds := structs.Uniq(accountIds) + calls := make([]Invocation, len(uniqueAccountIds)) + for i, accountId := range uniqueAccountIds { + calls[i] = invocation(CommandIdentityGet, IdentityGetCommand{AccountId: accountId}, strconv.Itoa(i)) + } + + cmd, err := j.request(session, 
logger, calls...) + if err != nil { + return IdentitiesGetResponse{}, "", "", "", err + } + return command(j.api, logger, ctx, session, j.onSessionOutdated, cmd, acceptLanguage, func(body *Response) (IdentitiesGetResponse, State, Error) { + identities := make(map[string][]Identity, len(uniqueAccountIds)) + stateByAccountId := make(map[string]State, len(uniqueAccountIds)) + notFound := []string{} + for i, accountId := range uniqueAccountIds { + var response IdentityGetResponse + err = retrieveResponseMatchParameters(logger, body, CommandIdentityGet, strconv.Itoa(i), &response) + if err != nil { + return IdentitiesGetResponse{}, "", err + } else { + identities[accountId] = response.List + } + stateByAccountId[accountId] = response.State + notFound = append(notFound, response.NotFound...) + } + + return IdentitiesGetResponse{ + Identities: identities, + NotFound: structs.Uniq(notFound), + }, squashState(stateByAccountId), nil + }) +} + +type IdentitiesAndMailboxesGetResponse struct { + Identities map[string][]Identity `json:"identities,omitempty"` + NotFound []string `json:"notFound,omitempty"` + Mailboxes []Mailbox `json:"mailboxes"` +} + +func (j *Client) GetIdentitiesAndMailboxes(mailboxAccountId string, accountIds []string, session *Session, ctx context.Context, logger *log.Logger, acceptLanguage string) (IdentitiesAndMailboxesGetResponse, SessionState, State, Language, Error) { + uniqueAccountIds := structs.Uniq(accountIds) + + logger = j.logger("GetIdentitiesAndMailboxes", session, logger) + + calls := make([]Invocation, len(uniqueAccountIds)+1) + calls[0] = invocation(CommandMailboxGet, MailboxGetCommand{AccountId: mailboxAccountId}, "0") + for i, accountId := range uniqueAccountIds { + calls[i+1] = invocation(CommandIdentityGet, IdentityGetCommand{AccountId: accountId}, strconv.Itoa(i+1)) + } + + cmd, err := j.request(session, logger, calls...) + if err != nil { + return IdentitiesAndMailboxesGetResponse{}, "", "", "", err + } + return command(j.api, logger, ctx, session, j.onSessionOutdated, cmd, acceptLanguage, func(body *Response) (IdentitiesAndMailboxesGetResponse, State, Error) { + identities := make(map[string][]Identity, len(uniqueAccountIds)) + stateByAccountId := make(map[string]State, len(uniqueAccountIds)) + notFound := []string{} + for i, accountId := range uniqueAccountIds { + var response IdentityGetResponse + err = retrieveResponseMatchParameters(logger, body, CommandIdentityGet, strconv.Itoa(i+1), &response) + if err != nil { + return IdentitiesAndMailboxesGetResponse{}, "", err + } else { + identities[accountId] = response.List + } + stateByAccountId[accountId] = response.State + notFound = append(notFound, response.NotFound...) 
+ } + + var mailboxResponse MailboxGetResponse + err = retrieveResponseMatchParameters(logger, body, CommandMailboxGet, "0", &mailboxResponse) + if err != nil { + return IdentitiesAndMailboxesGetResponse{}, "", err + } + + return IdentitiesAndMailboxesGetResponse{ + Identities: identities, + NotFound: structs.Uniq(notFound), + Mailboxes: mailboxResponse.List, + }, squashState(stateByAccountId), nil + }) +} + +func (j *Client) CreateIdentity(accountId string, session *Session, ctx context.Context, logger *log.Logger, acceptLanguage string, identity Identity) (Identity, SessionState, State, Language, Error) { + logger = j.logger("CreateIdentity", session, logger) + cmd, err := j.request(session, logger, invocation(CommandIdentitySet, IdentitySetCommand{ + AccountId: accountId, + Create: map[string]Identity{ + "c": identity, + }, + }, "0")) + if err != nil { + return Identity{}, "", "", "", err + } + return command(j.api, logger, ctx, session, j.onSessionOutdated, cmd, acceptLanguage, func(body *Response) (Identity, State, Error) { + var response IdentitySetResponse + err = retrieveResponseMatchParameters(logger, body, CommandIdentitySet, "0", &response) + if err != nil { + return Identity{}, response.NewState, err + } + setErr, notok := response.NotCreated["c"] + if notok { + logger.Error().Msgf("%T.NotCreated returned an error %v", response, setErr) + return Identity{}, "", setErrorError(setErr, IdentityType) + } + return response.Created["c"], response.NewState, nil + }) +} + +func (j *Client) UpdateIdentity(accountId string, session *Session, ctx context.Context, logger *log.Logger, acceptLanguage string, identity Identity) (Identity, SessionState, State, Language, Error) { + logger = j.logger("UpdateIdentity", session, logger) + cmd, err := j.request(session, logger, invocation(CommandIdentitySet, IdentitySetCommand{ + AccountId: accountId, + Update: map[string]PatchObject{ + "c": identity.AsPatch(), + }, + }, "0")) + if err != nil { + return Identity{}, "", "", "", err + } + return command(j.api, logger, ctx, session, j.onSessionOutdated, cmd, acceptLanguage, func(body *Response) (Identity, State, Error) { + var response IdentitySetResponse + err = retrieveResponseMatchParameters(logger, body, CommandIdentitySet, "0", &response) + if err != nil { + return Identity{}, response.NewState, err + } + setErr, notok := response.NotCreated["c"] + if notok { + logger.Error().Msgf("%T.NotCreated returned an error %v", response, setErr) + return Identity{}, "", setErrorError(setErr, IdentityType) + } + return response.Created["c"], response.NewState, nil + }) +} + +func (j *Client) DeleteIdentity(accountId string, session *Session, ctx context.Context, logger *log.Logger, acceptLanguage string, ids []string) ([]string, SessionState, State, Language, Error) { + logger = j.logger("DeleteIdentity", session, logger) + cmd, err := j.request(session, logger, invocation(CommandIdentitySet, IdentitySetCommand{ + AccountId: accountId, + Destroy: ids, + }, "0")) + if err != nil { + return nil, "", "", "", err + } + return command(j.api, logger, ctx, session, j.onSessionOutdated, cmd, acceptLanguage, func(body *Response) ([]string, State, Error) { + var response IdentitySetResponse + err = retrieveResponseMatchParameters(logger, body, CommandIdentitySet, "0", &response) + if err != nil { + return nil, "", err + } + for _, setErr := range response.NotDestroyed { + // TODO only returning the first error here, we should probably aggregate them instead + logger.Error().Msgf("%T.NotCreated returned an error %v", 
response, setErr) + return nil, "", setErrorError(setErr, IdentityType) + } + return response.Destroyed, response.NewState, nil + }) +} diff --git a/pkg/jmap/jmap_api_mailbox.go b/pkg/jmap/jmap_api_mailbox.go new file mode 100644 index 0000000000..4b18829e11 --- /dev/null +++ b/pkg/jmap/jmap_api_mailbox.go @@ -0,0 +1,518 @@ +package jmap + +import ( + "context" + "fmt" + "slices" + + "github.com/opencloud-eu/opencloud/pkg/log" + "github.com/opencloud-eu/opencloud/pkg/structs" + "github.com/rs/zerolog" +) + +type MailboxesResponse struct { + Mailboxes []Mailbox `json:"mailboxes"` + NotFound []any `json:"notFound"` +} + +// https://jmap.io/spec-mail.html#mailboxget +func (j *Client) GetMailbox(accountId string, session *Session, ctx context.Context, logger *log.Logger, acceptLanguage string, ids []string) (MailboxesResponse, SessionState, State, Language, Error) { + logger = j.logger("GetMailbox", session, logger) + + cmd, err := j.request(session, logger, + invocation(CommandMailboxGet, MailboxGetCommand{AccountId: accountId, Ids: ids}, "0"), + ) + if err != nil { + return MailboxesResponse{}, "", "", "", err + } + + return command(j.api, logger, ctx, session, j.onSessionOutdated, cmd, acceptLanguage, func(body *Response) (MailboxesResponse, State, Error) { + var response MailboxGetResponse + err = retrieveResponseMatchParameters(logger, body, CommandMailboxGet, "0", &response) + if err != nil { + return MailboxesResponse{}, "", err + } + return MailboxesResponse{ + Mailboxes: response.List, + NotFound: response.NotFound, + }, response.State, nil + }) +} + +func (j *Client) GetAllMailboxes(accountIds []string, session *Session, ctx context.Context, logger *log.Logger, acceptLanguage string) (map[string][]Mailbox, SessionState, State, Language, Error) { + logger = j.logger("GetAllMailboxes", session, logger) + + uniqueAccountIds := structs.Uniq(accountIds) + n := len(uniqueAccountIds) + if n < 1 { + return nil, "", "", "", nil + } + + invocations := make([]Invocation, n) + for i, accountId := range uniqueAccountIds { + invocations[i] = invocation(CommandMailboxGet, MailboxGetCommand{AccountId: accountId}, mcid(accountId, "0")) + } + + cmd, err := j.request(session, logger, invocations...) 
+ if err != nil { + return nil, "", "", "", err + } + + return command(j.api, logger, ctx, session, j.onSessionOutdated, cmd, acceptLanguage, func(body *Response) (map[string][]Mailbox, State, Error) { + resp := map[string][]Mailbox{} + stateByAccountid := map[string]State{} + for _, accountId := range uniqueAccountIds { + var response MailboxGetResponse + err = retrieveResponseMatchParameters(logger, body, CommandMailboxGet, mcid(accountId, "0"), &response) + if err != nil { + return nil, "", err + } + + resp[accountId] = response.List + stateByAccountid[accountId] = response.State + } + return resp, squashState(stateByAccountid), nil + }) +} + +func (j *Client) SearchMailboxes(accountIds []string, session *Session, ctx context.Context, logger *log.Logger, acceptLanguage string, filter MailboxFilterElement) (map[string][]Mailbox, SessionState, State, Language, Error) { + logger = j.logger("SearchMailboxes", session, logger) + + uniqueAccountIds := structs.Uniq(accountIds) + + invocations := make([]Invocation, len(uniqueAccountIds)*2) + for i, accountId := range uniqueAccountIds { + invocations[i*2+0] = invocation(CommandMailboxQuery, MailboxQueryCommand{AccountId: accountId, Filter: filter}, mcid(accountId, "0")) + invocations[i*2+1] = invocation(CommandMailboxGet, MailboxGetRefCommand{ + AccountId: accountId, + IdsRef: &ResultReference{ + Name: CommandMailboxQuery, + Path: "/ids/*", + ResultOf: mcid(accountId, "0"), + }, + }, mcid(accountId, "1")) + } + cmd, err := j.request(session, logger, invocations...) + if err != nil { + return nil, "", "", "", err + } + + return command(j.api, logger, ctx, session, j.onSessionOutdated, cmd, acceptLanguage, func(body *Response) (map[string][]Mailbox, State, Error) { + resp := map[string][]Mailbox{} + stateByAccountid := map[string]State{} + for _, accountId := range uniqueAccountIds { + var response MailboxGetResponse + err = retrieveResponseMatchParameters(logger, body, CommandMailboxGet, mcid(accountId, "1"), &response) + if err != nil { + return nil, "", err + } + + resp[accountId] = response.List + stateByAccountid[accountId] = response.State + } + return resp, squashState(stateByAccountid), nil + }) +} + +func (j *Client) SearchMailboxIdsPerRole(accountIds []string, session *Session, ctx context.Context, logger *log.Logger, acceptLanguage string, roles []string) (map[string]map[string]string, SessionState, State, Language, Error) { + logger = j.logger("SearchMailboxIdsPerRole", session, logger) + + uniqueAccountIds := structs.Uniq(accountIds) + + invocations := make([]Invocation, len(uniqueAccountIds)*len(roles)) + for i, accountId := range uniqueAccountIds { + for j, role := range roles { + invocations[i*len(roles)+j] = invocation(CommandMailboxQuery, MailboxQueryCommand{AccountId: accountId, Filter: MailboxFilterCondition{Role: role}}, mcid(accountId, role)) + } + } + cmd, err := j.request(session, logger, invocations...) 
+ if err != nil { + return nil, "", "", "", err + } + + return command(j.api, logger, ctx, session, j.onSessionOutdated, cmd, acceptLanguage, func(body *Response) (map[string]map[string]string, State, Error) { + resp := map[string]map[string]string{} + stateByAccountid := map[string]State{} + for _, accountId := range uniqueAccountIds { + mailboxIdsByRole := map[string]string{} + for _, role := range roles { + var response MailboxQueryResponse + err = retrieveResponseMatchParameters(logger, body, CommandMailboxQuery, mcid(accountId, role), &response) + if err != nil { + return nil, "", err + } + if len(response.Ids) == 1 { + mailboxIdsByRole[role] = response.Ids[0] + } + if _, ok := stateByAccountid[accountId]; !ok { + stateByAccountid[accountId] = response.QueryState + } + } + resp[accountId] = mailboxIdsByRole + } + return resp, squashState(stateByAccountid), nil + }) +} + +type MailboxChanges struct { + Destroyed []string `json:"destroyed,omitzero"` + HasMoreChanges bool `json:"hasMoreChanges,omitzero"` + NewState State `json:"newState"` + Created []Email `json:"created,omitempty"` + Updated []Email `json:"updated,omitempty"` +} + +// Retrieve Email changes in a given Mailbox since a given state. +func (j *Client) GetMailboxChanges(accountId string, session *Session, ctx context.Context, logger *log.Logger, acceptLanguage string, mailboxId string, sinceState string, fetchBodies bool, maxBodyValueBytes uint, maxChanges uint) (MailboxChanges, SessionState, State, Language, Error) { + logger = j.loggerParams("GetMailboxChanges", session, logger, func(z zerolog.Context) zerolog.Context { + return z.Bool(logFetchBodies, fetchBodies).Str(logSinceState, sinceState) + }) + + changes := MailboxChangesCommand{ + AccountId: accountId, + SinceState: sinceState, + } + if maxChanges > 0 { + changes.MaxChanges = maxChanges + } + + getCreated := EmailGetRefCommand{ + AccountId: accountId, + FetchAllBodyValues: fetchBodies, + IdsRef: &ResultReference{Name: CommandMailboxChanges, Path: "/created", ResultOf: "0"}, + } + if maxBodyValueBytes > 0 { + getCreated.MaxBodyValueBytes = maxBodyValueBytes + } + getUpdated := EmailGetRefCommand{ + AccountId: accountId, + FetchAllBodyValues: fetchBodies, + IdsRef: &ResultReference{Name: CommandMailboxChanges, Path: "/updated", ResultOf: "0"}, + } + if maxBodyValueBytes > 0 { + getUpdated.MaxBodyValueBytes = maxBodyValueBytes + } + + cmd, err := j.request(session, logger, + invocation(CommandMailboxChanges, changes, "0"), + invocation(CommandEmailGet, getCreated, "1"), + invocation(CommandEmailGet, getUpdated, "2"), + ) + if err != nil { + return MailboxChanges{}, "", "", "", err + } + + return command(j.api, logger, ctx, session, j.onSessionOutdated, cmd, acceptLanguage, func(body *Response) (MailboxChanges, State, Error) { + var mailboxResponse MailboxChangesResponse + err = retrieveResponseMatchParameters(logger, body, CommandMailboxChanges, "0", &mailboxResponse) + if err != nil { + return MailboxChanges{}, "", err + } + + var createdResponse EmailGetResponse + err = retrieveResponseMatchParameters(logger, body, CommandEmailGet, "1", &createdResponse) + if err != nil { + logger.Error().Err(err).Send() + return MailboxChanges{}, "", err + } + + var updatedResponse EmailGetResponse + err = retrieveResponseMatchParameters(logger, body, CommandEmailGet, "2", &updatedResponse) + if err != nil { + logger.Error().Err(err).Send() + return MailboxChanges{}, "", err + } + + return MailboxChanges{ + Destroyed: mailboxResponse.Destroyed, + HasMoreChanges: 
mailboxResponse.HasMoreChanges,
+			NewState:       mailboxResponse.NewState,
+			Created:        createdResponse.List,
+			Updated:        updatedResponse.List,
+		}, createdResponse.State, nil
+	})
+}
+
+// Retrieve Email changes in Mailboxes of multiple Accounts.
+func (j *Client) GetMailboxChangesForMultipleAccounts(accountIds []string, session *Session, ctx context.Context, logger *log.Logger, acceptLanguage string, sinceStateMap map[string]string, fetchBodies bool, maxBodyValueBytes uint, maxChanges uint) (map[string]MailboxChanges, SessionState, State, Language, Error) {
+	logger = j.loggerParams("GetMailboxChangesForMultipleAccounts", session, logger, func(z zerolog.Context) zerolog.Context {
+		sinceStateLogDict := zerolog.Dict()
+		for k, v := range sinceStateMap {
+			sinceStateLogDict.Str(log.SafeString(k), log.SafeString(v))
+		}
+		return z.Bool(logFetchBodies, fetchBodies).Dict(logSinceState, sinceStateLogDict)
+	})
+
+	uniqueAccountIds := structs.Uniq(accountIds)
+	n := len(uniqueAccountIds)
+	if n < 1 {
+		return map[string]MailboxChanges{}, "", "", "", nil
+	}
+
+	invocations := make([]Invocation, n*3)
+	for i, accountId := range uniqueAccountIds {
+		changes := MailboxChangesCommand{
+			AccountId: accountId,
+		}
+
+		sinceState, ok := sinceStateMap[accountId]
+		if ok {
+			changes.SinceState = sinceState
+		}
+
+		if maxChanges > 0 {
+			changes.MaxChanges = maxChanges
+		}
+
+		getCreated := EmailGetRefCommand{
+			AccountId:          accountId,
+			FetchAllBodyValues: fetchBodies,
+			IdsRef:             &ResultReference{Name: CommandMailboxChanges, Path: "/created", ResultOf: mcid(accountId, "0")},
+		}
+		if maxBodyValueBytes > 0 {
+			getCreated.MaxBodyValueBytes = maxBodyValueBytes
+		}
+		getUpdated := EmailGetRefCommand{
+			AccountId:          accountId,
+			FetchAllBodyValues: fetchBodies,
+			IdsRef:             &ResultReference{Name: CommandMailboxChanges, Path: "/updated", ResultOf: mcid(accountId, "0")},
+		}
+		if maxBodyValueBytes > 0 {
+			getUpdated.MaxBodyValueBytes = maxBodyValueBytes
+		}
+
+		invocations[i*3+0] = invocation(CommandMailboxChanges, changes, mcid(accountId, "0"))
+		invocations[i*3+1] = invocation(CommandEmailGet, getCreated, mcid(accountId, "1"))
+		invocations[i*3+2] = invocation(CommandEmailGet, getUpdated, mcid(accountId, "2"))
+	}
+
+	cmd, err := j.request(session, logger, invocations...)
+	if err != nil {
+		return nil, "", "", "", err
+	}
+
+	return command(j.api, logger, ctx, session, j.onSessionOutdated, cmd, acceptLanguage, func(body *Response) (map[string]MailboxChanges, State, Error) {
+		resp := make(map[string]MailboxChanges, n)
+		stateByAccountId := make(map[string]State, n)
+		for _, accountId := range uniqueAccountIds {
+			var mailboxResponse MailboxChangesResponse
+			err = retrieveResponseMatchParameters(logger, body, CommandMailboxChanges, mcid(accountId, "0"), &mailboxResponse)
+			if err != nil {
+				return nil, "", err
+			}
+
+			var createdResponse EmailGetResponse
+			err = retrieveResponseMatchParameters(logger, body, CommandEmailGet, mcid(accountId, "1"), &createdResponse)
+			if err != nil {
+				return nil, "", err
+			}
+
+			var updatedResponse EmailGetResponse
+			err = retrieveResponseMatchParameters(logger, body, CommandEmailGet, mcid(accountId, "2"), &updatedResponse)
+			if err != nil {
+				return nil, "", err
+			}
+
+			resp[accountId] = MailboxChanges{
+				Destroyed:      mailboxResponse.Destroyed,
+				HasMoreChanges: mailboxResponse.HasMoreChanges,
+				NewState:       mailboxResponse.NewState,
+				Created:        createdResponse.List,
+				Updated:        updatedResponse.List,
+			}
+			stateByAccountId[accountId] = createdResponse.State
+		}
+
+		return resp, squashState(stateByAccountId), nil
+	})
+}
+
+func (j *Client) GetMailboxRolesForMultipleAccounts(accountIds []string, session *Session, ctx context.Context, logger *log.Logger, acceptLanguage string) (map[string][]string, SessionState, State, Language, Error) {
+	logger = j.logger("GetMailboxRolesForMultipleAccounts", session, logger)
+
+	uniqueAccountIds := structs.Uniq(accountIds)
+	n := len(uniqueAccountIds)
+	if n < 1 {
+		return nil, "", "", "", nil
+	}
+
+	t := true
+
+	invocations := make([]Invocation, n*2)
+	for i, accountId := range uniqueAccountIds {
+		invocations[i*2+0] = invocation(CommandMailboxQuery, MailboxQueryCommand{
+			AccountId: accountId,
+			Filter: MailboxFilterCondition{
+				HasAnyRole: &t,
+			},
+		}, mcid(accountId, "0"))
+		invocations[i*2+1] = invocation(CommandMailboxGet, MailboxGetRefCommand{
+			AccountId: accountId,
+			IdsRef: &ResultReference{
+				ResultOf: mcid(accountId, "0"),
+				Name:     CommandMailboxQuery,
+				Path:     "/ids",
+			},
+		}, mcid(accountId, "1"))
+	}
+
+	cmd, err := j.request(session, logger, invocations...)
+	if err != nil {
+		return nil, "", "", "", err
+	}
+
+	return command(j.api, logger, ctx, session, j.onSessionOutdated, cmd, acceptLanguage, func(body *Response) (map[string][]string, State, Error) {
+		resp := make(map[string][]string, n)
+		stateByAccountId := make(map[string]State, n)
+		for _, accountId := range uniqueAccountIds {
+			var getResponse MailboxGetResponse
+			err = retrieveResponseMatchParameters(logger, body, CommandMailboxGet, mcid(accountId, "1"), &getResponse)
+			if err != nil {
+				return nil, "", err
+			}
+			roles := make([]string, len(getResponse.List))
+			for i, mailbox := range getResponse.List {
+				roles[i] = mailbox.Role
+			}
+			slices.Sort(roles)
+			resp[accountId] = roles
+			stateByAccountId[accountId] = getResponse.State
+		}
+		return resp, squashState(stateByAccountId), nil
+	})
+}
+
+func (j *Client) GetInboxNameForMultipleAccounts(accountIds []string, session *Session, ctx context.Context, logger *log.Logger, acceptLanguage string) (map[string]string, SessionState, State, Language, Error) {
+	logger = j.logger("GetInboxNameForMultipleAccounts", session, logger)
+
+	uniqueAccountIds := structs.Uniq(accountIds)
+	n := len(uniqueAccountIds)
+	if n < 1 {
+		return nil, "", "", "", nil
+	}
+
+	// a single Mailbox/query invocation per account, filtered on the inbox role
+	invocations := make([]Invocation, n)
+	for i, accountId := range uniqueAccountIds {
+		invocations[i] = invocation(CommandMailboxQuery, MailboxQueryCommand{
+			AccountId: accountId,
+			Filter: MailboxFilterCondition{
+				Role: JmapMailboxRoleInbox,
+			},
+		}, mcid(accountId, "0"))
+	}
+
+	cmd, err := j.request(session, logger, invocations...)
+	if err != nil {
+		return nil, "", "", "", err
+	}
+
+	return command(j.api, logger, ctx, session, j.onSessionOutdated, cmd, acceptLanguage, func(body *Response) (map[string]string, State, Error) {
+		resp := make(map[string]string, n)
+		stateByAccountId := make(map[string]State, n)
+		for _, accountId := range uniqueAccountIds {
+			var r MailboxQueryResponse
+			err = retrieveResponseMatchParameters(logger, body, CommandMailboxQuery, mcid(accountId, "0"), &r)
+			if err != nil {
+				return nil, "", err
+			}
+			switch len(r.Ids) {
+			case 0:
+				// skip: account has no inbox?
+			case 1:
+				resp[accountId] = r.Ids[0]
+				stateByAccountId[accountId] = r.QueryState
+			default:
+				logger.Warn().Msgf("multiple ids for mailbox role='%v' for accountId='%v'", JmapMailboxRoleInbox, accountId)
+				resp[accountId] = r.Ids[0]
+				stateByAccountId[accountId] = r.QueryState
+			}
+		}
+		return resp, squashState(stateByAccountId), nil
+	})
+}
+
+func (j *Client) UpdateMailbox(accountId string, session *Session, ctx context.Context, logger *log.Logger, acceptLanguage string, mailboxId string, ifInState string, update MailboxChange) (Mailbox, SessionState, State, Language, Error) {
+	logger = j.logger("UpdateMailbox", session, logger)
+	cmd, err := j.request(session, logger, invocation(CommandMailboxSet, MailboxSetCommand{
+		AccountId: accountId,
+		IfInState: ifInState,
+		Update: map[string]PatchObject{
+			mailboxId: update.AsPatch(),
+		},
+	}, "0"))
+	if err != nil {
+		return Mailbox{}, "", "", "", err
+	}
+
+	return command(j.api, logger, ctx, session, j.onSessionOutdated, cmd, acceptLanguage, func(body *Response) (Mailbox, State, Error) {
+		var setResp MailboxSetResponse
+		err = retrieveResponseMatchParameters(logger, body, CommandMailboxSet, "0", &setResp)
+		if err != nil {
+			return Mailbox{}, "", err
+		}
+		// the Mailbox/set response maps are keyed by the id of the updated mailbox
+		setErr, notok := setResp.NotUpdated[mailboxId]
+		if notok {
+			logger.Error().Msgf("%T.NotUpdated returned an error %v", setResp, setErr)
+			return Mailbox{}, "", setErrorError(setErr, MailboxType)
+		}
+		return setResp.Updated[mailboxId], setResp.NewState, nil
+	})
+}
+
+func (j *Client) CreateMailbox(accountId string, session *Session, ctx context.Context, logger *log.Logger, acceptLanguage string, ifInState string, create MailboxChange) (Mailbox, SessionState, State, Language, Error) {
+	logger = j.logger("CreateMailbox", session, logger)
+	cmd, err := j.request(session, logger, invocation(CommandMailboxSet, MailboxSetCommand{
+		AccountId: accountId,
+		IfInState: ifInState,
+		Create: map[string]MailboxChange{
+			"c": create,
+		},
+	}, "0"))
+	if err != nil {
+		return Mailbox{}, "", "", "", err
+	}
+
+	return command(j.api, logger, ctx, session, j.onSessionOutdated, cmd, acceptLanguage, func(body *Response) (Mailbox, State, Error) {
+		var setResp MailboxSetResponse
+		err = retrieveResponseMatchParameters(logger, body, CommandMailboxSet, "0", &setResp)
+		if err != nil {
+			return Mailbox{}, "", err
+		}
+		setErr, notok := setResp.NotCreated["c"]
+		if notok {
+			logger.Error().Msgf("%T.NotCreated returned an error %v", setResp, setErr)
+			return Mailbox{}, "", setErrorError(setErr, MailboxType)
+		}
+		if mailbox, ok := setResp.Created["c"]; ok {
+			return mailbox, setResp.NewState, nil
+		} else {
+			return Mailbox{}, "", simpleError(fmt.Errorf("failed to find created %T in response", Mailbox{}), JmapErrorMissingCreatedObject)
+		}
+	})
+}
+
+func (j *Client) DeleteMailboxes(accountId string, session *Session, ctx context.Context, logger *log.Logger, acceptLanguage string, ifInState string, mailboxIds []string) ([]string, SessionState, State, Language, Error) {
+	logger = j.logger("DeleteMailboxes", session, logger)
+	cmd, err := j.request(session, logger, invocation(CommandMailboxSet, MailboxSetCommand{
+		AccountId: accountId,
+		IfInState: ifInState,
+		Destroy:   mailboxIds,
+	}, "0"))
+	if err != nil {
+		return nil, "", "", "", err
+	}
+
+	return command(j.api, logger, ctx, session, j.onSessionOutdated, cmd, acceptLanguage, func(body *Response) ([]string, State, Error) {
+		var setResp MailboxSetResponse
+		err = retrieveResponseMatchParameters(logger, body, CommandMailboxSet, "0", &setResp)
+		if err != nil {
return nil, "", err + } + setErr, notok := setResp.NotUpdated["u"] + if notok { + logger.Error().Msgf("%T.NotUpdated returned an error %v", setResp, setErr) + return nil, "", setErrorError(setErr, MailboxType) + } + return setResp.Destroyed, setResp.NewState, nil + }) +} diff --git a/pkg/jmap/jmap_api_quota.go b/pkg/jmap/jmap_api_quota.go new file mode 100644 index 0000000000..14ba53dc83 --- /dev/null +++ b/pkg/jmap/jmap_api_quota.go @@ -0,0 +1,35 @@ +package jmap + +import ( + "context" + + "github.com/opencloud-eu/opencloud/pkg/log" + "github.com/opencloud-eu/opencloud/pkg/structs" +) + +func (j *Client) GetQuotas(accountIds []string, session *Session, ctx context.Context, logger *log.Logger, acceptLanguage string) (map[string]QuotaGetResponse, SessionState, State, Language, Error) { + logger = j.logger("GetQuotas", session, logger) + + uniqueAccountIds := structs.Uniq(accountIds) + + invocations := make([]Invocation, len(uniqueAccountIds)) + for i, accountId := range uniqueAccountIds { + invocations[i] = invocation(CommandQuotaGet, MailboxQueryCommand{AccountId: accountId}, mcid(accountId, "0")) + } + cmd, err := j.request(session, logger, invocations...) + if err != nil { + return nil, "", "", "", err + } + return command(j.api, logger, ctx, session, j.onSessionOutdated, cmd, acceptLanguage, func(body *Response) (map[string]QuotaGetResponse, State, Error) { + result := map[string]QuotaGetResponse{} + for _, accountId := range uniqueAccountIds { + var response QuotaGetResponse + err = retrieveResponseMatchParameters(logger, body, CommandQuotaGet, mcid(accountId, "0"), &response) + if err != nil { + return nil, "", err + } + result[accountId] = response + } + return result, squashStateFunc(result, func(q QuotaGetResponse) State { return q.State }), nil + }) +} diff --git a/pkg/jmap/jmap_api_vacation.go b/pkg/jmap/jmap_api_vacation.go new file mode 100644 index 0000000000..ef553e5de8 --- /dev/null +++ b/pkg/jmap/jmap_api_vacation.go @@ -0,0 +1,108 @@ +package jmap + +import ( + "context" + "fmt" + "time" + + "github.com/opencloud-eu/opencloud/pkg/log" +) + +const ( + vacationResponseId = "singleton" +) + +// https://jmap.io/spec-mail.html#vacationresponseget +func (j *Client) GetVacationResponse(accountId string, session *Session, ctx context.Context, logger *log.Logger, acceptLanguage string) (VacationResponseGetResponse, SessionState, State, Language, Error) { + logger = j.logger("GetVacationResponse", session, logger) + cmd, err := j.request(session, logger, invocation(CommandVacationResponseGet, VacationResponseGetCommand{AccountId: accountId}, "0")) + if err != nil { + return VacationResponseGetResponse{}, "", "", "", err + } + return command(j.api, logger, ctx, session, j.onSessionOutdated, cmd, acceptLanguage, func(body *Response) (VacationResponseGetResponse, State, Error) { + var response VacationResponseGetResponse + err = retrieveResponseMatchParameters(logger, body, CommandVacationResponseGet, "0", &response) + if err != nil { + return VacationResponseGetResponse{}, "", err + } + return response, response.State, nil + }) +} + +// Same as VacationResponse but without the id. +type VacationResponsePayload struct { + // Should a vacation response be sent if a message arrives between the "fromDate" and "toDate"? + IsEnabled bool `json:"isEnabled"` + // If "isEnabled" is true, messages that arrive on or after this date-time (but before the "toDate" if defined) should receive the + // user's vacation response. If null, the vacation response is effective immediately. 
+ FromDate time.Time `json:"fromDate,omitzero"` + // If "isEnabled" is true, messages that arrive before this date-time but on or after the "fromDate" if defined) should receive the + // user's vacation response. If null, the vacation response is effective indefinitely. + ToDate time.Time `json:"toDate,omitzero"` + // The subject that will be used by the message sent in response to messages when the vacation response is enabled. + // If null, an appropriate subject SHOULD be set by the server. + Subject string `json:"subject,omitempty"` + // The plaintext body to send in response to messages when the vacation response is enabled. + // If this is null, the server SHOULD generate a plaintext body part from the "htmlBody" when sending vacation responses + // but MAY choose to send the response as HTML only. If both "textBody" and "htmlBody" are null, an appropriate default + // body SHOULD be generated for responses by the server. + TextBody string `json:"textBody,omitempty"` + // The HTML body to send in response to messages when the vacation response is enabled. + // If this is null, the server MAY choose to generate an HTML body part from the "textBody" when sending vacation responses + // or MAY choose to send the response as plaintext only. + HtmlBody string `json:"htmlBody,omitempty"` +} + +func (j *Client) SetVacationResponse(accountId string, vacation VacationResponsePayload, session *Session, ctx context.Context, logger *log.Logger, acceptLanguage string) (VacationResponse, SessionState, State, Language, Error) { + logger = j.logger("SetVacationResponse", session, logger) + + cmd, err := j.request(session, logger, + invocation(CommandVacationResponseSet, VacationResponseSetCommand{ + AccountId: accountId, + Create: map[string]VacationResponse{ + vacationResponseId: { + IsEnabled: vacation.IsEnabled, + FromDate: vacation.FromDate, + ToDate: vacation.ToDate, + Subject: vacation.Subject, + TextBody: vacation.TextBody, + HtmlBody: vacation.HtmlBody, + }, + }, + }, "0"), + // chain a second request to get the current complete VacationResponse object + // after performing the changes, as that makes for a better API + invocation(CommandVacationResponseGet, VacationResponseGetCommand{AccountId: accountId}, "1"), + ) + if err != nil { + return VacationResponse{}, "", "", "", err + } + return command(j.api, logger, ctx, session, j.onSessionOutdated, cmd, acceptLanguage, func(body *Response) (VacationResponse, State, Error) { + var setResponse VacationResponseSetResponse + err = retrieveResponseMatchParameters(logger, body, CommandVacationResponseSet, "0", &setResponse) + if err != nil { + return VacationResponse{}, "", err + } + + setErr, notok := setResponse.NotCreated[vacationResponseId] + if notok { + // this means that the VacationResponse was not updated + logger.Error().Msgf("%T.NotCreated contains an error: %v", setResponse, setErr) + return VacationResponse{}, "", setErrorError(setErr, VacationResponseType) + } + + var getResponse VacationResponseGetResponse + err = retrieveResponseMatchParameters(logger, body, CommandVacationResponseGet, "1", &getResponse) + if err != nil { + return VacationResponse{}, "", err + } + + if len(getResponse.List) != 1 { + berr := fmt.Errorf("failed to find %s in %s response", string(VacationResponseType), string(CommandVacationResponseGet)) + logger.Error().Msg(berr.Error()) + return VacationResponse{}, "", simpleError(berr, JmapErrorInvalidJmapResponsePayload) + } + + return getResponse.List[0], setResponse.NewState, nil + }) +} diff --git 
a/pkg/jmap/jmap_api_ws.go b/pkg/jmap/jmap_api_ws.go new file mode 100644 index 0000000000..3af91c454f --- /dev/null +++ b/pkg/jmap/jmap_api_ws.go @@ -0,0 +1,9 @@ +package jmap + +func (j *Client) EnablePushNotifications(pushState State, sessionProvider func() (*Session, error)) (WsClient, error) { + return j.ws.EnableNotifications(pushState, sessionProvider, j) +} + +func (j *Client) AddWsPushListener(listener WsPushListener) { + j.wsPushListeners.add(listener) +} diff --git a/pkg/jmap/jmap_client.go b/pkg/jmap/jmap_client.go new file mode 100644 index 0000000000..b16d4a012d --- /dev/null +++ b/pkg/jmap/jmap_client.go @@ -0,0 +1,103 @@ +package jmap + +import ( + "errors" + "io" + "net/url" + + "github.com/opencloud-eu/opencloud/pkg/log" + "github.com/rs/zerolog" +) + +type Client struct { + session SessionClient + api ApiClient + blob BlobClient + ws WsClientFactory + sessionEventListeners *eventListeners[SessionEventListener] + wsPushListeners *eventListeners[WsPushListener] + io.Closer + WsPushListener +} + +var _ io.Closer = &Client{} +var _ WsPushListener = &Client{} + +func (j *Client) Close() error { + return errors.Join(j.api.Close(), j.session.Close(), j.blob.Close(), j.ws.Close()) +} + +func NewClient(session SessionClient, api ApiClient, blob BlobClient, ws WsClientFactory) Client { + return Client{ + session: session, + api: api, + blob: blob, + ws: ws, + sessionEventListeners: newEventListeners[SessionEventListener](), + wsPushListeners: newEventListeners[WsPushListener](), + } +} + +func (j *Client) AddSessionEventListener(listener SessionEventListener) { + j.sessionEventListeners.add(listener) +} + +func (j *Client) onSessionOutdated(session *Session, newSessionState SessionState) { + j.sessionEventListeners.signal(func(listener SessionEventListener) { + listener.OnSessionOutdated(session, newSessionState) + }) +} + +func (j *Client) OnNotification(username string, stateChange StateChange) { + j.wsPushListeners.signal(func(listener WsPushListener) { + listener.OnNotification(username, stateChange) + }) +} + +// Retrieve JMAP well-known data from the Stalwart server and create a Session from that. +func (j *Client) FetchSession(sessionUrl *url.URL, username string, logger *log.Logger) (Session, Error) { + wk, err := j.session.GetSession(sessionUrl, username, logger) + if err != nil { + return Session{}, err + } + return newSession(wk) +} + +func (j *Client) logger(operation string, _ *Session, logger *log.Logger) *log.Logger { + l := logger.With().Str(logOperation, operation) + return log.From(l) +} + +func (j *Client) loggerParams(operation string, _ *Session, logger *log.Logger, params func(zerolog.Context) zerolog.Context) *log.Logger { + l := logger.With().Str(logOperation, operation) + if params != nil { + l = params(l) + } + return log.From(l) +} + +func (j *Client) maxCallsCheck(calls int, session *Session, logger *log.Logger) Error { + if calls > session.Capabilities.Core.MaxCallsInRequest { + logger.Warn(). + Int("max-calls-in-request", session.Capabilities.Core.MaxCallsInRequest). + Int("calls-in-request", calls). + Msgf("number of calls in request payload (%d) would exceed the allowed maximum (%d)", session.Capabilities.Core.MaxCallsInRequest, calls) + return simpleError(errTooManyMethodCalls, JmapErrorTooManyMethodCalls) + } + return nil +} + +// Construct a Request from the given list of Invocation objects. +// +// If an issue occurs, then it is logged prior to returning it. 
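+//
+// For illustration, a single-method request could be built as
+//
+//   req, jerr := j.request(session, logger, invocation(CommandVacationResponseGet, VacationResponseGetCommand{AccountId: accountId}, "0"))
+//
+// where "0" is the method call id that is later matched against the corresponding entry in the response.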
+func (j *Client) request(session *Session, logger *log.Logger, methodCalls ...Invocation) (Request, Error) { + err := j.maxCallsCheck(len(methodCalls), session, logger) + if err != nil { + return Request{}, err + } + return Request{ + Using: []string{JmapCore, JmapMail, JmapContacts}, + MethodCalls: methodCalls, + CreatedIds: nil, + }, nil +} diff --git a/pkg/jmap/jmap_error.go b/pkg/jmap/jmap_error.go new file mode 100644 index 0000000000..d5d9b5c6d1 --- /dev/null +++ b/pkg/jmap/jmap_error.go @@ -0,0 +1,89 @@ +package jmap + +import ( + "errors" + "fmt" + "strings" +) + +const ( + JmapErrorAuthenticationFailed = iota + JmapErrorInvalidHttpRequest + JmapErrorServerResponse + JmapErrorReadingResponseBody + JmapErrorDecodingResponseBody + JmapErrorEncodingRequestBody + JmapErrorCreatingRequest + JmapErrorSendingRequest + JmapErrorInvalidSessionResponse + JmapErrorInvalidJmapRequestPayload + JmapErrorInvalidJmapResponsePayload + JmapErrorSetError + JmapErrorTooManyMethodCalls + JmapErrorUnspecifiedType + JmapErrorServerUnavailable + JmapErrorServerFail + JmapErrorUnknownMethod + JmapErrorInvalidArguments + JmapErrorInvalidResultReference + JmapErrorForbidden + JmapErrorAccountNotFound + JmapErrorAccountNotSupportedByMethod + JmapErrorAccountReadOnly + JmapErrorFailedToEstablishWssConnection + JmapErrorWssConnectionResponseMissingJmapSubprotocol + JmapErrorWssFailedToSendWebSocketPushEnable + JmapErrorWssFailedToSendWebSocketPushDisable + JmapErrorWssFailedToClose + JmapErrorWssFailedToRetrieveSession + JmapErrorSocketPushUnsupported + JmapErrorMissingCreatedObject +) + +var ( + errTooManyMethodCalls = errors.New("the amount of methodCalls in the request body would exceed the maximum that is configured in the session") +) + +type Error interface { + Code() int + error +} + +type SimpleError struct { + code int + err error +} + +var _ Error = &SimpleError{} + +func (e SimpleError) Code() int { + return e.code +} +func (e SimpleError) Unwrap() error { + return e.err +} +func (e SimpleError) Error() string { + if e.err != nil { + return e.err.Error() + } else { + return "" + } +} + +func simpleError(err error, code int) Error { + if err != nil { + return SimpleError{code: code, err: err} + } else { + return nil + } +} + +func setErrorError(err SetError, objectType ObjectType) Error { + var e error + if len(err.Properties) > 0 { + e = fmt.Errorf("failed to modify %s due to %s error in properties [%s]: %s", objectType, err.Type, strings.Join(err.Properties, ", "), err.Description) + } else { + e = fmt.Errorf("failed to modify %s due to %s error: %s", objectType, err.Type, err.Description) + } + return SimpleError{code: JmapErrorSetError, err: e} +} diff --git a/pkg/jmap/jmap_http.go b/pkg/jmap/jmap_http.go new file mode 100644 index 0000000000..a7851f8c12 --- /dev/null +++ b/pkg/jmap/jmap_http.go @@ -0,0 +1,651 @@ +package jmap + +import ( + "bytes" + "context" + "encoding/base64" + "encoding/json" + "errors" + "fmt" + "io" + "net/http" + "net/http/httputil" + "net/url" + "slices" + "strconv" + + "github.com/gorilla/websocket" + "github.com/opencloud-eu/opencloud/pkg/log" + "github.com/opencloud-eu/opencloud/pkg/version" +) + +// Implementation of ApiClient, SessionClient and BlobClient that uses +// HTTP to perform JMAP operations. 
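+//
+// Typical wiring (credentials here are placeholder values): a single HttpJmapClient can back all three
+// transport roles of the jmap.Client facade, e.g.
+//
+//   h := NewHttpJmapClient(&http.Client{}, "master", "master-secret", NullHttpJmapApiClientEventListener())
+//   c := NewClient(h, h, h, wsFactory)
+//
+// where wsFactory is a WsClientFactory such as the one returned by NewHttpWsClientFactory.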
+type HttpJmapClient struct { + client *http.Client + masterUser string + masterPassword string + userAgent string + listener HttpJmapApiClientEventListener +} + +var ( + _ ApiClient = &HttpJmapClient{} + _ SessionClient = &HttpJmapClient{} + _ BlobClient = &HttpJmapClient{} +) + +const ( + logEndpoint = "endpoint" + logHttpStatus = "status" + logHttpStatusCode = "status-code" + logHttpUrl = "url" + logProto = "proto" + logProtoJmap = "jmap" + logProtoJmapWs = "jmapws" + logType = "type" + logTypeRequest = "request" + logTypeResponse = "response" + logTypePush = "push" +) + +/* +func bearer(req *http.Request, token string) { + req.Header.Add("Authorization", "Bearer "+base64.StdEncoding.EncodeToString([]byte(token))) +} +*/ + +// Record JMAP HTTP execution events that may occur, e.g. using metrics. +type HttpJmapApiClientEventListener interface { + OnSuccessfulRequest(endpoint string, status int) + OnFailedRequest(endpoint string, err error) + OnFailedRequestWithStatus(endpoint string, status int) + OnResponseBodyReadingError(endpoint string, err error) + OnResponseBodyUnmarshallingError(endpoint string, err error) + OnSuccessfulWsRequest(endpoint string, status int) + OnFailedWsHandshakeRequestWithStatus(endpoint string, status int) +} + +type nullHttpJmapApiClientEventListener struct { +} + +func (l nullHttpJmapApiClientEventListener) OnSuccessfulRequest(endpoint string, status int) { +} +func (l nullHttpJmapApiClientEventListener) OnFailedRequest(endpoint string, err error) { +} +func (l nullHttpJmapApiClientEventListener) OnFailedRequestWithStatus(endpoint string, status int) { +} +func (l nullHttpJmapApiClientEventListener) OnResponseBodyReadingError(endpoint string, err error) { +} +func (l nullHttpJmapApiClientEventListener) OnResponseBodyUnmarshallingError(endpoint string, err error) { +} +func (l nullHttpJmapApiClientEventListener) OnSuccessfulWsRequest(endpoint string, status int) { +} +func (l nullHttpJmapApiClientEventListener) OnFailedWsHandshakeRequestWithStatus(endpoint string, status int) { +} + +var _ HttpJmapApiClientEventListener = nullHttpJmapApiClientEventListener{} + +// An implementation of HttpJmapApiClientMetricsRecorder that does nothing. 
+func NullHttpJmapApiClientEventListener() HttpJmapApiClientEventListener { + return nullHttpJmapApiClientEventListener{} +} + +func NewHttpJmapClient(client *http.Client, masterUser string, masterPassword string, listener HttpJmapApiClientEventListener) *HttpJmapClient { + return &HttpJmapClient{ + client: client, + masterUser: masterUser, + masterPassword: masterPassword, + userAgent: "OpenCloud/" + version.GetString(), + listener: listener, + } +} + +func (h *HttpJmapClient) Close() error { + h.client.CloseIdleConnections() + return nil +} + +type AuthenticationError struct { + Err error +} + +func (e AuthenticationError) Error() string { + return fmt.Sprintf("failed to find user for authentication: %v", e.Err.Error()) +} +func (e AuthenticationError) Unwrap() error { + return e.Err +} + +func (h *HttpJmapClient) auth(username string, _ *log.Logger, req *http.Request) error { + masterUsername := username + "%" + h.masterUser + req.SetBasicAuth(masterUsername, h.masterPassword) + return nil +} + +var ( + errNilBaseUrl = errors.New("sessionUrl is nil") +) + +func (h *HttpJmapClient) GetSession(sessionUrl *url.URL, username string, logger *log.Logger) (SessionResponse, Error) { + if sessionUrl == nil { + logger.Error().Msg("sessionUrl is nil") + return SessionResponse{}, SimpleError{code: JmapErrorInvalidHttpRequest, err: errNilBaseUrl} + } + // See the JMAP specification on Service Autodiscovery: https://jmap.io/spec-core.html#service-autodiscovery + // There are two standardised autodiscovery methods in use for Internet protocols: + // - DNS SRV (see [@!RFC2782], [@!RFC6186], and [@!RFC6764]) + // - .well-known/servicename (see [@!RFC8615]) + // We are currently only supporting RFC8615, using the baseurl that was configured in this HttpJmapApiClient. 
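+ //
+ // Illustration (hypothetical host, not part of this change): for a server reachable at
+ // https://mail.example.com, RFC 8615 discovery resolves the session resource to
+ // https://mail.example.com/.well-known/jmap, so a caller would do something like
+ //
+ //   sessionUrl, _ := url.Parse("https://mail.example.com/.well-known/jmap")
+ //   wk, jerr := httpClient.GetSession(sessionUrl, "alice", logger)
+ //
+ // where httpClient is an *HttpJmapClient and "alice" is the mailbox user impersonated via the master account.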
+ //sessionUrl := baseurl.JoinPath(".well-known", "jmap") + sessionUrlStr := sessionUrl.String() + endpoint := endpointOf(sessionUrl) + logger = log.From(logger.With().Str(logEndpoint, endpoint)) + + req, err := http.NewRequest(http.MethodGet, sessionUrlStr, nil) + if err != nil { + logger.Error().Err(err).Msgf("failed to create GET request for %v", sessionUrl) + return SessionResponse{}, SimpleError{code: JmapErrorInvalidHttpRequest, err: err} + } + h.auth(username, logger, req) + req.Header.Add("Cache-Control", "no-cache, no-store, must-revalidate") // spec recommendation + + res, err := h.client.Do(req) + if err != nil { + h.listener.OnFailedRequest(endpoint, err) + logger.Error().Err(err).Msgf("failed to perform GET %v", sessionUrl) + return SessionResponse{}, SimpleError{code: JmapErrorInvalidHttpRequest, err: err} + } + if res.StatusCode < 200 || res.StatusCode > 299 { + h.listener.OnFailedRequestWithStatus(endpoint, res.StatusCode) + logger.Error().Str(logHttpStatus, log.SafeString(res.Status)).Int(logHttpStatusCode, res.StatusCode).Msg("HTTP response status code is not 200") + return SessionResponse{}, SimpleError{code: JmapErrorServerResponse, err: fmt.Errorf("JMAP API response status is %v", res.Status)} + } + h.listener.OnSuccessfulRequest(endpoint, res.StatusCode) + + if res.Body != nil { + defer func(Body io.ReadCloser) { + err := Body.Close() + if err != nil { + logger.Error().Err(err).Msg("failed to close response body") + } + }(res.Body) + } + + body, err := io.ReadAll(res.Body) + if err != nil { + logger.Error().Err(err).Msg("failed to read response body") + h.listener.OnResponseBodyReadingError(endpoint, err) + return SessionResponse{}, SimpleError{code: JmapErrorReadingResponseBody, err: err} + } + + var data SessionResponse + err = json.Unmarshal(body, &data) + if err != nil { + logger.Error().Str(logHttpUrl, log.SafeString(sessionUrlStr)).Err(err).Msg("failed to decode JSON payload from .well-known/jmap response") + h.listener.OnResponseBodyUnmarshallingError(endpoint, err) + return SessionResponse{}, SimpleError{code: JmapErrorDecodingResponseBody, err: err} + } + + return data, nil +} + +func (h *HttpJmapClient) Command(ctx context.Context, logger *log.Logger, session *Session, request Request, acceptLanguage string) ([]byte, Language, Error) { + jmapUrl := session.JmapUrl.String() + endpoint := session.JmapEndpoint + logger = log.From(logger.With().Str(logEndpoint, endpoint)) + + bodyBytes, err := json.Marshal(request) + if err != nil { + logger.Error().Err(err).Msg("failed to marshall JSON payload") + return nil, "", SimpleError{code: JmapErrorEncodingRequestBody, err: err} + } + + req, err := http.NewRequestWithContext(ctx, http.MethodPost, jmapUrl, bytes.NewBuffer(bodyBytes)) + if err != nil { + logger.Error().Err(err).Msgf("failed to create POST request for %v", jmapUrl) + return nil, "", SimpleError{code: JmapErrorCreatingRequest, err: err} + } + + // Some JMAP APIs use the Accept-Language header to determine which language to use to translate + // texts in attributes. 
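+ // For example (illustrative value only), a caller could pass acceptLanguage = "de-DE, de;q=0.9, en;q=0.8";
+ // the language the server actually picked is reported back via the Content-Language response header and
+ // returned to the caller as Language below.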
+ if acceptLanguage != "" { + req.Header.Add("Accept-Language", acceptLanguage) + } + + req.Header.Add("Content-Type", "application/json") + req.Header.Add("User-Agent", h.userAgent) + + if logger.Trace().Enabled() { + requestBytes, err := httputil.DumpRequestOut(req, true) + if err == nil { + logger.Trace().Str(logEndpoint, endpoint).Str(logProto, logProtoJmap).Str(logType, logTypeRequest).Msg(string(requestBytes)) + } + } + h.auth(session.Username, logger, req) + + res, err := h.client.Do(req) + if err != nil { + h.listener.OnFailedRequest(endpoint, err) + logger.Error().Err(err).Msgf("failed to perform POST %v", jmapUrl) + return nil, "", SimpleError{code: JmapErrorSendingRequest, err: err} + } + + if logger.Trace().Enabled() { + responseBytes, err := httputil.DumpResponse(res, true) + if err == nil { + logger.Trace().Str(logEndpoint, endpoint).Str(logProto, logProtoJmap).Str(logType, logTypeResponse). + Str(logHttpStatus, log.SafeString(res.Status)).Int(logHttpStatusCode, res.StatusCode). + Msg(string(responseBytes)) + } + } + + language := Language(res.Header.Get("Content-Language")) + if res.StatusCode < 200 || res.StatusCode > 299 { + h.listener.OnFailedRequestWithStatus(endpoint, res.StatusCode) + logger.Error().Str(logEndpoint, endpoint).Str(logHttpStatus, log.SafeString(res.Status)).Msg("HTTP response status code is not 2xx") + return nil, language, SimpleError{code: JmapErrorServerResponse, err: err} + } + if res.Body != nil { + defer func(Body io.ReadCloser) { + err := Body.Close() + if err != nil { + logger.Error().Err(err).Msg("failed to close response body") + } + }(res.Body) + } + h.listener.OnSuccessfulRequest(endpoint, res.StatusCode) + + body, err := io.ReadAll(res.Body) + if err != nil { + logger.Error().Err(err).Msg("failed to read response body") + h.listener.OnResponseBodyReadingError(endpoint, err) + return nil, language, SimpleError{code: JmapErrorServerResponse, err: err} + } + + return body, language, nil +} + +func (h *HttpJmapClient) UploadBinary(ctx context.Context, logger *log.Logger, session *Session, uploadUrl string, endpoint string, contentType string, acceptLanguage string, body io.Reader) (UploadedBlob, Language, Error) { + logger = log.From(logger.With().Str(logEndpoint, endpoint)) + + req, err := http.NewRequestWithContext(ctx, http.MethodPost, uploadUrl, body) + if err != nil { + logger.Error().Err(err).Msgf("failed to create POST request for %v", uploadUrl) + return UploadedBlob{}, "", SimpleError{code: JmapErrorCreatingRequest, err: err} + } + req.Header.Add("Content-Type", contentType) + req.Header.Add("User-Agent", h.userAgent) + if acceptLanguage != "" { + req.Header.Add("Accept-Language", acceptLanguage) + } + if logger.Trace().Enabled() { + requestBytes, err := httputil.DumpRequestOut(req, false) + if err == nil { + logger.Trace().Str(logEndpoint, endpoint).Str(logProto, logProtoJmap).Str(logType, logTypeRequest).Msg(string(requestBytes)) + } + } + + h.auth(session.Username, logger, req) + + res, err := h.client.Do(req) + if err != nil { + h.listener.OnFailedRequest(endpoint, err) + logger.Error().Err(err).Msgf("failed to perform POST %v", uploadUrl) + return UploadedBlob{}, "", SimpleError{code: JmapErrorSendingRequest, err: err} + } + if logger.Trace().Enabled() { + responseBytes, err := httputil.DumpResponse(res, true) + if err == nil { + logger.Trace().Str(logEndpoint, endpoint).Str(logProto, logProtoJmap).Str(logType, logTypeResponse). + Str(logHttpStatus, log.SafeString(res.Status)).Int(logHttpStatusCode, res.StatusCode). 
+ Msg(string(responseBytes)) + } + } + + language := Language(res.Header.Get("Content-Language")) + if res.StatusCode < 200 || res.StatusCode > 299 { + h.listener.OnFailedRequestWithStatus(endpoint, res.StatusCode) + logger.Error().Str(logHttpStatus, log.SafeString(res.Status)).Int(logHttpStatusCode, res.StatusCode).Msg("HTTP response status code is not 2xx") + return UploadedBlob{}, language, SimpleError{code: JmapErrorServerResponse, err: err} + } + if res.Body != nil { + defer func(Body io.ReadCloser) { + err := Body.Close() + if err != nil { + logger.Error().Err(err).Msg("failed to close response body") + } + }(res.Body) + } + h.listener.OnSuccessfulRequest(endpoint, res.StatusCode) + + responseBody, err := io.ReadAll(res.Body) + if err != nil { + logger.Error().Err(err).Msg("failed to read response body") + h.listener.OnResponseBodyReadingError(endpoint, err) + return UploadedBlob{}, language, SimpleError{code: JmapErrorServerResponse, err: err} + } + + logger.Trace() + + var result UploadedBlob + err = json.Unmarshal(responseBody, &result) + if err != nil { + logger.Error().Str(logHttpUrl, log.SafeString(uploadUrl)).Err(err).Msg("failed to decode JSON payload from the upload response") + h.listener.OnResponseBodyUnmarshallingError(endpoint, err) + return UploadedBlob{}, language, SimpleError{code: JmapErrorDecodingResponseBody, err: err} + } + + return result, language, nil +} + +func (h *HttpJmapClient) DownloadBinary(ctx context.Context, logger *log.Logger, session *Session, downloadUrl string, endpoint string, acceptLanguage string) (*BlobDownload, Language, Error) { + logger = log.From(logger.With().Str(logEndpoint, endpoint)) + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, downloadUrl, nil) + if err != nil { + logger.Error().Err(err).Msgf("failed to create GET request for %v", downloadUrl) + return nil, "", SimpleError{code: JmapErrorCreatingRequest, err: err} + } + req.Header.Add("User-Agent", h.userAgent) + if acceptLanguage != "" { + req.Header.Add("Accept-Language", acceptLanguage) + } + if logger.Trace().Enabled() { + requestBytes, err := httputil.DumpRequestOut(req, true) + if err == nil { + logger.Trace().Str(logEndpoint, endpoint).Str(logProto, logProtoJmap).Str(logType, logTypeRequest).Msg(string(requestBytes)) + } + } + h.auth(session.Username, logger, req) + + res, err := h.client.Do(req) + if err != nil { + h.listener.OnFailedRequest(endpoint, err) + logger.Error().Err(err).Msgf("failed to perform GET %v", downloadUrl) + return nil, "", SimpleError{code: JmapErrorSendingRequest, err: err} + } + if logger.Trace().Enabled() { + responseBytes, err := httputil.DumpResponse(res, false) + if err == nil { + logger.Trace().Str(logEndpoint, endpoint).Str(logProto, logProtoJmap).Str(logType, logTypeResponse). + Str(logHttpStatus, log.SafeString(res.Status)).Int(logHttpStatusCode, res.StatusCode). 
+ Msg(string(responseBytes)) + } + } + language := Language(res.Header.Get("Content-Language")) + if res.StatusCode == http.StatusNotFound { + return nil, language, nil + } + if res.StatusCode < 200 || res.StatusCode > 299 { + h.listener.OnFailedRequestWithStatus(endpoint, res.StatusCode) + logger.Error().Str(logHttpStatus, log.SafeString(res.Status)).Int(logHttpStatusCode, res.StatusCode).Msg("HTTP response status code is not 2xx") + return nil, language, SimpleError{code: JmapErrorServerResponse, err: err} + } + h.listener.OnSuccessfulRequest(endpoint, res.StatusCode) + + sizeStr := res.Header.Get("Content-Length") + size := -1 + if sizeStr != "" { + size, err = strconv.Atoi(sizeStr) + if err != nil { + logger.Warn().Err(err).Msgf("failed to parse Content-Length blob download response header value '%v'", sizeStr) + size = -1 + } + } + + return &BlobDownload{ + Body: res.Body, + Size: size, + Type: res.Header.Get("Content-Type"), + ContentDisposition: res.Header.Get("Content-Disposition"), + CacheControl: res.Header.Get("Cache-Control"), + }, language, nil +} + +type WebSocketPushEnableType string +type WebSocketPushDisableType string + +const ( + WebSocketPushTypeEnable = WebSocketPushEnableType("WebSocketPushEnable") + WebSocketPushTypeDisable = WebSocketPushDisableType("WebSocketPushDisable") +) + +type WebSocketPushEnable struct { + // This MUST be the string "WebSocketPushEnable". + Type WebSocketPushEnableType `json:"@type"` + + // A list of data type names (e.g., "Mailbox" or "Email") that the client is interested in. + // + // A StateChange notification will only be sent if the data for one of these types changes. + // Other types are omitted from the TypeState object. + // + // If null, changes will be pushed for all supported data types. + DataTypes *[]string `json:"dataTypes"` + + // The last "pushState" token that the client received from the server. + + // Upon receipt of a "pushState" token, the server SHOULD immediately send all changes since that state token. + PushState State `json:"pushState,omitempty"` +} + +type WebSocketPushDisable struct { + // This MUST be the string "WebSocketPushDisable". + Type WebSocketPushDisableType `json:"@type"` +} + +type HttpWsClientFactory struct { + dialer *websocket.Dialer + masterUser string + masterPassword string + logger *log.Logger + eventListener HttpJmapApiClientEventListener +} + +var _ WsClientFactory = &HttpWsClientFactory{} + +func NewHttpWsClientFactory(dialer *websocket.Dialer, masterUser string, masterPassword string, logger *log.Logger, + eventListener HttpJmapApiClientEventListener) (*HttpWsClientFactory, error) { + // RFC 8887: Section 4.2: + // Otherwise, the client MUST make an authenticated HTTP request [RFC7235] on the encrypted connection + // and MUST include the value "jmap" in the list of protocols for the "Sec-WebSocket-Protocol" header + // field. 
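+ //
+ // Illustrative handshake (per RFC 8887): the upgrade request carries "Sec-WebSocket-Protocol: jmap"
+ // together with the Basic Authorization header added in auth(), and the server's 101 response must
+ // echo "Sec-WebSocket-Protocol: jmap", which connect() checks before the connection is used.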
+ dialer.Subprotocols = []string{"jmap"} + + return &HttpWsClientFactory{ + dialer: dialer, + masterUser: masterUser, + masterPassword: masterPassword, + logger: logger, + eventListener: eventListener, + }, nil +} + +func (w *HttpWsClientFactory) auth(username string, h http.Header) error { + masterUsername := username + "%" + w.masterUser + h.Add("Authorization", "Basic "+base64.StdEncoding.EncodeToString([]byte(masterUsername+":"+w.masterPassword))) + return nil +} + +func (w *HttpWsClientFactory) connect(sessionProvider func() (*Session, error)) (*websocket.Conn, string, string, Error) { + logger := w.logger + + session, err := sessionProvider() + if err != nil { + return nil, "", "", SimpleError{code: JmapErrorWssFailedToRetrieveSession, err: err} + } + if session == nil { + return nil, "", "", SimpleError{code: JmapErrorWssFailedToRetrieveSession, err: nil} + } + + if !session.SupportsWebsocketPush { + return nil, "", "", SimpleError{code: JmapErrorSocketPushUnsupported, err: nil} + } + + username := session.Username + u := session.WebsocketUrl + endpoint := session.WebsocketEndpoint + + ctx := context.Background() // TODO WS connection context with a timeout? + + h := http.Header{} + w.auth(username, h) + c, res, err := w.dialer.DialContext(ctx, u.String(), h) + if err != nil { + return nil, "", endpoint, SimpleError{code: JmapErrorFailedToEstablishWssConnection, err: err} + } + + if w.logger.Trace().Enabled() { + responseBytes, err := httputil.DumpResponse(res, true) + if err == nil { + logger.Trace().Str(logEndpoint, endpoint).Str(logProto, logProtoJmapWs).Str(logType, logTypeResponse). + Str(logHttpStatus, log.SafeString(res.Status)).Int(logHttpStatusCode, res.StatusCode). + Msg(string(responseBytes)) + } + } + + if res.StatusCode != 101 { + w.eventListener.OnFailedRequestWithStatus(endpoint, res.StatusCode) + logger.Error().Str(logHttpStatus, log.SafeString(res.Status)).Int(logHttpStatusCode, res.StatusCode).Msg("HTTP response status code is not 101") + return nil, "", endpoint, SimpleError{code: JmapErrorServerResponse, err: fmt.Errorf("JMAP WS API response status is %v", res.Status)} + } else { + w.eventListener.OnSuccessfulWsRequest(endpoint, res.StatusCode) + } + + // RFC 8887: Section 4.2: + // The reply from the server MUST also contain a corresponding "Sec-WebSocket-Protocol" header + // field with a value of "jmap" in order for a JMAP subprotocol connection to be established. 
+ if !slices.Contains(res.Header.Values("Sec-WebSocket-Protocol"), "jmap") { + return nil, "", endpoint, SimpleError{code: JmapErrorWssConnectionResponseMissingJmapSubprotocol} + } + + return c, username, endpoint, nil +} + +type HttpWsClient struct { + client *HttpWsClientFactory + username string + sessionProvider func() (*Session, error) + c *websocket.Conn + logger *log.Logger + endpoint string + listener WsPushListener + WsClient +} + +func (w *HttpWsClient) readPump() { + defer func() { + w.c.Close() + }() + //w.c.SetReadLimit(maxMessageSize) + //c.conn.SetReadDeadline(time.Now().Add(pongWait)) + //c.conn.SetPongHandler(func(string) error { c.conn.SetReadDeadline(time.Now().Add(pongWait)); return nil }) + + logger := log.From(w.logger.With().Str("username", w.username)) + + for { + if _, message, err := w.c.ReadMessage(); err != nil { + if websocket.IsUnexpectedCloseError(err, websocket.CloseGoingAway, websocket.CloseAbnormalClosure) { + logger.Error().Err(err).Msg("unexpected close") + } + break + } else { + if logger.Trace().Enabled() { + logger.Trace().Str(logEndpoint, w.endpoint).Str(logProto, logProtoJmapWs).Str(logType, logTypePush).Msg(string(message)) + } + + var peek struct { + Type string `json:"@type"` + } + if err := json.Unmarshal(message, &peek); err != nil { + logger.Error().Err(err).Msg("failed to deserialized pushed WS message") + continue + } + switch peek.Type { + case string(TypeOfStateChange): + var stateChange StateChange + if err := json.Unmarshal(message, &stateChange); err != nil { + logger.Error().Err(err).Msgf("failed to deserialized pushed WS message into a %T", stateChange) + continue + } else { + if w.listener != nil { + w.listener.OnNotification(w.username, stateChange) + } else { + logger.Warn().Msgf("no listener to be notified of %v", stateChange) + } + } + default: + logger.Warn().Msgf("unsupported pushed WS message JMAP @type: '%s'", peek.Type) + continue + } + } + } +} + +func (w *HttpWsClientFactory) EnableNotifications(pushState State, sessionProvider func() (*Session, error), listener WsPushListener) (WsClient, Error) { + c, username, endpoint, jerr := w.connect(sessionProvider) + if jerr != nil { + return nil, jerr + } + + msg := WebSocketPushEnable{ + Type: WebSocketPushTypeEnable, + DataTypes: nil, // = all datatypes + PushState: pushState, // will be omitted if empty string + } + + data, err := json.Marshal(msg) + if err != nil { + return nil, SimpleError{code: JmapErrorWssFailedToSendWebSocketPushEnable, err: err} + } + + if w.logger.Trace().Enabled() { + w.logger.Trace().Str(logEndpoint, endpoint).Str(logProto, logProtoJmapWs).Str(logType, logTypeRequest).Msg(string(data)) + } + if err := c.WriteMessage(websocket.TextMessage, data); err != nil { + return nil, SimpleError{code: JmapErrorWssFailedToSendWebSocketPushEnable, err: err} + } + + wsc := &HttpWsClient{ + client: w, + username: username, + sessionProvider: sessionProvider, + c: c, + logger: w.logger, + endpoint: endpoint, + listener: listener, + } + + go wsc.readPump() + + return wsc, nil +} + +func (w *HttpWsClientFactory) Close() error { + return nil +} + +func (c *HttpWsClient) DisableNotifications() Error { + if c.c == nil { + return nil + } + + werr := c.c.WriteJSON(WebSocketPushDisable{Type: WebSocketPushTypeDisable}) + merr := c.c.WriteMessage(websocket.CloseMessage, websocket.FormatCloseMessage(websocket.CloseNormalClosure, "")) + cerr := c.c.Close() + + if werr != nil { + return SimpleError{code: JmapErrorWssFailedToClose, err: werr} + } + if merr != nil { + return 
SimpleError{code: JmapErrorWssFailedToClose, err: merr} + } + if cerr != nil { + return SimpleError{code: JmapErrorWssFailedToClose, err: cerr} + } + return nil +} + +func (c *HttpWsClient) Close() error { + return c.DisableNotifications() +} diff --git a/pkg/jmap/jmap_integration_contact_test.go b/pkg/jmap/jmap_integration_contact_test.go new file mode 100644 index 0000000000..decdbf3e8f --- /dev/null +++ b/pkg/jmap/jmap_integration_contact_test.go @@ -0,0 +1,494 @@ +package jmap + +import ( + "math/rand" + "regexp" + "testing" + + "github.com/stretchr/testify/require" + + "bytes" + "encoding/base64" + "fmt" + "log" + "math" + "strconv" + "strings" + + "github.com/ProtonMail/go-crypto/openpgp" + "github.com/brianvoe/gofakeit/v7" + "github.com/opencloud-eu/opencloud/pkg/jscontact" + "github.com/opencloud-eu/opencloud/pkg/structs" +) + +const ( + // currently not supported, reported as https://github.com/stalwartlabs/stalwart/issues/2431 + EnableMediaWithBlobId = false +) + +func TestContacts(t *testing.T) { + if skip(t) { + return + } + + count := uint(20 + rand.Intn(30)) + + require := require.New(t) + + s, err := newStalwartTest(t) + require.NoError(err) + defer s.Close() + + user := pickUser() + session := s.Session(user.name) + + accountId, addressbookId, expectedContactCardsById, boxes, err := s.fillContacts(t, count, session, user) + require.NoError(err) + require.NotEmpty(accountId) + require.NotEmpty(addressbookId) + + filter := ContactCardFilterCondition{ + InAddressBook: addressbookId, + } + sortBy := []ContactCardComparator{ + {Property: jscontact.ContactCardPropertyCreated, IsAscending: true}, + } + + contactsByAccount, _, _, _, err := s.client.QueryContactCards([]string{accountId}, session, t.Context(), s.logger, "", filter, sortBy, 0, 0) + require.NoError(err) + + require.Len(contactsByAccount, 1) + require.Contains(contactsByAccount, accountId) + contacts := contactsByAccount[accountId] + require.Len(contacts, int(count)) + + for _, actual := range contacts { + expected, ok := expectedContactCardsById[actual.Id] + require.True(ok, "failed to find created contact by its id") + matchContact(t, actual, expected) + } + + exceptions := []string{} + if !EnableMediaWithBlobId { + exceptions = append(exceptions, "mediaWithBlobId") + } + allBoxesAreTicked(t, boxes, exceptions...) 
+} + +func matchContact(t *testing.T, actual jscontact.ContactCard, expected jscontact.ContactCard) { + // require.Equal(t, expected, actual) + deepEqual(t, expected, actual) +} + +type ContactsBoxes struct { + nicknames bool + secondaryEmails bool + secondaryAddress bool + phones bool + onlineService bool + preferredLanguage bool + mediaWithBlobId bool + mediaWithDataUri bool + mediaWithExternalUri bool + organization bool + cryptoKey bool + link bool +} + +var streetNumberRegex = regexp.MustCompile(`^(\d+)\s+(.+)$`) + +func (s *StalwartTest) fillContacts( + t *testing.T, + count uint, + session *Session, + user User, +) (string, string, map[string]jscontact.ContactCard, ContactsBoxes, error) { + require := require.New(t) + c, err := NewTestJmapClient(session, user.name, user.password, true, true) + require.NoError(err) + defer c.Close() + + boxes := ContactsBoxes{} + + printer := func(s string) { log.Println(s) } + + accountId := c.session.PrimaryAccounts.Contacts + require.NotEmpty(accountId, "no primary account for contacts in session") + + addressbookId := "" + { + addressBooksById, err := c.objectsById(accountId, AddressBookType, JmapContacts) + require.NoError(err) + + for id, addressbook := range addressBooksById { + if isDefault, ok := addressbook["isDefault"]; ok { + if isDefault.(bool) { + addressbookId = id + break + } + } + } + } + require.NotEmpty(addressbookId) + + filled := map[string]jscontact.ContactCard{} + for i := range count { + person := gofakeit.Person() + nameMap, nameObj := createName(person) + language := pickLanguage() + contact := map[string]any{ + "@type": "Card", + "version": "1.0", + "addressBookIds": toBoolMap([]string{addressbookId}), + "prodId": productName, + "language": language, + "kind": "individual", + "name": nameMap, + } + card := jscontact.ContactCard{ + Type: jscontact.ContactCardType, + Version: "1.0", + AddressBookIds: toBoolMap([]string{addressbookId}), + ProdId: productName, + Language: language, + Kind: jscontact.ContactCardKindIndividual, + Name: &nameObj, + } + + if i%3 == 0 { + nicknameMap, nicknameObj := createNickName(person) + id := id() + contact["nicknames"] = map[string]map[string]any{id: nicknameMap} + card.Nicknames = map[string]jscontact.Nickname{id: nicknameObj} + boxes.nicknames = true + } + + { + emailMaps := map[string]map[string]any{} + emailObjs := map[string]jscontact.EmailAddress{} + emailId := id() + emailMap, emailObj := createEmail(person, 10) + emailMaps[emailId] = emailMap + emailObjs[emailId] = emailObj + + for i := range rand.Intn(3) { + id := id() + m, o := createSecondaryEmail(gofakeit.Email(), i*100) + emailMaps[id] = m + emailObjs[id] = o + boxes.secondaryEmails = true + } + if len(emailMaps) > 0 { + contact["emails"] = emailMaps + card.Emails = emailObjs + } + } + if err := propmap(i%2 == 0, 1, 2, contact, "phones", &card.Phones, func(i int, id string) (map[string]any, jscontact.Phone, error) { + boxes.phones = true + num := person.Contact.Phone + if i > 0 { + num = gofakeit.Phone() + } + var features map[jscontact.PhoneFeature]bool = nil + if rand.Intn(3) < 2 { + features = toBoolMapS(jscontact.PhoneFeatureMobile, jscontact.PhoneFeatureVoice, jscontact.PhoneFeatureVideo, jscontact.PhoneFeatureText) + } else { + features = toBoolMapS(jscontact.PhoneFeatureVoice, jscontact.PhoneFeatureMainNumber) + } + + contexts := map[jscontact.PhoneContext]bool{jscontact.PhoneContextWork: true} + if rand.Intn(2) < 1 { + contexts[jscontact.PhoneContextPrivate] = true + } + tel := "tel:" + "+1" + num + return map[string]any{ + 
"@type": "Phone", + "number": tel, + "features": structs.MapKeys(features, func(f jscontact.PhoneFeature) string { return string(f) }), + "contexts": structs.MapKeys(contexts, func(c jscontact.PhoneContext) string { return string(c) }), + }, jscontact.Phone{ + Type: jscontact.PhoneType, + Number: tel, + Features: features, + Contexts: contexts, + }, nil + }); err != nil { + return "", "", nil, boxes, err + } + if err := propmap(i%5 < 4, 1, 2, contact, "addresses", &card.Addresses, func(i int, id string) (map[string]any, jscontact.Address, error) { + var source *gofakeit.AddressInfo + if i == 0 { + source = person.Address + } else { + source = gofakeit.Address() + boxes.secondaryAddress = true + } + components := []jscontact.AddressComponent{} + m := streetNumberRegex.FindAllStringSubmatch(source.Street, -1) + if m != nil { + components = append(components, jscontact.AddressComponent{Type: jscontact.AddressComponentType, Kind: jscontact.AddressComponentKindName, Value: m[0][2]}) + components = append(components, jscontact.AddressComponent{Type: jscontact.AddressComponentType, Kind: jscontact.AddressComponentKindNumber, Value: m[0][1]}) + } else { + components = append(components, jscontact.AddressComponent{Type: jscontact.AddressComponentType, Kind: jscontact.AddressComponentKindName, Value: source.Street}) + } + components = append(components, + jscontact.AddressComponent{Type: jscontact.AddressComponentType, Kind: jscontact.AddressComponentKindLocality, Value: source.City}, + jscontact.AddressComponent{Type: jscontact.AddressComponentType, Kind: jscontact.AddressComponentKindCountry, Value: source.Country}, + jscontact.AddressComponent{Type: jscontact.AddressComponentType, Kind: jscontact.AddressComponentKindRegion, Value: source.State}, + jscontact.AddressComponent{Type: jscontact.AddressComponentType, Kind: jscontact.AddressComponentKindPostcode, Value: source.Zip}, + ) + tz := pickRandom(timezones...) 
+ return map[string]any{ + "@type": "Address", + "components": structs.Map(components, func(c jscontact.AddressComponent) map[string]string { + return map[string]string{"kind": string(c.Kind), "value": c.Value} + }), + "defaultSeparator": ", ", + "isOrdered": true, + "timeZone": tz, + }, jscontact.Address{ + Type: jscontact.AddressType, + Components: components, + DefaultSeparator: ", ", + IsOrdered: true, + TimeZone: tz, + }, nil + }); err != nil { + return "", "", nil, boxes, err + } + if err := propmap(i%2 == 0, 1, 2, contact, "onlineServices", &card.OnlineServices, func(i int, id string) (map[string]any, jscontact.OnlineService, error) { + boxes.onlineService = true + switch rand.Intn(3) { + case 0: + return map[string]any{ + "@type": "OnlineService", + "service": "Mastodon", + "user": "@" + person.Contact.Email, + "uri": "https://mastodon.example.com/@" + strings.ToLower(person.FirstName), + }, jscontact.OnlineService{ + Type: jscontact.OnlineServiceType, + Service: "Mastodon", + User: "@" + person.Contact.Email, + Uri: "https://mastodon.example.com/@" + strings.ToLower(person.FirstName), + }, nil + case 1: + return map[string]any{ + "@type": "OnlineService", + "uri": "xmpp:" + person.Contact.Email, + }, jscontact.OnlineService{ + Type: jscontact.OnlineServiceType, + Uri: "xmpp:" + person.Contact.Email, + }, nil + default: + return map[string]any{ + "@type": "OnlineService", + "service": "Discord", + "user": person.Contact.Email, + "uri": "https://discord.example.com/user/" + person.Contact.Email, + }, jscontact.OnlineService{ + Type: jscontact.OnlineServiceType, + Service: "Discord", + User: person.Contact.Email, + Uri: "https://discord.example.com/user/" + person.Contact.Email, + }, nil + } + }); err != nil { + return "", "", nil, boxes, err + } + + if err := propmap(i%3 == 0, 1, 2, contact, "preferredLanguages", &card.PreferredLanguages, func(i int, id string) (map[string]any, jscontact.LanguagePref, error) { + boxes.preferredLanguage = true + lang := pickRandom("en", "fr", "de", "es", "it") + contexts := pickRandoms1("work", "private") + return map[string]any{ + "@type": "LanguagePref", + "language": lang, + "contexts": toBoolMap(contexts), + "pref": i + 1, + }, jscontact.LanguagePref{ + Type: jscontact.LanguagePrefType, + Language: lang, + Contexts: toBoolMap(structs.Map(contexts, func(s string) jscontact.LanguagePrefContext { return jscontact.LanguagePrefContext(s) })), + Pref: uint(i + 1), + }, nil + }); err != nil { + return "", "", nil, boxes, err + } + + if i%2 == 0 { + organizationMaps := map[string]map[string]any{} + organizationObjs := map[string]jscontact.Organization{} + titleMaps := map[string]map[string]any{} + titleObjs := map[string]jscontact.Title{} + for range 1 + rand.Intn(2) { + boxes.organization = true + orgId := id() + titleId := id() + organizationMaps[orgId] = map[string]any{ + "@type": "Organization", + "name": person.Job.Company, + "contexts": toBoolMapS("work"), + } + organizationObjs[orgId] = jscontact.Organization{ + Type: jscontact.OrganizationType, + Name: person.Job.Company, + Contexts: toBoolMapS(jscontact.OrganizationContextWork), + } + titleMaps[titleId] = map[string]any{ + "@type": "Title", + "kind": "title", + "name": person.Job.Title, + "organizationId": orgId, + } + titleObjs[titleId] = jscontact.Title{ + Type: jscontact.TitleType, + Kind: jscontact.TitleKindTitle, + Name: person.Job.Title, + OrganizationId: orgId, + } + } + contact["organizations"] = organizationMaps + contact["titles"] = titleMaps + card.Organizations = organizationObjs + 
card.Titles = titleObjs + } + + if err := propmap(i%2 == 0, 1, 1, contact, "cryptoKeys", &card.CryptoKeys, func(i int, id string) (map[string]any, jscontact.CryptoKey, error) { + boxes.cryptoKey = true + entity, err := openpgp.NewEntity(person.FirstName+" "+person.LastName, "test", person.Contact.Email, nil) + if err != nil { + return nil, jscontact.CryptoKey{}, err + } + var b bytes.Buffer + err = entity.PrimaryKey.Serialize(&b) + if err != nil { + return nil, jscontact.CryptoKey{}, err + } + encoded := base64.RawStdEncoding.EncodeToString(b.Bytes()) + return map[string]any{ + "@type": "CryptoKey", + "uri": "data:application/pgp-keys;base64," + encoded, + "mediaType": "application/pgp-keys", + }, jscontact.CryptoKey{ + Type: jscontact.CryptoKeyType, + Uri: "data:application/pgp-keys;base64," + encoded, + MediaType: "application/pgp-keys", + }, nil + }); err != nil { + return "", "", nil, boxes, err + } + + if err := propmap(i%2 == 0, 1, 2, contact, "media", &card.Media, func(i int, id string) (map[string]any, jscontact.Media, error) { + label := fmt.Sprintf("photo-%d", 1000+rand.Intn(9000)) + + r := 0 + if EnableMediaWithBlobId { + r = rand.Intn(3) + } else { + r = rand.Intn(2) + } + + switch r { + case 0: + boxes.mediaWithDataUri = true + // use data uri + //size := 16 + rand.Intn(512-16+1) // <- let's not do that right now, makes debugging errors very difficult due to the ASCII wall noise + size := pickRandom(16, 24, 32, 48, 64) + img := gofakeit.ImagePng(size, size) + mime := "image/png" + uri := "data:" + mime + ";base64," + base64.StdEncoding.EncodeToString(img) + contexts := toBoolMapS(jscontact.MediaContextPrivate) + return map[string]any{ + "@type": "Media", + "kind": string(jscontact.MediaKindPhoto), + "uri": uri, + "mediaType": mime, + "contexts": structs.MapKeys(contexts, func(c jscontact.MediaContext) string { return string(c) }), + "label": label, + }, jscontact.Media{ + Type: jscontact.MediaType, + Kind: jscontact.MediaKindPhoto, + Uri: uri, + MediaType: mime, + Contexts: contexts, + Label: label, + }, nil + + case 1: + boxes.mediaWithExternalUri = true + // use external uri + uri := externalImageUri() + contexts := toBoolMapS(jscontact.MediaContextWork) + return map[string]any{ + "@type": "Media", + "kind": string(jscontact.MediaKindPhoto), + "uri": uri, + "contexts": structs.MapKeys(contexts, func(c jscontact.MediaContext) string { return string(c) }), + "label": label, + }, jscontact.Media{ + Type: jscontact.MediaType, + Kind: jscontact.MediaKindPhoto, + Uri: uri, + Contexts: contexts, + Label: label, + }, nil + + default: + boxes.mediaWithBlobId = true + size := pickRandom(16, 24, 32, 48, 64) + img := gofakeit.ImageJpeg(size, size) + blob, err := c.uploadBlob(accountId, img, "image/jpeg") + if err != nil { + return nil, jscontact.Media{}, err + } + contexts := toBoolMapS(jscontact.MediaContextPrivate) + return map[string]any{ + "@type": "Media", + "kind": string(jscontact.MediaKindPhoto), + "blobId": blob.BlobId, + "contexts": structs.MapKeys(contexts, func(c jscontact.MediaContext) string { return string(c) }), + "label": label, + }, jscontact.Media{ + Type: jscontact.MediaType, + Kind: jscontact.MediaKindPhoto, + BlobId: blob.BlobId, + MediaType: blob.Type, + Contexts: contexts, + Label: label, + }, nil + + } + }); err != nil { + return "", "", nil, boxes, err + } + if err := propmap(i%2 == 0, 1, 1, contact, "links", &card.Links, func(i int, id string) (map[string]any, jscontact.Link, error) { + boxes.link = true + return map[string]any{ + "@type": "Link", + "kind": 
"contact", + "uri": "mailto:" + person.Contact.Email, + "pref": (i + 1) * 10, + }, jscontact.Link{ + Type: jscontact.LinkType, + Kind: jscontact.LinkKindContact, + Uri: "mailto:" + person.Contact.Email, + Pref: uint((i + 1) * 10), + }, nil + }); err != nil { + return "", "", nil, boxes, err + } + + id, err := s.CreateContact(c, accountId, contact) + if err != nil { + return "", "", nil, boxes, err + } + card.Id = id + filled[id] = card + printer(fmt.Sprintf("🧑🏻 created %*s/%v id=%v", int(math.Log10(float64(count))+1), strconv.Itoa(int(i+1)), count, id)) + } + return accountId, addressbookId, filled, boxes, nil +} + +func (s *StalwartTest) CreateContact(j *TestJmapClient, accountId string, contact map[string]any) (string, error) { + return j.create1(accountId, ContactCardType, JmapContacts, contact) +} diff --git a/pkg/jmap/jmap_integration_email_test.go b/pkg/jmap/jmap_integration_email_test.go new file mode 100644 index 0000000000..433eb5cf3a --- /dev/null +++ b/pkg/jmap/jmap_integration_email_test.go @@ -0,0 +1,748 @@ +package jmap + +import ( + "maps" + "math/rand" + "slices" + "strings" + "testing" + + "bytes" + "crypto/tls" + "fmt" + "log" + "net" + "net/mail" + "regexp" + "strconv" + "time" + + "github.com/brianvoe/gofakeit/v7" + "github.com/emersion/go-imap/v2" + "github.com/emersion/go-imap/v2/imapclient" + "github.com/jhillyerd/enmime/v2" + "github.com/opencloud-eu/opencloud/pkg/structs" + "github.com/stretchr/testify/require" +) + +func TestEmails(t *testing.T) { + if skip(t) { + return + } + + count := 15 + rand.Intn(20) + + require := require.New(t) + + s, err := newStalwartTest(t) + require.NoError(err) + defer s.Close() + + user := pickUser() + session := s.Session(user.name) + + accountId := session.PrimaryAccounts.Mail + + inboxId, inboxFolder := s.findInbox(t, accountId, session) + + var threads int = 0 + var mails []filledMail = nil + { + mails, threads, err = s.fillEmailsWithImap(inboxFolder, count, false, user) + require.NoError(err) + } + mailsByMessageId := structs.Index(mails, func(mail filledMail) string { return mail.messageId }) + + { + { + resp, sessionState, _, _, err := s.client.GetAllIdentities(accountId, session, s.ctx, s.logger, "") + require.NoError(err) + require.Equal(session.State, sessionState) + require.Len(resp, 1) + require.Equal(user.email, resp[0].Email) + require.Equal(user.description, resp[0].Name) + } + + { + respByAccountId, sessionState, _, _, err := s.client.GetAllMailboxes([]string{accountId}, session, s.ctx, s.logger, "") + require.NoError(err) + require.Equal(session.State, sessionState) + require.Len(respByAccountId, 1) + require.Contains(respByAccountId, accountId) + resp := respByAccountId[accountId] + mailboxesUnreadByRole := map[string]int{} + for _, m := range resp { + if m.Role != "" { + mailboxesUnreadByRole[m.Role] = m.UnreadEmails + } + } + require.LessOrEqual(mailboxesUnreadByRole["inbox"], count) + } + + { + resp, sessionState, _, _, err := s.client.GetAllEmailsInMailbox(accountId, session, s.ctx, s.logger, "", inboxId, 0, 0, true, false, 0, true) + require.NoError(err) + require.Equal(session.State, sessionState) + + require.Equalf(threads, len(resp.Emails), "the number of collapsed emails in the inbox is expected to be %v, but is actually %v", threads, len(resp.Emails)) + for _, e := range resp.Emails { + require.Len(e.MessageId, 1) + expectation, ok := mailsByMessageId[e.MessageId[0]] + require.True(ok) + matchEmail(t, e, expectation, false) + } + } + + { + resp, sessionState, _, _, err := 
s.client.GetAllEmailsInMailbox(accountId, session, s.ctx, s.logger, "", inboxId, 0, 0, false, false, 0, true) + require.NoError(err) + require.Equal(session.State, sessionState) + + require.Equalf(count, len(resp.Emails), "the number of emails in the inbox is expected to be %v, but is actually %v", count, len(resp.Emails)) + for _, e := range resp.Emails { + require.Len(e.MessageId, 1) + expectation, ok := mailsByMessageId[e.MessageId[0]] + require.True(ok) + matchEmail(t, e, expectation, false) + } + } + } +} + +func TestSendingEmails(t *testing.T) { + if skip(t) { + return + } + + require := require.New(t) + + s, err := newStalwartTest(t) + require.NoError(err) + defer s.Close() + + from := pickUser() + session := s.Session(from.name) + accountId := session.PrimaryAccounts.Mail + + var to User + { + others := structs.Filter(users[:], func(u User) bool { return u.name != from.name }) + to = others[rand.Intn(len(others))] + } + toSession := s.Session(to.name) + toAccountId := toSession.PrimaryAccounts.Mail + + var cc User + { + others := structs.Filter(users[:], func(u User) bool { return u.name != from.name && u.name != to.name }) + cc = others[rand.Intn(len(others))] + } + ccSession := s.Session(cc.name) + ccAccountId := ccSession.PrimaryAccounts.Mail + + var mailboxPerRole map[string]Mailbox + { + mailboxes, _, _, _, err := s.client.GetAllMailboxes([]string{accountId}, session, s.ctx, s.logger, "") + require.NoError(err) + mailboxPerRole = structs.Index(mailboxes[accountId], func(m Mailbox) string { return m.Role }) + require.Contains(mailboxPerRole, JmapMailboxRoleInbox) + require.Contains(mailboxPerRole, JmapMailboxRoleDrafts) + require.Contains(mailboxPerRole, JmapMailboxRoleSent) + require.Contains(mailboxPerRole, JmapMailboxRoleTrash) + } + { + roles := []string{JmapMailboxRoleDrafts, JmapMailboxRoleSent, JmapMailboxRoleInbox} + m, _, _, _, err := s.client.SearchMailboxIdsPerRole([]string{accountId}, session, s.ctx, s.logger, "", roles) + require.NoError(err) + require.Contains(m, accountId) + a := m[accountId] + for _, role := range roles { + require.Contains(a, role) + } + } + + // let's ensure that the recipients have zero emails in their mailboxes before we send them any + for _, u := range []struct { + accountId string + session *Session + }{{toAccountId, toSession}, {ccAccountId, ccSession}} { + mailboxes, _, _, _, err := s.client.GetAllMailboxes([]string{u.accountId}, u.session, s.ctx, s.logger, "") + require.NoError(err) + for _, mailbox := range mailboxes[u.accountId] { + require.Equal(0, mailbox.TotalEmails) + } + } + + subject := fmt.Sprintf("Test Subject %d", 10000+rand.Intn(90000)) + fromName := fmt.Sprintf("%s (test %d)", from.name, 1000+rand.Intn(9000)) + sender := EmailAddress{Email: from.email, Name: from.description} + + { + var identity Identity + { + identities, _, _, _, err := s.client.GetAllIdentities(accountId, session, s.ctx, s.logger, "") + require.NoError(err) + require.NotEmpty(identities) + identity = identities[0] + } + + create := EmailCreate{ + Keywords: toBoolMapS("test"), + Subject: subject, + MailboxIds: toBoolMapS(mailboxPerRole[JmapMailboxRoleDrafts].Id), + } + created, _, _, _, err := s.client.CreateEmail(accountId, create, "", session, s.ctx, s.logger, "") + require.NoError(err) + require.NotEmpty(created.Id) + + { + emails, notFound, _, _, _, err := s.client.GetEmails(accountId, session, s.ctx, s.logger, "", []string{created.Id}, true, 0, false, false) + require.NoError(err) + require.Len(emails, 1) + require.Empty(notFound) + email := 
emails[0] + require.Equal(created.Id, email.Id) + require.Len(email.MailboxIds, 1) + require.Contains(email.MailboxIds, mailboxPerRole[JmapMailboxRoleDrafts].Id) + } + + update := EmailCreate{ + From: []EmailAddress{{Name: fromName, Email: from.email}}, + To: []EmailAddress{{Name: to.description, Email: to.email}}, + Cc: []EmailAddress{{Name: cc.description, Email: cc.email}}, + Sender: []EmailAddress{sender}, + Keywords: toBoolMapS("test"), + Subject: subject, + MailboxIds: toBoolMapS(mailboxPerRole[JmapMailboxRoleDrafts].Id), + } + updated, _, _, _, err := s.client.CreateEmail(accountId, update, created.Id, session, s.ctx, s.logger, "") + require.NoError(err) + require.NotEmpty(updated.Id) + require.NotEqual(created.Id, updated.Id) + + var updatedMailboxId string + { + emails, notFound, _, _, _, err := s.client.GetEmails(accountId, session, s.ctx, s.logger, "", []string{created.Id, updated.Id}, true, 0, false, false) + require.NoError(err) + require.Len(emails, 1) + require.Len(notFound, 1) + email := emails[0] + require.Equal(updated.Id, email.Id) + require.Len(email.MailboxIds, 1) + require.Contains(email.MailboxIds, mailboxPerRole[JmapMailboxRoleDrafts].Id) + require.Equal(notFound[0], created.Id) + var ok bool + updatedMailboxId, ok = structs.FirstKey(email.MailboxIds) + require.True(ok) + } + + move := MoveMail{ + FromMailboxId: updatedMailboxId, + ToMailboxId: mailboxPerRole[JmapMailboxRoleSent].Id, + } + + sub, _, _, _, err := s.client.SubmitEmail(accountId, identity.Id, updated.Id, &move, session, s.ctx, s.logger, "") + require.NoError(err) + require.NotEmpty(sub.Id) + require.NotEmpty(sub.ThreadId) + require.Equal(updated.Id, sub.EmailId) + require.Equal(identity.Id, sub.IdentityId) + require.Equal(sub.UndoStatus, UndoStatusPending) // this *might* be fragile: if the server is fast enough, would we get "final" here? 
+ require.Empty(sub.DsnBlobIds) + require.Empty(sub.MdnBlobIds) + require.Equal(from.email, sub.Envelope.MailFrom.Email) + require.Nil(sub.Envelope.MailFrom.Parameters) + require.Len(sub.Envelope.RcptTo, 2) + require.Contains(sub.Envelope.RcptTo, Address{Email: to.email}) + require.Contains(sub.Envelope.RcptTo, Address{Email: cc.email}) + require.NotZero(sub.SendAt) + require.Len(sub.DeliveryStatus, 2) + require.Contains(sub.DeliveryStatus, to.email) + require.Contains(sub.DeliveryStatus, cc.email) + + a := 0 + maxAttempts := 3 + delivery := sub.DeliveryStatus[to.email].Delivered + + for delivery != DeliveredYes { + require.NotEqual(DeliveredNo, delivery) + a++ + if a >= maxAttempts { + break + } + time.Sleep(1 * time.Second) + + subs, notFound, _, _, _, err := s.client.GetEmailSubmissionStatus(accountId, []string{sub.Id}, session, s.ctx, s.logger, "") + require.NoError(err) + require.Empty(notFound) + require.Contains(subs, sub.Id) + delivery = subs[sub.Id].DeliveryStatus[to.email].Delivered + } + + require.Contains([]DeliveryStatusDelivered{DeliveredYes, DeliveredUnknown}, delivery) + + for _, r := range []struct { + user User + accountId string + session *Session + }{{to, toAccountId, toSession}, {cc, ccAccountId, ccSession}} { + mailboxes, _, _, _, err := s.client.GetAllMailboxes([]string{r.accountId}, r.session, s.ctx, s.logger, "") + require.NoError(err) + inboxId := "" + for _, mailbox := range mailboxes[r.accountId] { + if mailbox.Role == JmapMailboxRoleInbox { + inboxId = mailbox.Id + require.Equal(1, mailbox.TotalEmails) + } + } + require.NotEmpty(inboxId, "failed to find the Mailbox with the 'inbox' role for %v", r.user.name) + + emails, _, _, _, err := s.client.QueryEmails([]string{r.accountId}, EmailFilterCondition{InMailbox: inboxId}, r.session, s.ctx, s.logger, "", 0, 0, true, 0) + require.NoError(err) + require.Contains(emails, r.accountId) + require.Len(emails[r.accountId].Emails, 1) + received := emails[r.accountId].Emails[0] + require.Len(received.From, 1) + require.Equal(from.email, received.From[0].Email) + require.Equal(fromName, received.From[0].Name) + require.Len(received.Sender, 1) + require.Equal(from.email, received.Sender[0].Email) + require.Equal(from.description, received.Sender[0].Name) + require.Len(received.To, 1) + require.Equal(to.email, received.To[0].Email) + require.Equal(to.description, received.To[0].Name) + require.Len(received.Cc, 1) + require.Equal(cc.email, received.Cc[0].Email) + require.Equal(cc.description, received.Cc[0].Name) + require.Equal(subject, received.Subject) + } + } +} + +func matchEmail(t *testing.T, actual Email, expected filledMail, hasBodies bool) { + require := require.New(t) + require.Len(actual.MessageId, 1) + require.Equal(expected.messageId, actual.MessageId[0]) + require.Equal(expected.subject, actual.Subject) + require.NotEmpty(actual.Preview) + if hasBodies { + require.Len(actual.TextBody, 1) + textBody := actual.TextBody[0] + partId := textBody.PartId + require.Contains(actual.BodyValues, partId) + content := actual.BodyValues[partId].Value + require.True(strings.Contains(content, actual.Preview), "text body contains preview") + } else { + require.Empty(actual.BodyValues) + } + require.ElementsMatch(slices.Collect(maps.Keys(actual.Keywords)), expected.keywords) + + { + list := make([]filledAttachment, len(actual.Attachments)) + for i, a := range actual.Attachments { + list[i] = filledAttachment{ + name: a.Name, + size: a.Size, + mimeType: a.Type, + disposition: a.Disposition, + } + require.NotEmpty(a.BlobId) + 
require.NotEmpty(a.PartId) + } + + require.ElementsMatch(list, expected.attachments) + } +} + +func (s *StalwartTest) findInbox(t *testing.T, accountId string, session *Session) (string, string) { + require := require.New(t) + respByAccountId, sessionState, _, _, err := s.client.GetAllMailboxes([]string{accountId}, session, s.ctx, s.logger, "") + require.NoError(err) + require.Equal(session.State, sessionState) + require.Len(respByAccountId, 1) + require.Contains(respByAccountId, accountId) + resp := respByAccountId[accountId] + + mailboxesNameByRole := map[string]string{} + mailboxesUnreadByRole := map[string]int{} + for _, m := range resp { + if m.Role != "" { + mailboxesNameByRole[m.Role] = m.Name + mailboxesUnreadByRole[m.Role] = m.UnreadEmails + } + } + require.Contains(mailboxesNameByRole, "inbox") + require.Contains(mailboxesUnreadByRole, "inbox") + require.Zero(mailboxesUnreadByRole["inbox"]) + + inboxId := mailboxId("inbox", resp) + require.NotEmpty(inboxId) + inboxFolder := mailboxesNameByRole["inbox"] + require.NotEmpty(inboxFolder) + return inboxId, inboxFolder +} + +var emailSplitter = regexp.MustCompile("(.+)@(.+)$") + +func htmlFormat(body string, msg enmime.MailBuilder) enmime.MailBuilder { + return msg.HTML([]byte(toHtml(body))) +} + +func textFormat(body string, msg enmime.MailBuilder) enmime.MailBuilder { + return msg.Text([]byte(body)) +} + +func bothFormat(body string, msg enmime.MailBuilder) enmime.MailBuilder { + msg = htmlFormat(body, msg) + msg = textFormat(body, msg) + return msg +} + +var formats = []func(string, enmime.MailBuilder) enmime.MailBuilder{ + htmlFormat, + textFormat, + bothFormat, +} + +type sender struct { + first string + last string + from string + sender string +} + +func (s sender) inject(b enmime.MailBuilder) enmime.MailBuilder { + return b.From(s.first+" "+s.last, s.from).Header("Sender", s.sender) +} + +type senderGenerator struct { + senders []sender +} + +func newSenderGenerator(numSenders int) senderGenerator { + senders := make([]sender, numSenders) + for i := range numSenders { + person := gofakeit.Person() + senders[i] = sender{ + first: person.FirstName, + last: person.LastName, + from: person.Contact.Email, + sender: person.FirstName + " " + person.LastName + "<" + person.Contact.Email + ">", + } + } + return senderGenerator{ + senders: senders, + } +} + +func (s senderGenerator) nextSender() *sender { + if len(s.senders) < 1 { + panic("failed to determine a sender to use") + } else { + return &s.senders[rand.Intn(len(s.senders))] + } +} + +func fakeFilename(extension string) string { + return strings.ReplaceAll(gofakeit.Product().Name, " ", "_") + extension +} + +func mailboxId(role string, mailboxes []Mailbox) string { + for _, m := range mailboxes { + if m.Role == role { + return m.Id + } + } + return "" +} + +type filledAttachment struct { + name string + size int + mimeType string + disposition string +} + +type filledMail struct { + uid int + attachments []filledAttachment + subject string + testId string + messageId string + keywords []string +} + +var allKeywords = map[string]imap.Flag{ + JmapKeywordAnswered: imap.FlagAnswered, + JmapKeywordDraft: imap.FlagDraft, + JmapKeywordFlagged: imap.FlagFlagged, + JmapKeywordForwarded: imap.FlagForwarded, + JmapKeywordJunk: imap.FlagJunk, + JmapKeywordMdnSent: imap.FlagMDNSent, + JmapKeywordNotJunk: imap.FlagNotJunk, + JmapKeywordPhishing: imap.FlagPhishing, + JmapKeywordSeen: imap.FlagSeen, +} + +func (s *StalwartTest) fillEmailsWithImap(folder string, count int, empty bool, user User) 
([]filledMail, int, error) { + to := fmt.Sprintf("%s <%s>", user.description, user.email) + ccEvery := 2 + bccEvery := 3 + attachmentEvery := 2 + senders := max(count/4, 1) + maxThreadSize := 6 + maxAttachments := 4 + + tlsConfig := &tls.Config{InsecureSkipVerify: true} + + c, err := imapclient.DialTLS(net.JoinHostPort(s.ip, strconv.Itoa(s.imapPort)), &imapclient.Options{TLSConfig: tlsConfig}) + if err != nil { + return nil, 0, err + } + + defer func(imap *imapclient.Client) { + err := imap.Close() + if err != nil { + log.Fatal(err) + } + }(c) + + if err = c.Login(user.name, user.password).Wait(); err != nil { + return nil, 0, err + } + + if _, err = c.Select(folder, &imap.SelectOptions{ReadOnly: false}).Wait(); err != nil { + return nil, 0, err + } + + if empty { + if ids, err := c.Search(&imap.SearchCriteria{}, nil).Wait(); err != nil { + return nil, 0, err + } else { + if len(ids.AllSeqNums()) > 0 { + storeFlags := imap.StoreFlags{ + Op: imap.StoreFlagsAdd, + Flags: []imap.Flag{imap.FlagDeleted}, + Silent: true, + } + if err = c.Store(ids.All, &storeFlags, nil).Close(); err != nil { + return nil, 0, err + } + if err = c.Expunge().Close(); err != nil { + return nil, 0, err + } + log.Printf("🗑️ deleted %d messages in %s", len(ids.AllSeqNums()), folder) + } else { + log.Printf("ℹ️ did not delete any messages, %s is empty", folder) + } + } + } + + address, err := mail.ParseAddress(to) + if err != nil { + return nil, 0, err + } + displayName := address.Name + + addressParts := emailSplitter.FindAllStringSubmatch(address.Address, 3) + if len(addressParts) != 1 { + return nil, 0, fmt.Errorf("address does not have one part: '%v' -> %v", address.Address, addressParts) + } + if len(addressParts[0]) != 3 { + return nil, 0, fmt.Errorf("first address part does not have a size of 3: '%v'", addressParts[0]) + } + + domain := addressParts[0][2] + + toName := displayName + toAddress := fmt.Sprintf("%s@%s", user.name, domain) + ccName1 := "Team Lead" + ccAddress1 := fmt.Sprintf("lead@%s", domain) + ccName2 := "Coworker" + ccAddress2 := fmt.Sprintf("coworker@%s", domain) + bccName := "HR" + bccAddress := fmt.Sprintf("corporate@%s", domain) + + sg := newSenderGenerator(senders) + thread := 0 + mails := make([]filledMail, count) + for i := 0; i < count; thread++ { + threadMessageId := fmt.Sprintf("%d.%d@%s", time.Now().Unix(), 1000000+rand.Intn(8999999), domain) + threadSubject := strings.Trim(gofakeit.SentenceSimple(), ".") // remove the . 
at the end, looks weird + threadSize := 1 + rand.Intn(maxThreadSize) + lastMessageId := "" + lastSubject := "" + for t := 0; i < count && t < threadSize; t++ { + sender := sg.nextSender() + + format := formats[i%len(formats)] + text := gofakeit.Paragraph(2+rand.Intn(9), 1+rand.Intn(4), 1+rand.Intn(32), "\n") + + msg := sender.inject(enmime.Builder().To(toName, toAddress)) + + messageId := "" + if lastMessageId == "" { + // start a new thread + msg = msg.Header("Message-ID", threadMessageId).Subject(threadSubject) + lastMessageId = threadMessageId + lastSubject = threadSubject + messageId = threadMessageId + } else { + // we're continuing a thread + messageId = fmt.Sprintf("%d.%d@%s", time.Now().Unix(), 1000000+rand.Intn(8999999), domain) + inReplyTo := "" + subject := "" + switch rand.Intn(2) { + case 0: + // reply to first post in thread + subject = "Re: " + threadSubject + inReplyTo = threadMessageId + default: + // reply to last addition to thread + subject = "Re: " + lastSubject + inReplyTo = lastMessageId + } + msg = msg.Header("Message-ID", messageId).Header("In-Reply-To", inReplyTo).Subject(subject) + lastMessageId = messageId + lastSubject = subject + } + + if i%ccEvery == 0 { + msg = msg.CCAddrs([]mail.Address{{Name: ccName1, Address: ccAddress1}, {Name: ccName2, Address: ccAddress2}}) + } + if i%bccEvery == 0 { + msg = msg.BCC(bccName, bccAddress) + } + + numAttachments := 0 + attachments := []filledAttachment{} + if maxAttachments > 0 && i%attachmentEvery == 0 { + numAttachments = rand.Intn(maxAttachments) + for a := range numAttachments { + switch rand.Intn(2) { + case 0: + filename := fakeFilename(".txt") + attachment := gofakeit.Paragraph(2+rand.Intn(4), 1+rand.Intn(4), 1+rand.Intn(32), "\n") + data := []byte(attachment) + msg = msg.AddAttachment(data, "text/plain", filename) + attachments = append(attachments, filledAttachment{ + name: filename, + size: len(data), + mimeType: "text/plain", + disposition: "attachment", + }) + default: + filename := "" + mimetype := "" + var image []byte = nil + switch rand.Intn(2) { + case 0: + filename = fakeFilename(".png") + mimetype = "image/png" + image = gofakeit.ImagePng(512, 512) + default: + filename = fakeFilename(".jpg") + mimetype = "image/jpeg" + image = gofakeit.ImageJpeg(400, 200) + } + disposition := "" + switch rand.Intn(2) { + case 0: + msg = msg.AddAttachment(image, mimetype, filename) + disposition = "attachment" + default: + msg = msg.AddInline(image, mimetype, filename, "c"+strconv.Itoa(a)) + disposition = "inline" + } + attachments = append(attachments, filledAttachment{ + name: filename, + size: len(image), + mimeType: mimetype, + disposition: disposition, + }) + } + } + } + + msg = format(text, msg) + + flags := []imap.Flag{} + keywords := pickRandomlyFromMap(allKeywords, 0, len(allKeywords)) + for _, f := range keywords { + flags = append(flags, f) + } + + buf := new(bytes.Buffer) + part, _ := msg.Build() + part.Encode(buf) + mail := buf.String() + + var options *imap.AppendOptions = nil + if len(flags) > 0 { + options = &imap.AppendOptions{Flags: flags} + } + + size := int64(len(mail)) + appendCmd := c.Append(folder, size, options) + if _, err := appendCmd.Write([]byte(mail)); err != nil { + return nil, 0, err + } + if err := appendCmd.Close(); err != nil { + return nil, 0, err + } + if appendData, err := appendCmd.Wait(); err != nil { + return nil, 0, err + } else { + attachmentStr := "" + if numAttachments > 0 { + attachmentStr = " " + strings.Repeat("📎", numAttachments) + } + log.Printf("➕ appended %v/%v [in 
thread %v] uid=%v%s", i+1, count, thread+1, appendData.UID, attachmentStr) + + mails[i] = filledMail{ + uid: int(appendData.UID), + attachments: attachments, + subject: msg.GetSubject(), + messageId: messageId, + keywords: slices.Collect(maps.Keys(keywords)), + } + } + + i++ + } + } + + listCmd := c.List("", "%", &imap.ListOptions{ + ReturnStatus: &imap.StatusOptions{ + NumMessages: true, + NumUnseen: true, + }, + }) + countMap := map[string]int{} + for { + mbox := listCmd.Next() + if mbox == nil { + break + } + countMap[mbox.Mailbox] = int(*mbox.Status.NumMessages) + } + + inboxCount := -1 + for f, i := range countMap { + if strings.Compare(strings.ToLower(f), strings.ToLower(folder)) == 0 { + inboxCount = i + break + } + } + if err = listCmd.Close(); err != nil { + return nil, 0, err + } + if inboxCount == -1 { + return nil, 0, fmt.Errorf("failed to find folder '%v' via IMAP", folder) + } + if empty && count != inboxCount { + return nil, 0, fmt.Errorf("wrong number of emails in the inbox after filling, expecting %v, has %v", count, inboxCount) + } + + return mails, thread, nil +} diff --git a/pkg/jmap/jmap_integration_event_test.go b/pkg/jmap/jmap_integration_event_test.go new file mode 100644 index 0000000000..a4d1ee76b5 --- /dev/null +++ b/pkg/jmap/jmap_integration_event_test.go @@ -0,0 +1,621 @@ +package jmap + +import ( + "encoding/base64" + "encoding/json" + "fmt" + "log" + "math" + "math/rand" + "strconv" + "strings" + "testing" + "time" + + "github.com/brianvoe/gofakeit/v7" + "github.com/stretchr/testify/require" + + "github.com/opencloud-eu/opencloud/pkg/jscalendar" + "github.com/opencloud-eu/opencloud/pkg/structs" +) + +// fields that are currently unsupported in Stalwart +const ( + EnableEventMayInviteFields = false + EnableEventParticipantDescriptionFields = false +) + +func TestEvents(t *testing.T) { + if skip(t) { + return + } + + count := uint(20 + rand.Intn(30)) + + require := require.New(t) + + s, err := newStalwartTest(t) + require.NoError(err) + defer s.Close() + + user := pickUser() + session := s.Session(user.name) + + accountId, calendarId, expectedEventsById, boxes, err := s.fillEvents(t, count, session, user) + require.NoError(err) + require.NotEmpty(accountId) + require.NotEmpty(calendarId) + + filter := CalendarEventFilterCondition{ + InCalendar: calendarId, + } + sortBy := []CalendarEventComparator{ + {Property: CalendarEventPropertyStart, IsAscending: true}, + } + + contactsByAccount, _, _, _, err := s.client.QueryCalendarEvents([]string{accountId}, session, t.Context(), s.logger, "", filter, sortBy, 0, 0) + require.NoError(err) + + require.Len(contactsByAccount, 1) + require.Contains(contactsByAccount, accountId) + contacts := contactsByAccount[accountId] + require.Len(contacts, int(count)) + + for _, actual := range contacts { + expected, ok := expectedEventsById[actual.Id] + require.True(ok, "failed to find created contact by its id") + matchEvent(t, actual, expected) + } + + exceptions := []string{} + if !EnableEventMayInviteFields { + exceptions = append(exceptions, "mayInvite") + } + allBoxesAreTicked(t, boxes, exceptions...) 
+} + +func matchEvent(t *testing.T, actual CalendarEvent, expected CalendarEvent) { + //require.Equal(t, expected, actual) + deepEqual(t, expected, actual) +} + +type EventsBoxes struct { + categories bool + keywords bool + mayInvite bool +} + +func (s *StalwartTest) fillEvents( + t *testing.T, + count uint, + session *Session, + user User, +) (string, string, map[string]CalendarEvent, EventsBoxes, error) { + require := require.New(t) + c, err := NewTestJmapClient(session, user.name, user.password, true, true) + require.NoError(err) + defer c.Close() + + boxes := EventsBoxes{} + + printer := func(s string) { log.Println(s) } + + accountId := c.session.PrimaryAccounts.Calendars + require.NotEmpty(accountId, "no primary account for calendars in session") + + calendarId := "" + { + calendarsById, err := c.objectsById(accountId, CalendarType, JmapCalendars) + require.NoError(err) + + for id, calendar := range calendarsById { + if isDefault, ok := calendar["isDefault"]; ok { + if isDefault.(bool) { + calendarId = id + break + } + } + } + } + require.NotEmpty(calendarId) + + filled := map[string]CalendarEvent{} + for i := range count { + uid := gofakeit.UUID() + + isDraft := false + mainLocationId := "" + locationIds := []string{} + locationMaps := map[string]map[string]any{} + locationObjs := map[string]jscalendar.Location{} + { + n := 1 + if i%4 == 0 { + n++ + } + for range n { + locationId, locationMap, locationObj := pickLocation() + locationMaps[locationId] = locationMap + locationObjs[locationId] = locationObj + locationIds = append(locationIds, locationId) + if n > 0 && mainLocationId == "" { + mainLocationId = locationId + } + } + } + virtualLocationId, virtualLocationMap, virtualLocationObj := pickVirtualLocation() + participantMaps, participantObjs, organizerEmail := createParticipants(uid, locationIds, []string{virtualLocationId}) + duration := pickRandom("PT30M", "PT45M", "PT1H", "PT90M") + tz := pickRandom(timezones...) + daysDiff := rand.Intn(31) - 15 + t := time.Now().Add(time.Duration(daysDiff) * time.Hour * 24) + h := pickRandom(9, 10, 11, 14, 15, 16, 18) + m := pickRandom(0, 30) + t = time.Date(t.Year(), t.Month(), t.Day(), h, m, 0, 0, t.Location()) + start := strings.ReplaceAll(t.Format(time.DateTime), " ", "T") + title := gofakeit.Sentence(1) + description := gofakeit.Paragraph(1+rand.Intn(3), 1+rand.Intn(4), 1+rand.Intn(32), "\n") + + descriptionFormat := pickRandom("text/plain", "text/html") + if descriptionFormat == "text/html" { + description = toHtml(description) + } + status := pickRandom(jscalendar.Statuses...) + freeBusy := pickRandom(jscalendar.FreeBusyStatuses...) + privacy := pickRandom(jscalendar.Privacies...) + color := pickRandom(basicColors...) 
+ locale := pickLocale() + keywords := pickKeywords() + categories := pickCategories() + + sequence := 0 + + alertId := id() + alertOffset := pickRandom("-PT5M", "-PT10M", "-PT15M") + + event := map[string]any{ + "@type": "Event", + "calendarIds": toBoolMapS(calendarId), + "isDraft": isDraft, + "start": start, + "duration": duration, + "status": string(status), + "uid": uid, + "prodId": productName, + "title": title, + "description": description, + "descriptionContentType": descriptionFormat, + "locale": locale, + "color": color, + "sequence": sequence, + "showWithoutTime": false, + "freeBusyStatus": string(freeBusy), + "privacy": string(privacy), + "sentBy": organizerEmail, + "participants": participantMaps, + "timeZone": tz, + "hideAttendees": false, + "replyTo": map[string]string{ + "imip": "mailto:" + organizerEmail, + }, + "locations": locationMaps, + "virtualLocations": map[string]any{ + virtualLocationId: virtualLocationMap, + }, + "alerts": map[string]map[string]any{ + alertId: { + "@type": "Alert", + "trigger": map[string]any{ + "@type": "OffsetTrigger", + "offset": alertOffset, + "relativeTo": "start", + }, + }, + }, + } + + obj := CalendarEvent{ + Id: "", + CalendarIds: toBoolMapS(calendarId), + IsDraft: isDraft, + IsOrigin: true, + Event: jscalendar.Event{ + Type: jscalendar.EventType, + Start: jscalendar.LocalDateTime(start), + Duration: jscalendar.Duration(duration), + Status: status, + Object: jscalendar.Object{ + CommonObject: jscalendar.CommonObject{ + Uid: uid, + ProdId: productName, + Title: title, + Description: description, + DescriptionContentType: descriptionFormat, + Locale: locale, + Color: color, + }, + Sequence: uint(sequence), + ShowWithoutTime: false, + FreeBusyStatus: freeBusy, + Privacy: privacy, + SentBy: organizerEmail, + Participants: participantObjs, + TimeZone: tz, + HideAttendees: false, + ReplyTo: map[jscalendar.ReplyMethod]string{ + jscalendar.ReplyMethodImip: "mailto:" + organizerEmail, + }, + Locations: locationObjs, + VirtualLocations: map[string]jscalendar.VirtualLocation{ + virtualLocationId: virtualLocationObj, + }, + Alerts: map[string]jscalendar.Alert{ + alertId: { + Type: jscalendar.AlertType, + Trigger: jscalendar.OffsetTrigger{ + Type: jscalendar.OffsetTriggerType, + Offset: jscalendar.SignedDuration(alertOffset), + RelativeTo: jscalendar.RelativeToStart, + }, + }, + }, + }, + }, + } + + if EnableEventMayInviteFields { + event["mayInviteSelf"] = true + event["mayInviteOthers"] = true + obj.MayInviteSelf = true + obj.MayInviteOthers = true + boxes.mayInvite = true + } + + if len(keywords) > 0 { + event["keywords"] = keywords + obj.Keywords = keywords + boxes.keywords = true + } + + if len(categories) > 0 { + event["categories"] = categories + obj.Categories = categories + boxes.categories = true + } + + if mainLocationId != "" { + event["mainLocationId"] = mainLocationId + obj.MainLocationId = mainLocationId + } + + err = propmap(i%2 == 0, 1, 1, event, "links", &obj.Links, func(int, string) (map[string]any, jscalendar.Link, error) { + mime := "" + uri := "" + rel := jscalendar.RelAbout + switch rand.Intn(2) { + case 0: + size := pickRandom(16, 24, 32, 48, 64) + img := gofakeit.ImagePng(size, size) + mime = "image/png" + uri = "data:" + mime + ";base64," + base64.StdEncoding.EncodeToString(img) + default: + mime = "image/jpeg" + uri = externalImageUri() + } + return map[string]any{ + "@type": "Link", + "href": uri, + "contentType": mime, + "rel": string(rel), + }, jscalendar.Link{ + Type: jscalendar.LinkType, + Href: uri, + ContentType: mime, 
+ Rel: rel, + }, nil + }) + + if rand.Intn(10) > 7 { + frequency := pickRandom(jscalendar.FrequencyWeekly, jscalendar.FrequencyDaily) + interval := pickRandom(1, 2) + count := 1 + if frequency == jscalendar.FrequencyWeekly { + count = 1 + rand.Intn(8) + } else { + count = 1 + rand.Intn(4) + } + event["recurrenceRule"] = map[string]any{ + "@type": "RecurrenceRule", + "frequency": string(frequency), + "interval": interval, + "rscale": string(jscalendar.RscaleIso8601), + "skip": string(jscalendar.SkipOmit), + "firstDayOfWeek": string(jscalendar.DayOfWeekMonday), + "count": count, + } + rr := jscalendar.RecurrenceRule{ + Type: jscalendar.RecurrenceRuleType, + Frequency: frequency, + Interval: uint(interval), + Rscale: jscalendar.RscaleIso8601, + Skip: jscalendar.SkipOmit, + FirstDayOfWeek: jscalendar.DayOfWeekMonday, + Count: uint(count), + } + obj.RecurrenceRule = &rr + } + + id, err := s.CreateEvent(c, accountId, event) + if err != nil { + return accountId, calendarId, nil, boxes, err + } + + obj.Id = id + filled[id] = obj + + printer(fmt.Sprintf("📅 created %*s/%v id=%v", int(math.Log10(float64(count))+1), strconv.Itoa(int(i+1)), count, uid)) + } + return accountId, calendarId, filled, boxes, nil +} + +func (s *StalwartTest) CreateEvent(j *TestJmapClient, accountId string, event map[string]any) (string, error) { + return j.create1(accountId, CalendarEventType, JmapCalendars, event) +} + +var rooms = []jscalendar.Location{ + { + Type: jscalendar.LocationType, + Name: "Office meeting room upstairs", + LocationTypes: toBoolMapS(jscalendar.LocationTypeOptionOffice), + Coordinates: "geo:52.5335389,13.4103296", + Links: map[string]jscalendar.Link{ + "l1": {Href: "https://www.heinlein-support.de/"}, + }, + }, + { + Type: jscalendar.LocationType, + Name: "office-nue", + LocationTypes: toBoolMapS(jscalendar.LocationTypeOptionOffice), + Coordinates: "geo:49.4723337,11.1042282", + Links: map[string]jscalendar.Link{ + "l2": {Href: "https://www.workandpepper.de/"}, + }, + }, + { + Type: jscalendar.LocationType, + Name: "Meetingraum Prenzlauer Berg", + LocationTypes: toBoolMapS(jscalendar.LocationTypeOptionOffice, jscalendar.LocationTypeOptionPublic), + Coordinates: "geo:52.554222,13.4142387", + Links: map[string]jscalendar.Link{ + "l3": {Href: "https://www.spacebase.com/en/venue/meeting-room-prenzlauer-be-11499/"}, + }, + }, + { + Type: jscalendar.LocationType, + Name: "Meetingraum LIANE 1", + LocationTypes: toBoolMapS(jscalendar.LocationTypeOptionOffice, jscalendar.LocationTypeOptionLibrary), + Coordinates: "geo:52.4854301,13.4224763", + Links: map[string]jscalendar.Link{ + "l4": {Href: "https://www.spacebase.com/en/venue/rent-a-jungle-8372/"}, + }, + }, + { + Type: jscalendar.LocationType, + Name: "Dark Horse", + LocationTypes: toBoolMapS(jscalendar.LocationTypeOptionOffice), + Coordinates: "geo:52.4942254,13.4346015", + Links: map[string]jscalendar.Link{ + "l5": {Href: "https://www.spacebase.com/en/event-venue/workshop-white-space-2667/"}, + }, + }, +} + +var virtualRooms = []jscalendar.VirtualLocation{ + { + Type: jscalendar.VirtualLocationType, + Name: "opentalk", + Uri: "https://meet.opentalk.eu/fake/room/06fb8f7d-42eb-4212-8112-769fac2cb111", + Features: toBoolMapS( + jscalendar.VirtualLocationFeatureAudio, + jscalendar.VirtualLocationFeatureChat, + jscalendar.VirtualLocationFeatureVideo, + jscalendar.VirtualLocationFeatureScreen, + ), + }, +} + +func pickLocation() (string, map[string]any, jscalendar.Location) { + locationId := id() + room := rooms[rand.Intn(len(rooms))] + b, err := 
json.Marshal(room) + if err != nil { + panic(err) + } + var m map[string]any + err = json.Unmarshal(b, &m) + if err != nil { + panic(err) + } + return locationId, m, room +} + +func pickVirtualLocation() (string, map[string]any, jscalendar.VirtualLocation) { + locationId := id() + vroom := virtualRooms[rand.Intn(len(virtualRooms))] + b, err := json.Marshal(vroom) + if err != nil { + panic(err) + } + var m map[string]any + err = json.Unmarshal(b, &m) + if err != nil { + panic(err) + } + return locationId, m, vroom +} + +var ChairRoles = toBoolMapS(jscalendar.RoleChair, jscalendar.RoleOwner) +var RegularRoles = toBoolMapS(jscalendar.RoleOptional) + +func createParticipants(uid string, locationIds []string, virtualLocationIds []string) (map[string]map[string]any, map[string]jscalendar.Participant, string) { + options := structs.Concat(locationIds, virtualLocationIds) + n := 1 + rand.Intn(4) + maps := map[string]map[string]any{} + objs := map[string]jscalendar.Participant{} + organizerId, organizerEmail, organizerMap, organizerObj := createParticipant(0, uid, pickRandom(options...), "", "") + maps[organizerId] = organizerMap + objs[organizerId] = organizerObj + for i := 1; i < n; i++ { + id, _, participantMap, participantObj := createParticipant(i, uid, pickRandom(options...), organizerId, organizerEmail) + maps[id] = participantMap + objs[id] = participantObj + } + return maps, objs, organizerEmail +} + +func createParticipant(i int, uid string, locationId string, organizerEmail string, organizerId string) (string, string, map[string]any, jscalendar.Participant) { + participantId := id() + person := gofakeit.Person() + roles := RegularRoles + if i == 0 { + roles = ChairRoles + } + status := jscalendar.ParticipationStatusAccepted + if i != 0 { + status = pickRandom( + jscalendar.ParticipationStatusNeedsAction, + jscalendar.ParticipationStatusAccepted, + jscalendar.ParticipationStatusDeclined, + jscalendar.ParticipationStatusTentative, + ) + //, delegated + set "delegatedTo" + } + statusComment := "" + if rand.Intn(5) >= 3 { + statusComment = gofakeit.HipsterSentence(1 + rand.Intn(5)) + } + if i == 0 { + organizerEmail = person.Contact.Email + organizerId = participantId + } + + name := person.FirstName + " " + person.LastName + email := person.Contact.Email + description := gofakeit.SentenceSimple() + descriptionContentType := pickRandom("text/html", "text/plain") + if descriptionContentType == "text/html" { + description = toHtml(description) + } + language := pickLanguage() + updated := "2025-10-01T01:59:12Z" + updatedTime, err := time.Parse(time.RFC3339, updated) + if err != nil { + panic(err) + } + + var calendarAddress string + { + pos := strings.LastIndex(email, "@") + if pos < 0 { + calendarAddress = email + } else { + local := email[0:pos] + domain := email[pos+1:] + calendarAddress = local + "+itip+" + uid + "@" + "itip." 
+ domain + } + } + + m := map[string]any{ + "@type": "Participant", + "name": name, + "email": email, + "calendarAddress": calendarAddress, + "kind": "individual", + "roles": structs.MapKeys(roles, func(r jscalendar.Role) string { return string(r) }), + "locationId": locationId, + "language": language, + "participationStatus": string(status), + "participationComment": statusComment, + "expectReply": true, + "scheduleAgent": "server", + "scheduleSequence": 1, + "scheduleStatus": []string{"1.0"}, + "scheduleUpdated": updated, + "sentBy": organizerEmail, + "invitedBy": organizerId, + "scheduleId": "mailto:" + email, + } + o := jscalendar.Participant{ + Type: jscalendar.ParticipantType, + Name: name, + Email: email, + Kind: jscalendar.ParticipantKindIndividual, + CalendarAddress: calendarAddress, + Roles: roles, + LocationId: locationId, + Language: language, + ParticipationStatus: status, + ParticipationComment: statusComment, + ExpectReply: true, + ScheduleAgent: jscalendar.ScheduleAgentServer, + ScheduleSequence: uint(1), + ScheduleStatus: []string{"1.0"}, + ScheduleUpdated: updatedTime, + SentBy: organizerEmail, + InvitedBy: organizerId, + ScheduleId: "mailto:" + email, + } + + if EnableEventParticipantDescriptionFields { + m["description"] = description + m["descriptionContentType"] = descriptionContentType + o.Description = description + o.DescriptionContentType = descriptionContentType + } + + err = propmap(i%2 == 0, 1, 2, m, "links", &o.Links, func(int, string) (map[string]any, jscalendar.Link, error) { + href := externalImageUri() + title := person.FirstName + "'s Cake Day pick" + return map[string]any{ + "@type": "Link", + "href": href, + "contentType": "image/jpeg", + "rel": "icon", + "display": "badge", + "title": title, + }, jscalendar.Link{ + Type: jscalendar.LinkType, + Href: href, + ContentType: "image/jpeg", + Rel: jscalendar.RelIcon, + Display: jscalendar.DisplayBadge, + Title: title, + }, nil + }) + if err != nil { + panic(err) + } + + return participantId, person.Contact.Email, m, o +} + +var Keywords = []string{ + "office", + "important", + "sales", + "coordination", + "decision", +} + +func pickKeywords() map[string]bool { + return toBoolMap(pickRandoms(Keywords...)) +} + +var Categories = []string{ + "http://opencloud.eu/categories/secret", + "http://opencloud.eu/categories/internal", +} + +func pickCategories() map[string]bool { + return toBoolMap(pickRandoms(Categories...)) +} diff --git a/pkg/jmap/jmap_integration_test.go b/pkg/jmap/jmap_integration_test.go new file mode 100644 index 0000000000..dbc8a17ea0 --- /dev/null +++ b/pkg/jmap/jmap_integration_test.go @@ -0,0 +1,1199 @@ +package jmap + +import ( + "bytes" + "context" + "crypto/tls" + "encoding/json" + "fmt" + "io" + "log" + "maps" + "math/rand" + "net/http" + "net/http/httputil" + "net/url" + "os" + "reflect" + "regexp" + "slices" + "strings" + "testing" + "text/template" + "time" + + "github.com/google/go-cmp/cmp" + "github.com/stretchr/testify/require" + + "github.com/gorilla/websocket" + "github.com/tidwall/pretty" + + "github.com/testcontainers/testcontainers-go" + "github.com/testcontainers/testcontainers-go/wait" + + "github.com/brianvoe/gofakeit/v7" + pw "github.com/sethvargo/go-password/password" + + "github.com/opencloud-eu/opencloud/pkg/jscontact" + clog "github.com/opencloud-eu/opencloud/pkg/log" + "github.com/opencloud-eu/opencloud/pkg/structs" + + "github.com/go-crypt/crypt/algorithm/shacrypt" +) + +const ( + EnableTypes = false + + // Wireshark = "/usr/bin/wireshark" + Wireshark = "" +) + +type 
User struct { + name string + description string + email string + password string +} + +func userpassword() string { + password, err := pw.Generate(4+rand.Intn(28), 2, 0, false, true) + if err != nil { + panic(err) + } + return password +} + +var ( + domains = [...]string{"earth.gov", "mars.mil", "opa.org"} + users = [...]User{ + {"cdrummer", "Camina Drummer", "camina.drummer@opa.org", userpassword()}, + {"aburton", "Amos Burton", "amos.burton@earth.gov", userpassword()}, + {"jholden", "James Holden", "james.holden@earth.gov", userpassword()}, + {"adawes", "Anderson Dawes", "anderson.dawes@opa.org", userpassword()}, + {"nnagata", "Naomi Nagata", "naomi.nagata@opa.org", userpassword()}, + {"kashford", "Klaes Ashford", "klaes.ashford@opa.org", userpassword()}, + {"fjohnson", "Fred Johnson", "fred.johnson@opa.org", userpassword()}, + {"cavasarala", "Chrisjen Avasarala}", "chrissy@earth.gov", userpassword()}, + {"bdraper", "Roberta Draper", "bobby@mars.mil", userpassword()}, + } +) + +const ( + stalwartImage = "ghcr.io/stalwartlabs/stalwart:v0.15.0-alpine" + httpPort = "8080" + imapsPort = "993" + configTemplate = ` +authentication.fallback-admin.secret = "secret" +authentication.fallback-admin.user = "mailadmin" +authentication.master.secret = "{{.masterpassword}}" +authentication.master.user = "{{.masterusername}}" +directory.test.bind.auth.method = "default" +directory.test.cache.size = 1048576 +directory.test.cache.ttl.negative = "10m" +directory.test.cache.ttl.positive = "1h" +directory.test.store = "rocksdb" +directory.test.type = "internal" +metrics.prometheus.enable = false +server.listener.http.bind = "[::]:{{.httpPort}}" +server.listener.http.protocol = "http" +server.listener.imaptls.bind = "[::]:{{.imapsPort}}" +server.listener.imaptls.protocol = "imap" +server.listener.imaptls.tls.implicit = true +server.hostname = "{{.hostname}}" +server.max-connections = 8192 +server.socket.backlog = 1024 +server.socket.nodelay = true +server.socket.reuse-addr = true +server.socket.reuse-port = true +storage.blob = "rocksdb" +storage.data = "rocksdb" +storage.directory = "test" +storage.fts = "rocksdb" +storage.lookup = "rocksdb" +store.rocksdb.compression = "lz4" +store.rocksdb.path = "/opt/stalwart/data" +store.rocksdb.type = "rocksdb" +tracer.log.ansi = false +tracer.log.buffered = false +tracer.log.enable = true +tracer.log.level = "trace" +tracer.log.lossy = false +tracer.log.multiline = false +tracer.log.type = "stdout" +sharing.allow-directory-query = false +auth.dkim.sign = false +auth.dkim.verify = "disable" +auth.spf.verify.ehlo = "disable" +auth.spf.verify.mail-from = "disable" +auth.arc.verify = "disable" +auth.arc.seal = false +auth.dmarc.verify = "disable" +auth.iprev.verify = "disable" +` +) + +func skip(t *testing.T) bool { + if os.Getenv("CI") == "woodpecker" { + t.Skip("Skipping tests because CI==wookpecker") + return true + } + if os.Getenv("CI_SYSTEM_NAME") == "woodpecker" { + t.Skip("Skipping tests because CI_SYSTEM_NAME==wookpecker") + return true + } + if os.Getenv("USE_TESTCONTAINERS") == "false" { + t.Skip("Skipping tests because USE_TESTCONTAINERS==false") + return true + } + return false +} + +type StalwartTest struct { + t *testing.T + ip string + imapPort int + container *testcontainers.DockerContainer + ctx context.Context + cancelCtx context.CancelFunc + client *Client + logger *clog.Logger + jmapBaseUrl *url.URL + sessionUrl *url.URL + + io.Closer +} + +func (s *StalwartTest) Close() error { + if s.container != nil { + var c testcontainers.Container = s.container 
+ testcontainers.CleanupContainer(s.t, c) + } + if s.cancelCtx != nil { + s.cancelCtx() + } + return nil +} + +func (s *StalwartTest) Session(username string) *Session { + session, jerr := s.client.FetchSession(s.sessionUrl, username, s.logger) + require.NoError(s.t, jerr) + require.NotNil(s.t, session.Capabilities.Mail) + require.NotNil(s.t, session.Capabilities.Calendars) + require.NotNil(s.t, session.Capabilities.Contacts) + + // we have to overwrite the hostname in JMAP URL because the container + // will know its name to be a random Docker container identifier, or + // "localhost" as we defined the hostname in the Stalwart configuration, + // and we also need to overwrite the port number as its not mapped + session.JmapUrl.Host = s.jmapBaseUrl.Host + session.WebsocketUrl.Host = s.jmapBaseUrl.Host + var err error + session.ApiUrl, err = replaceHost(session.ApiUrl, s.jmapBaseUrl.Host) + require.NoError(s.t, err) + session.DownloadUrl, err = replaceHost(session.DownloadUrl, s.jmapBaseUrl.Host) + require.NoError(s.t, err) + session.UploadUrl, err = replaceHost(session.UploadUrl, s.jmapBaseUrl.Host) + require.NoError(s.t, err) + session.EventSourceUrl, err = replaceHost(session.EventSourceUrl, s.jmapBaseUrl.Host) + require.NoError(s.t, err) + + return &session +} + +type stalwartTestLogConsumer struct{} + +func (lc *stalwartTestLogConsumer) Accept(l testcontainers.Log) { + fmt.Print("STALWART: " + string(l.Content)) +} + +func newStalwartTest(t *testing.T) (*StalwartTest, error) { + ctx, cancel := context.WithTimeout(context.Background(), 20*time.Second) + var _ context.CancelFunc = cancel // ignore context leak warning: it is passed in the struct and called in Close() + + // A master user name different from "master" does not seem to work as of the current Stalwart version + //masterUsernameSuffix, err := pw.Generate(4+rand.Intn(28), 2, 0, false, true) + //require.NoError(err) + masterUsername := "master" //"master_" + masterUsernameSuffix + + masterPassword, err := pw.Generate(4+rand.Intn(28), 2, 0, false, true) + if err != nil { + return nil, err + } + masterPasswordHash := "" + { + hasher, err := shacrypt.New(shacrypt.WithSHA512(), shacrypt.WithIterations(shacrypt.IterationsDefaultOmitted)) + if err != nil { + return nil, err + } + + digest, err := hasher.Hash(masterPassword) + if err != nil { + return nil, err + } + masterPasswordHash = digest.Encode() + } + + hostname := "localhost" + + configBuf := bytes.NewBufferString("") + template.Must(template.New("").Parse(configTemplate)).Execute(configBuf, map[string]any{ + "hostname": hostname, + "masterusername": masterUsername, + "masterpassword": masterPasswordHash, + "httpPort": httpPort, + "imapsPort": imapsPort, + }) + config := configBuf.String() + configReader := strings.NewReader(config) + + container, err := testcontainers.Run( + ctx, + stalwartImage, + testcontainers.WithLogConsumers(&stalwartTestLogConsumer{}), + testcontainers.WithExposedPorts(httpPort+"/tcp", imapsPort+"/tcp"), + testcontainers.WithFiles(testcontainers.ContainerFile{ + Reader: configReader, + ContainerFilePath: "/opt/stalwart/etc/config.toml", + FileMode: 0o700, + }), + testcontainers.WithWaitStrategyAndDeadline( + 30*time.Second, + wait.ForLog(`Network listener started (network.listen-start) listenerId = "imaptls"`), + wait.ForLog(`Network listener started (network.listen-start) listenerId = "http"`), + ), + ) + + success := false + defer func() { + if !success { + testcontainers.CleanupContainer(t, container) + } + }() + + ip, err := container.Host(ctx) + if 
err != nil { + return nil, err + } + + imapPort, err := container.MappedPort(ctx, "993") + if err != nil { + return nil, err + } + + tlsConfig := &tls.Config{InsecureSkipVerify: true} + + loggerImpl := clog.NewLogger(clog.Level("trace")) + logger := &loggerImpl + var j Client + var jmapBaseUrl *url.URL + var sessionUrl *url.URL + { + tr := http.DefaultTransport.(*http.Transport).Clone() + tr.ResponseHeaderTimeout = time.Duration(30 * time.Second) + tr.TLSClientConfig = tlsConfig + jh := *http.DefaultClient + jh.Transport = tr + + wsd := &websocket.Dialer{ + TLSClientConfig: tlsConfig, + HandshakeTimeout: time.Duration(10) * time.Second, + } + + jmapPort, err := container.MappedPort(ctx, httpPort) + if err != nil { + return nil, err + } + jmapBaseUrl = &url.URL{ + Scheme: "http", + Host: ip + ":" + jmapPort.Port(), + Path: "/", + } + sessionUrl = jmapBaseUrl.JoinPath(".well-known", "jmap") + + if Wireshark != "" { + fmt.Printf("\x1b[45;37;1m Starting Wireshark on port %v \x1b[0m\n", jmapPort) + attr := os.ProcAttr{ + Dir: ".", + Env: os.Environ(), + Files: []*os.File{os.Stdin, os.Stdout, os.Stderr}, + } + cmd := []string{Wireshark, "-pkSl", "-i", "lo", "-f", fmt.Sprintf("port %d", jmapPort.Int()), "-Y", "http||websocket"} + process, err := os.StartProcess(Wireshark, cmd, &attr) + require.NoError(t, err) + err = process.Release() + require.NoError(t, err) + + time.Sleep(10 * time.Second) + } + + eventListener := nullHttpJmapApiClientEventListener{} + + api := NewHttpJmapClient( + &jh, + masterUsername, + masterPassword, + eventListener, + ) + + wscf, err := NewHttpWsClientFactory(wsd, masterUsername, masterPassword, logger, eventListener) + if err != nil { + return nil, err + } + + j = NewClient(api, api, api, wscf) + } + + // provision some things using Stalwart's Management API + { + var h http.Client + { + tr := http.DefaultTransport.(*http.Transport).Clone() + tr.ResponseHeaderTimeout = time.Duration(30 * time.Second) + tr.TLSClientConfig = tlsConfig + h = *http.DefaultClient + h.Transport = tr + } + + apiPort, err := container.MappedPort(ctx, httpPort) + require.NoError(t, err) + + url := fmt.Sprintf("http://%s:%d/api/principal", ip, apiPort.Int()) + + for _, domain := range domains { + fmt.Printf("Creating domain '%v'\n", domain) + bb, err := json.Marshal(map[string]any{ + "type": "domain", + "name": domain, + "description": domain, + }) + require.NoError(t, err) + req, err := http.NewRequestWithContext(ctx, http.MethodPost, url, bytes.NewBuffer(bb)) + require.NoError(t, err) + req.SetBasicAuth("mailadmin", "secret") + resp, err := h.Do(req) + require.NoError(t, err) + require.Equal(t, "200 OK", resp.Status) + } + + for _, user := range users { + fmt.Printf("Creating individual '%v'\n", user.name) + bb, err := json.Marshal(map[string]any{ + "type": "individual", + "name": user.name, + "description": user.description, + "emails": user.email, + "roles": []string{"user"}, + "secrets": user.password, + "quota": 20000000000, + }) + require.NoError(t, err) + req, err := http.NewRequestWithContext(ctx, http.MethodPost, url, bytes.NewBuffer(bb)) + require.NoError(t, err) + req.SetBasicAuth("mailadmin", "secret") + resp, err := h.Do(req) + require.NoError(t, err) + require.Equal(t, "200 OK", resp.Status) + + // fetch the user once with the superadmin credentials to "activate" it, + // it is unclear why that is needed, but without that, we get errors back + // that we are not allowed to access that resource + { + req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil) + 
require.NoError(t, err) + req.SetBasicAuth("mailadmin", "secret") + resp, err := h.Do(req) + require.NoError(t, err) + require.Equal(t, "200 OK", resp.Status) + } + } + + { + require.NoError(t, err) + req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil) + require.NoError(t, err) + req.SetBasicAuth("mailadmin", "secret") + resp, err := h.Do(req) + require.NoError(t, err) + require.Equal(t, "200 OK", resp.Status) + var list struct { + Data struct { + Total int `json:"total"` + Items []struct { + Type string `json:"type"` + Id int `json:"id"` + Name string `json:"name"` + Emails []string `json:"emails"` + Roles []string `json:"roles"` + } `json:"items"` + } `json:"data"` + } + bb, err := io.ReadAll(resp.Body) + require.NoError(t, err) + defer resp.Body.Close() + err = json.Unmarshal(bb, &list) + require.NoError(t, err) + individuals := []struct { + Id int + Name string + Emails []string + Roles []string + }{} + for _, p := range list.Data.Items { + if p.Type == "individual" { + individuals = append(individuals, struct { + Id int + Name string + Emails []string + Roles []string + }{p.Id, p.Name, p.Emails, p.Roles}) + } + } + + require.Equal(t, len(users), len(individuals)) + } + + { + // check whether we can fetch a session for the provisioned users + for _, user := range users { + session, err := j.FetchSession(sessionUrl, user.name, logger) + require.NoError(t, err, "failed to retrieve JMAP session for newly created principal '%s'", user.name) + require.Equal(t, user.name, session.Username) + } + } + } + + success = true + return &StalwartTest{ + t: t, + ip: ip, + imapPort: imapPort.Int(), + container: container, + ctx: ctx, + cancelCtx: cancel, + client: &j, + logger: logger, + jmapBaseUrl: jmapBaseUrl, + sessionUrl: sessionUrl, + }, nil +} + +var urlHostRegex = regexp.MustCompile(`^(https?://)(.+?)/(.+)$`) + +func replaceHost(u string, host string) (string, error) { + if m := urlHostRegex.FindAllStringSubmatch(u, -1); m != nil { + return fmt.Sprintf("%s%s/%s", m[0][1], host, m[0][3]), nil + } else { + return "", fmt.Errorf("'%v' does not match '%v'", u, urlHostRegex) + } +} + +func pickRandomlyFromMap[K comparable, V any](m map[K]V, min int, max int) map[K]V { + if min < 0 || max < 0 { + panic("min and max must be >= 0") + } + l := len(m) + if min > l || max > l { + panic(fmt.Sprintf("min and max must be <= %d", l)) + } + n := min + rand.Intn(max-min+1) + if n == l { + return m + } + // let's use a deep copy so we can remove elements as we pick them + c := make(map[K]V, l) + maps.Copy(c, m) + // r will hold the results + r := make(map[K]V, n) + for range n { + pick := rand.Intn(len(c)) + j := 0 + for k, v := range m { + if j == pick { + delete(c, k) + r[k] = v + break + } + j++ + } + } + return r +} + +var productName = "jmaptest" + +type TestJmapClient struct { + h *http.Client + username string + password string + session *Session + u *url.URL + trace bool + color bool +} + +func NewTestJmapClient(session *Session, username string, password string, trace bool, color bool) (*TestJmapClient, error) { + httpTransport := http.DefaultTransport.(*http.Transport).Clone() + tlsConfig := &tls.Config{InsecureSkipVerify: true} + httpTransport.TLSClientConfig = tlsConfig + h := http.DefaultClient + h.Transport = httpTransport + + u, err := url.Parse(session.ApiUrl) + if err != nil { + return nil, err + } + + return &TestJmapClient{ + h: h, + trace: trace, + color: color, + username: username, + password: password, + session: session, + u: u, + }, nil +} + +func (j *TestJmapClient) 
Close() error { + return nil +} + +type uploadedBlob struct { + BlobId string `json:"blobId"` + Size int `json:"size"` + Type string `json:"type"` + Sha512 string `json:"sha:512"` +} + +func (j *TestJmapClient) uploadBlob(accountId string, data []byte, mimetype string) (uploadedBlob, error) { + uploadUrl := strings.ReplaceAll(j.session.UploadUrl, "{accountId}", accountId) + req, err := http.NewRequest(http.MethodPost, uploadUrl, bytes.NewReader(data)) + if err != nil { + return uploadedBlob{}, err + } + req.Header.Add("Content-Type", mimetype) + req.SetBasicAuth(j.username, j.password) + res, err := j.h.Do(req) + if err != nil { + return uploadedBlob{}, err + } + defer res.Body.Close() + var response []byte = nil + if j.trace { + if b, err := httputil.DumpResponse(res, false); err == nil { + response, err = io.ReadAll(res.Body) + if err != nil { + return uploadedBlob{}, err + } + p := pretty.Pretty(response) + if j.color { + p = pretty.Color(p, nil) + } + log.Printf("<== %s%s\n", b, p) + } + } + if res.StatusCode < 200 || res.StatusCode > 299 { + return uploadedBlob{}, fmt.Errorf("blob uploading to '%v': status is %s", uploadUrl, res.Status) + } + if response == nil { + response, err = io.ReadAll(res.Body) + if err != nil { + return uploadedBlob{}, err + } + } + + var result uploadedBlob + err = json.Unmarshal(response, &result) + if err != nil { + return uploadedBlob{}, err + } + + return result, nil +} + +func (j *TestJmapClient) command(body map[string]any) ([]any, error) { + payload, err := json.Marshal(body) + if err != nil { + return nil, err + } + req, err := http.NewRequest(http.MethodPost, j.u.String(), bytes.NewReader(payload)) + if err != nil { + return nil, err + } + + if j.trace { + if b, err := httputil.DumpRequestOut(req, false); err == nil { + p := pretty.Pretty(payload) + if j.color { + p = pretty.Color(p, nil) + } + log.Printf("==> %s%s\n", b, p) + } + } + + req.SetBasicAuth(j.username, j.password) + resp, err := j.h.Do(req) + if err != nil { + return nil, err + } + defer resp.Body.Close() + var response []byte = nil + if j.trace { + if b, err := httputil.DumpResponse(resp, false); err == nil { + response, err = io.ReadAll(resp.Body) + if err != nil { + return nil, err + } + p := pretty.Pretty(response) + if j.color { + p = pretty.Color(p, nil) + } + log.Printf("<== %s%s\n", b, p) + } + } + if resp.StatusCode >= 300 { + return nil, fmt.Errorf("JMAP command HTTP response status is %s", resp.Status) + } + if response == nil { + response, err = io.ReadAll(resp.Body) + if err != nil { + return nil, err + } + } + + r := map[string]any{} + err = json.Unmarshal(response, &r) + if err != nil { + return nil, err + } + + return r["methodResponses"].([]any), nil +} + +type Commander[T any] struct { + j *TestJmapClient + closure func([]any) (T, error) +} + +func newCommander[T any](j *TestJmapClient, closure func([]any) (T, error)) Commander[T] { + return Commander[T]{j: j, closure: closure} +} + +func (c Commander[T]) command(body map[string]any) (T, error) { + var zero T + methodResponses, err := c.j.command(body) + if err != nil { + return zero, err + } + return c.closure(methodResponses) +} + +func (j *TestJmapClient) create(id string, objectType ObjectType, body map[string]any) (string, error) { + return newCommander(j, func(methodResponses []any) (string, error) { + z := methodResponses[0].([]any) + f := z[1].(map[string]any) + if x, ok := f["created"]; ok { + created := x.(map[string]any) + if c, ok := created[id].(map[string]any); ok { + return c["id"].(string), nil + } else 
{ + return "", fmt.Errorf("failed to create %v", objectType) + } + } else { + if ncx, ok := f["notCreated"]; ok { + nc := ncx.(map[string]any) + c := nc[id].(map[string]any) + return "", fmt.Errorf("failed to create %v: %v", objectType, c["description"]) + } else { + return "", fmt.Errorf("failed to create %v", objectType) + } + } + }).command(body) +} + +func (j *TestJmapClient) create1(accountId string, objectType ObjectType, ns string, obj map[string]any) (string, error) { + body := map[string]any{ + "using": []string{JmapCore, ns}, + "methodCalls": []any{ + []any{ + objectType + "/set", + map[string]any{ + "accountId": accountId, + "create": map[string]any{ + "c": obj, + }, + }, + "0", + }, + }, + } + return j.create("c", objectType, body) +} + +func (j *TestJmapClient) objectsById(accountId string, objectType ObjectType, scope string) (map[string]map[string]any, error) { + m := map[string]map[string]any{} + { + body := map[string]any{ + "using": []string{JmapCore, scope}, + "methodCalls": []any{ + []any{ + objectType + "/get", + map[string]any{ + "accountId": accountId, + }, + "0", + }, + }, + } + result, err := newCommander(j, func(methodResponses []any) ([]any, error) { + z := methodResponses[0].([]any) + f := z[1].(map[string]any) + if list, ok := f["list"]; ok { + return list.([]any), nil + } else { + return nil, fmt.Errorf("methodResponse[1] has no 'list' attribute: %v", f) + } + }).command(body) + if err != nil { + return nil, err + } + for _, a := range result { + obj := a.(map[string]any) + id := obj["id"].(string) + m[id] = obj + } + } + return m, nil +} + +func createName(person *gofakeit.PersonInfo) (map[string]any, jscontact.Name) { + o := jscontact.Name{ + Type: jscontact.NameType, + } + m := map[string]any{ + "@type": "Name", + } + mComps := make([]map[string]string, 2) + oComps := make([]jscontact.NameComponent, 2) + mComps[0] = map[string]string{ + "kind": "given", + "value": person.FirstName, + } + oComps[0] = jscontact.NameComponent{ + Type: jscontact.NameComponentType, + Kind: jscontact.NameComponentKindGiven, + Value: person.FirstName, + } + mComps[1] = map[string]string{ + "kind": "surname", + "value": person.LastName, + } + oComps[1] = jscontact.NameComponent{ + Type: jscontact.NameComponentType, + Kind: jscontact.NameComponentKindSurname, + Value: person.LastName, + } + m["components"] = mComps + o.Components = oComps + m["isOrdered"] = true + o.IsOrdered = true + m["defaultSeparator"] = " " + o.DefaultSeparator = " " + full := fmt.Sprintf("%s %s", person.FirstName, person.LastName) + m["full"] = full + o.Full = full + return m, o +} + +func createNickName(_ *gofakeit.PersonInfo) (map[string]any, jscontact.Nickname) { + name := gofakeit.PetName() + contexts := pickRandoms(jscontact.NicknameContextPrivate, jscontact.NicknameContextWork) + return map[string]any{ + "@type": "Nickname", + "name": name, + "contexts": toBoolMap(structs.Map(contexts, func(s jscontact.NicknameContext) string { return string(s) })), + }, jscontact.Nickname{ + Type: jscontact.NicknameType, + Name: name, + Contexts: orNilMap(toBoolMap(contexts)), + } +} + +func createEmail(person *gofakeit.PersonInfo, pref int) (map[string]any, jscontact.EmailAddress) { + email := person.Contact.Email + contexts := pickRandoms1(jscontact.EmailAddressContextWork, jscontact.EmailAddressContextPrivate) + label := strings.ToLower(person.FirstName) + return map[string]any{ + "@type": "EmailAddress", + "address": email, + "contexts": toBoolMap(structs.Map(contexts, func(s jscontact.EmailAddressContext) string { 
return string(s) })), + "label": label, + "pref": pref, + }, jscontact.EmailAddress{ + Type: jscontact.EmailAddressType, + Address: email, + Contexts: orNilMap(toBoolMap(contexts)), + Label: label, + Pref: uint(pref), + } +} + +func createSecondaryEmail(email string, pref int) (map[string]any, jscontact.EmailAddress) { + contexts := pickRandoms(jscontact.EmailAddressContextWork, jscontact.EmailAddressContextPrivate) + return map[string]any{ + "@type": "EmailAddress", + "address": email, + "contexts": toBoolMap(structs.Map(contexts, func(s jscontact.EmailAddressContext) string { return string(s) })), + "pref": pref, + }, jscontact.EmailAddress{ + Type: jscontact.EmailAddressType, + Address: email, + Contexts: orNilMap(toBoolMap(contexts)), + Pref: uint(pref), + } +} + +var idFirstLetters = []rune("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ") +var idOtherLetters = append(idFirstLetters, []rune("0123456789")...) + +func id() string { + n := 4 + rand.Intn(12-4+1) + b := make([]rune, n) + b[0] = idFirstLetters[rand.Intn(len(idFirstLetters))] + for i := 1; i < n; i++ { + b[i] = idOtherLetters[rand.Intn(len(idOtherLetters))] + } + return string(b) +} + +func toHtml(text string) string { + return "\n\n\n" + strings.Join(htmlJoin(paraSplitter.Split(text, -1)), "\n") + "\n" +} + +func htmlJoin(parts []string) []string { + var result []string + for i := range parts { + result = append(result, fmt.Sprintf("
<p>%v</p>
", parts[i])) + } + return result +} + +var paraSplitter = regexp.MustCompile("[\r\n]+") + +var timezones = []string{ + "America/Adak", + "America/Anchorage", + "America/Chicago", + "America/Denver", + "America/Detroit", + "America/Indiana/Knox", + "America/Kentucky/Louisville", + "America/Los_Angeles", + "America/New_York", + "Europe/Brussels", + "Europe/Berlin", + "Europe/Paris", +} + +// https://www.w3.org/TR/css-color-3/#html4 +var basicColors = []string{ + "black", + "silver", + "gray", + "white", + "maroon", + "red", + "purple", + "fuchsia", + "green", + "lime", + "olive", + "yellow", + "navy", + "blue", + "teal", + "aqua", +} + +// https://www.w3.org/TR/SVG11/types.html#ColorKeywords +var extendedColors = []string{ + "aliceblue", + "antiquewhite", + "aqua", + "aquamarine", + "azure", + "beige", + "bisque", + "black", + "blanchedalmond", + "blue", + "blueviolet", + "brown", + "burlywood", + "cadetblue", + "chartreuse", + "chocolate", + "coral", + "cornflowerblue", + "cornsilk", + "crimson", + "cyan", + "darkblue", + "darkcyan", + "darkgoldenrod", + "darkgray", + "darkgreen", + "darkgrey", + "darkkhaki", + "darkmagenta", + "darkolivegreen", + "darkorange", + "darkorchid", + "darkred", + "darksalmon", + "darkseagreen", + "darkslateblue", + "darkslategray", + "darkslategrey", + "darkturquoise", + "darkviolet", + "deeppink", + "deepskyblue", + "dimgray", + "dimgrey", + "dodgerblue", + "firebrick", + "floralwhite", + "forestgreen", + "fuchsia", + "gainsboro", + "ghostwhite", + "gold", + "goldenrod", + "gray", + "grey", + "green", + "greenyellow", + "honeydew", + "hotpink", + "indianred", + "indigo", + "ivory", + "khaki", + "lavender", + "lavenderblush", + "lawngreen", + "lemonchiffon", + "lightblue", + "lightcoral", + "lightcyan", + "lightgoldenrodyellow", + "lightgray", + "lightgreen", + "lightgrey", + "lightpink", + "lightsalmon", + "lightseagreen", + "lightskyblue", + "lightslategray", + "lightslategrey", + "lightsteelblue", + "lightyellow", + "lime", + "limegreen", + "linen", + "magenta", + "maroon", + "mediumaquamarine", + "mediumblue", + "mediumorchid", + "mediumpurple", + "mediumseagreen", + "mediumslateblue", + "mediumspringgreen", + "mediumturquoise", + "mediumvioletred", + "midnightblue", + "mintcream", + "mistyrose", + "moccasin", + "navajowhite", + "navy", + "oldlace", + "olive", + "olivedrab", + "orange", + "orangered", + "orchid", + "palegoldenrod", + "palegreen", + "paleturquoise", + "palevioletred", + "papayawhip", + "peachpuff", + "peru", + "pink", + "plum", + "powderblue", + "purple", + "red", + "rosybrown", + "royalblue", + "saddlebrown", + "salmon", + "sandybrown", + "seagreen", + "seashell", + "sienna", + "silver", + "skyblue", + "slateblue", + "slategray", + "slategrey", + "snow", + "springgreen", + "steelblue", + "tan", + "teal", + "thistle", + "tomato", + "turquoise", + "violet", + "wheat", + "white", + "whitesmoke", + "yellow", + "yellowgreen", +} + +func propmap[T any](enabled bool, min int, max int, container map[string]any, name string, cardProperty *map[string]T, generator func(int, string) (map[string]any, T, error)) error { + if !enabled { + return nil + } + n := min + rand.Intn(max-min+1) + + m := make(map[string]map[string]any, n) + o := make(map[string]T, n) + for i := range n { + id := id() + itemForMap, itemForCard, err := generator(i, id) + if err != nil { + return err + } + if itemForMap != nil { + m[id] = itemForMap + o[id] = itemForCard + } + } + if len(m) > 0 { + container[name] = m + *cardProperty = o + } + return nil +} + +func externalImageUri() 
string { + return fmt.Sprintf("https://picsum.photos/id/%d/%d/%d", 1+rand.Intn(200), 200, 300) +} + +func orNilMap[K comparable, V any](m map[K]V) map[K]V { + if len(m) < 1 { + return nil + } else { + return m + } +} + +func toBoolMap[K comparable](s []K) map[K]bool { + m := make(map[K]bool, len(s)) + for _, e := range s { + m[e] = true + } + return m +} + +func toBoolMapS[K comparable](s ...K) map[K]bool { + m := make(map[K]bool, len(s)) + for _, e := range s { + m[e] = true + } + return m +} + +func pickRandom[T any](s ...T) T { + return s[rand.Intn(len(s))] +} + +func pickUser() User { + return users[rand.Intn(len(users))] +} + +func pickRandoms[T any](s ...T) []T { + n := rand.Intn(len(s)) + if n == 0 { + return []T{} + } + result := make([]T, n) + o := make([]T, len(s)) + copy(o, s) + for i := range n { + p := rand.Intn(len(o)) + result[i] = slices.Delete(o, p, p)[0] + } + return result +} + +func pickRandoms1[T any](s ...T) []T { + n := 1 + rand.Intn(len(s)-1) + result := make([]T, n) + o := make([]T, len(s)) + copy(o, s) + for i := range n { + p := rand.Intn(len(o)) + result[i] = slices.Delete(o, p, p)[0] + } + return result +} + +func pickLanguage() string { + return pickRandom("en-US", "en-GB", "en-AU") +} + +func pickLocale() string { + return pickRandom("en", "fr", "de") +} + +func allBoxesAreTicked[S any](t *testing.T, s S, exceptions ...string) { + v := reflect.ValueOf(s) + typ := v.Type() + for i := range v.NumField() { + name := typ.Field(i).Name + if slices.Contains(exceptions, name) { + log.Printf("(/) %s\n", name) + continue + } + value := v.Field(i).Bool() + if value { + log.Printf("(X) %s\n", name) + } else { + log.Printf("( ) %s\n", name) + } + require.True(t, value, "should be true: %v", name) + } +} + +func deepEqual[T any](t *testing.T, expected, actual T) { + diff := "" + if EnableTypes { + diff = cmp.Diff(expected, actual) + } else { + diff = cmp.Diff(expected, actual, cmp.FilterPath(func(p cmp.Path) bool { + switch sf := p.Last().(type) { + case cmp.StructField: + return sf.String() == ".Type" + } + return false + }, cmp.Ignore())) + } + require.Empty(t, diff) +} diff --git a/pkg/jmap/jmap_integration_ws_test.go b/pkg/jmap/jmap_integration_ws_test.go new file mode 100644 index 0000000000..673949775d --- /dev/null +++ b/pkg/jmap/jmap_integration_ws_test.go @@ -0,0 +1,230 @@ +package jmap + +import ( + "sync" + "sync/atomic" + "testing" + "time" + + "github.com/opencloud-eu/opencloud/pkg/log" + "github.com/opencloud-eu/opencloud/pkg/structs" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +type testWsPushListener struct { + t *testing.T + logger *log.Logger + username string + mailAccountId string + calls atomic.Uint32 + m sync.Mutex + emailStates []string + threadStates []string + mailboxStates []string +} + +func (l *testWsPushListener) OnNotification(username string, pushState StateChange) { + assert.Equal(l.t, l.username, username) + l.calls.Add(1) + // pushState is currently not supported by Stalwart, let's use the object states instead + l.logger.Debug().Msgf("received %T: %v", pushState, pushState) + if changed, ok := pushState.Changed[l.mailAccountId]; ok { + l.m.Lock() + if st, ok := changed[EmailType]; ok { + l.emailStates = append(l.emailStates, st) + } + if st, ok := changed[ThreadType]; ok { + l.threadStates = append(l.threadStates, st) + } + if st, ok := changed[MailboxType]; ok { + l.mailboxStates = append(l.mailboxStates, st) + } + l.m.Unlock() + + unsupportedKeys := structs.Filter(structs.Keys(changed), func(o 
ObjectType) bool { return o != EmailType && o != ThreadType && o != MailboxType }) + assert.Empty(l.t, unsupportedKeys) + } + unsupportedAccounts := structs.Filter(structs.Keys(pushState.Changed), func(s string) bool { return s != l.mailAccountId }) + assert.Empty(l.t, unsupportedAccounts) +} + +var _ WsPushListener = &testWsPushListener{} + +func TestWs(t *testing.T) { + if skip(t) { + return + } + + assert.NoError(t, nil) + + require := require.New(t) + + s, err := newStalwartTest(t) + require.NoError(err) + defer s.Close() + + user := pickUser() + session := s.Session(user.name) + + mailAccountId := session.PrimaryAccounts.Mail + inboxFolder := "" + { + _, inboxFolder = s.findInbox(t, mailAccountId, session) + } + + l := &testWsPushListener{t: t, username: user.name, logger: s.logger, mailAccountId: mailAccountId} + s.client.AddWsPushListener(l) + + require.Equal(uint32(0), l.calls.Load()) + { + l.m.Lock() + require.Len(l.emailStates, 0) + require.Len(l.mailboxStates, 0) + require.Len(l.threadStates, 0) + l.m.Unlock() + } + + var initialState State + { + changes, sessionState, state, _, err := s.client.GetEmailChanges(mailAccountId, session, s.ctx, s.logger, "", "", 0) + require.NoError(err) + require.Equal(session.State, sessionState) + require.NotEmpty(state) + //fmt.Printf("\x1b[45;1;4mChanges [%s]:\x1b[0m\n", state) + //for _, c := range changes.Created { fmt.Printf("%s %s\n", c.Id, c.Subject) } + initialState = state + require.Empty(changes.Created) + require.Empty(changes.Destroyed) + require.Empty(changes.Updated) + } + require.NotEmpty(initialState) + + { + changes, sessionState, state, _, err := s.client.GetEmailChanges(mailAccountId, session, s.ctx, s.logger, "", initialState, 0) + require.NoError(err) + require.Equal(session.State, sessionState) + require.Equal(initialState, state) + require.Equal(initialState, changes.NewState) + require.Empty(changes.Created) + require.Empty(changes.Destroyed) + require.Empty(changes.Updated) + } + + wsc, err := s.client.EnablePushNotifications(initialState, func() (*Session, error) { return session, nil }) + require.NoError(err) + defer wsc.Close() + + require.Equal(uint32(0), l.calls.Load()) + { + l.m.Lock() + require.Len(l.emailStates, 0) + require.Len(l.mailboxStates, 0) + require.Len(l.threadStates, 0) + l.m.Unlock() + } + + emailIds := []string{} + + { + _, n, err := s.fillEmailsWithImap(inboxFolder, 1, false, user) + require.NoError(err) + require.Equal(1, n) + } + + require.Eventually(func() bool { + return l.calls.Load() == uint32(1) + }, 3*time.Second, 200*time.Millisecond, "WS push listener was not called after first email state change") + { + l.m.Lock() + require.Len(l.emailStates, 1) + require.Len(l.mailboxStates, 1) + require.Len(l.threadStates, 1) + l.m.Unlock() + } + var lastState State + { + changes, sessionState, state, _, err := s.client.GetEmailChanges(mailAccountId, session, s.ctx, s.logger, "", initialState, 0) + require.NoError(err) + require.Equal(session.State, sessionState) + require.NotEqual(initialState, state) + require.NotEqual(initialState, changes.NewState) + require.Equal(state, changes.NewState) + require.Len(changes.Created, 1) + require.Empty(changes.Destroyed) + require.Empty(changes.Updated) + lastState = state + + emailIds = append(emailIds, changes.Created...) 
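+		// (these ids are compared against changes.Destroyed in the final step of the test)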
+ } + + { + _, n, err := s.fillEmailsWithImap(inboxFolder, 1, false, user) + require.NoError(err) + require.Equal(1, n) + } + + require.Eventually(func() bool { + return l.calls.Load() == uint32(2) + }, 3*time.Second, 200*time.Millisecond, "WS push listener was not called after second email state change") + { + l.m.Lock() + require.Len(l.emailStates, 2) + require.Len(l.mailboxStates, 2) + require.Len(l.threadStates, 2) + assert.NotEqual(t, l.emailStates[0], l.emailStates[1]) + assert.NotEqual(t, l.mailboxStates[0], l.mailboxStates[1]) + assert.NotEqual(t, l.threadStates[0], l.threadStates[1]) + l.m.Unlock() + } + { + changes, sessionState, state, _, err := s.client.GetEmailChanges(mailAccountId, session, s.ctx, s.logger, "", lastState, 0) + require.NoError(err) + require.Equal(session.State, sessionState) + require.NotEqual(lastState, state) + require.NotEqual(lastState, changes.NewState) + require.Equal(state, changes.NewState) + require.Len(changes.Created, 1) + require.Empty(changes.Destroyed) + require.Empty(changes.Updated) + lastState = state + + emailIds = append(emailIds, changes.Created...) + } + + { + _, n, err := s.fillEmailsWithImap(inboxFolder, 0, true, user) + require.NoError(err) + require.Equal(0, n) + } + + require.Eventually(func() bool { + return l.calls.Load() == uint32(3) + }, 3*time.Second, 200*time.Millisecond, "WS push listener was not called after third email state change") + { + l.m.Lock() + require.Len(l.emailStates, 3) + require.Len(l.mailboxStates, 3) + require.Len(l.threadStates, 3) + assert.NotEqual(t, l.emailStates[1], l.emailStates[2]) + assert.NotEqual(t, l.mailboxStates[1], l.mailboxStates[2]) + assert.NotEqual(t, l.threadStates[1], l.threadStates[2]) + l.m.Unlock() + } + { + changes, sessionState, state, _, err := s.client.GetEmailChanges(mailAccountId, session, s.ctx, s.logger, "", lastState, 0) + require.NoError(err) + require.Equal(session.State, sessionState) + require.NotEqual(lastState, state) + require.NotEqual(lastState, changes.NewState) + require.Equal(state, changes.NewState) + require.Empty(changes.Created) + require.Len(changes.Destroyed, 2) + require.EqualValues(emailIds, changes.Destroyed) + require.Empty(changes.Updated) + lastState = state + } + + err = wsc.DisableNotifications() + require.NoError(err) +} diff --git a/pkg/jmap/jmap_model.go b/pkg/jmap/jmap_model.go new file mode 100644 index 0000000000..3863b61bf8 --- /dev/null +++ b/pkg/jmap/jmap_model.go @@ -0,0 +1,6045 @@ +package jmap + +import ( + "io" + "time" + + "github.com/opencloud-eu/opencloud/pkg/jscalendar" + "github.com/opencloud-eu/opencloud/pkg/jscontact" +) + +// https://www.iana.org/assignments/jmap/jmap.xml#jmap-data-types +type ObjectType string + +// Where `UTCDate` is given as a type, it means a `Date` where the "time-offset" +// component MUST be `"Z"` (i.e., it must be in UTC time). +// +// For example, `"2014-10-30T06:12:00Z"`. +type UTCDate string + +// Where `LocalDate` is given as a type, it means a string in the same format as `Date` +// (see [RFC8620, Section 1.4]), but with the time-offset omitted from the end. +// +// For example, `2014-10-30T14:12:00`. +// +// The interpretation in absolute time depends upon the time zone for the event, which +// may not be a fixed offset (for example when daylight saving time occurs). +// +// [RFC8620, Section 1.4]: https://www.rfc-editor.org/rfc/rfc8620.html#section-1.4 +type LocalDate string + +// Should the calendar’s events be used as part of availability calculation? 
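+// (In JMAP Calendars this is the value of a Calendar's `includeInAvailability` property.)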
+//
+// This MUST be one of:
+// !- `all`: all events are considered.
+// !- `attending`: events the user is a confirmed or tentative participant of are considered.
+// !- `none`: all events are ignored (but may be considered if also in another calendar).
+//
+// This should default to “all” for the calendars in the user’s own account, and “none” for calendars shared with the user.
+type IncludeInAvailability string
+
+type TypeOfCalendarAlert string
+
+// `CalendarEventNotification` type.
+//
+// This MUST be one of
+// !- `created`
+// !- `updated`
+// !- `destroyed`
+type CalendarEventNotificationTypeOption string
+
+// `Principal` type.
+//
+// This MUST be one of the following values:
+// !- `individual`: This represents a single person.
+// !- `group`: This represents a group of people.
+// !- `resource`: This represents some resource, e.g. a projector.
+// !- `location`: This represents a location.
+// !- `other`: This represents some other undefined principal.
+type PrincipalTypeOption string
+
+// Algorithms in this list MUST be present in the ["HTTP Digest Algorithm Values" registry]
+// defined by [RFC3230]; however, in JMAP, they must be lowercased, e.g., "md5" rather than
+// "MD5".
+//
+// Clients SHOULD prefer algorithms listed earlier in this list.
+//
+// ["HTTP Digest Algorithm Values" registry]: https://www.iana.org/assignments/http-dig-alg/http-dig-alg.xhtml
+type HttpDigestAlgorithm string
+
+// The ResourceType data type is used to act as a unit of measure for the quota usage.
+type ResourceType string
+
+// The Scope data type is used to represent the entities the quota applies to.
+type Scope string
+
+// Determines which action must be performed by the MUA or MTA upon receipt.
+//
+// !- `manual-action`: the disposition described by the disposition type was a result of an
+// explicit instruction by the user rather than some sort of automatically performed action.
+// (This might include the case when the user has manually configured her MUA to automatically
+// respond to valid MDN requests.) Unless prescribed otherwise in a particular mail environment,
+// in order to preserve the user's privacy, this MUST be the default for MUAs.
+// !- `automatic-action`: the disposition described by the disposition type was a result of an
+// automatic action rather than an explicit instruction by the user for this message. This
+// is typically generated by a Mail Delivery Agent (e.g., MDN generations by Sieve reject action
+// [RFC5429], Fax-over-Email [RFC3249], voice message system (see Voice Profile for Internet
+// Mail (VPIM) [RFC3801]), or upon delivery to a mailing list).
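+//
+// In a JMAP MDN object this is carried as the Disposition's `actionMode` property,
+// e.g. `"actionMode": "manual-action"` (illustrative value; see RFC 8098 / RFC 9007
+// for the full Disposition definition).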
+type ActionMode string + +type SendingMode string +type DispositionTypeOption string + +type Duration string + +const ( + JmapCore = "urn:ietf:params:jmap:core" + JmapMail = "urn:ietf:params:jmap:mail" + JmapMDN = "urn:ietf:params:jmap:mdn" // https://datatracker.ietf.org/doc/rfc9007/ + JmapSubmission = "urn:ietf:params:jmap:submission" + JmapVacationResponse = "urn:ietf:params:jmap:vacationresponse" + JmapCalendars = "urn:ietf:params:jmap:calendars" + JmapContacts = "urn:ietf:params:jmap:contacts" + JmapSieve = "urn:ietf:params:jmap:sieve" + JmapBlob = "urn:ietf:params:jmap:blob" + JmapQuota = "urn:ietf:params:jmap:quota" + JmapWebsocket = "urn:ietf:params:jmap:websocket" + JmapPrincipals = "urn:ietf:params:jmap:principals" + JmapPrincipalsOwner = "urn:ietf:params:jmap:principals:owner" + JmapTasks = "urn:ietf:params:jmap:tasks" + JmapTasksRecurrences = "urn:ietf:params:jmap:tasks:recurrences" + JmapTasksAssignees = "urn:ietf:params:jmap:tasks:assignees" + JmapTasksAlerts = "urn:ietf:params:jmap:tasks:alerts" + JmapTasksMultilingual = "urn:ietf:params:jmap:tasks:multilingual" + JmapTasksCustomTimezones = "urn:ietf:params:jmap:tasks:customtimezones" + + CoreType = ObjectType("Core") + PushSubscriptionType = ObjectType("PushSubscription") + MailboxType = ObjectType("Mailbox") + ThreadType = ObjectType("Thread") + EmailType = ObjectType("Email") + EmailDeliveryType = ObjectType("EmailDelivery") + SearchSnippetType = ObjectType("SearchSnippet") + IdentityType = ObjectType("Identity") + EmailSubmissionType = ObjectType("EmailSubmission") + VacationResponseType = ObjectType("VacationResponse") + MDNType = ObjectType("MDN") + QuotaType = ObjectType("Quota") + SieveScriptType = ObjectType("SieveScript") + PrincipalType = ObjectType("PrincipalType") + ShareNotificationType = ObjectType("ShareNotification") + AddressBookType = ObjectType("AddressBook") + ContactCardType = ObjectType("ContactCard") + CalendarType = ObjectType("Calendar") + CalendarEventType = ObjectType("CalendarEvent") + CalendarEventNotificationType = ObjectType("CalendarEventNotification") + ParticipantIdentityType = ObjectType("ParticipantIdentity") + + JmapKeywordPrefix = "$" + JmapKeywordSeen = "$seen" + JmapKeywordDraft = "$draft" + JmapKeywordFlagged = "$flagged" + JmapKeywordAnswered = "$answered" + JmapKeywordForwarded = "$forwarded" + JmapKeywordPhishing = "$phishing" + JmapKeywordJunk = "$junk" + JmapKeywordNotJunk = "$notjunk" + JmapKeywordMdnSent = "$mdnsent" + + // https://www.iana.org/assignments/imap-mailbox-name-attributes/imap-mailbox-name-attributes.xhtml + //JmapMailboxRoleAll = "all" + //JmapMailboxRoleArchive = "archive" + JmapMailboxRoleDrafts = "drafts" + //JmapMailboxRoleFlagged = "flagged" + //JmapMailboxRoleImportant = "important" + JmapMailboxRoleInbox = "inbox" + JmapMailboxRoleJunk = "junk" + JmapMailboxRoleSent = "sent" + //JmapMailboxRoleSubscribed = "subscribed" + JmapMailboxRoleTrash = "trash" + + CalendarAlertType = TypeOfCalendarAlert("CalendarAlert") + + CalendarEventNotificationTypeOptionCreated = CalendarEventNotificationTypeOption("created") + CalendarEventNotificationTypeOptionUpdated = CalendarEventNotificationTypeOption("updated") + CalendarEventNotificationTypeOptionDestroyed = CalendarEventNotificationTypeOption("destroyed") + + // This represents a single person. + PrincipalTypeOptionIndividual = PrincipalTypeOption("individual") + // This represents a group of people. + PrincipalTypeOptionGroup = PrincipalTypeOption("group") + // This represents some resource, e.g. a projector. 
+ PrincipalTypeOptionResource = PrincipalTypeOption("resource") + // This represents a location. + PrincipalTypeOptionLocation = PrincipalTypeOption("location") + // This represents some other undefined principal. + PrincipalTypeOptionOther = PrincipalTypeOption("other") + + HttpDigestAlgorithmAdler32 = HttpDigestAlgorithm("adler32") + HttpDigestAlgorithmCrc32c = HttpDigestAlgorithm("crc32c") + HttpDigestAlgorithmMd5 = HttpDigestAlgorithm("md5") + HttpDigestAlgorithmSha = HttpDigestAlgorithm("sha") + HttpDigestAlgorithmSha256 = HttpDigestAlgorithm("sha-256") + HttpDigestAlgorithmSha512 = HttpDigestAlgorithm("sha-512") + HttpDigestAlgorithmUnixSum = HttpDigestAlgorithm("unixsum") + HttpDigestAlgorithmUnixcksum = HttpDigestAlgorithm("unixcksum") + + // The quota is measured in a number of data type objects. + // + // For example, a quota can have a limit of 50 `Mail` objects. + ResourceTypeCount = ResourceType("count") + + // The quota is measured in size (in octets). + // + // For example, a quota can have a limit of 25000 octets. + ResourceTypeOctets = ResourceType("octets") + + // The quota information applies to just the client's account. + ScopeAccount = Scope("account") + // The quota information applies to all accounts sharing this domain. + ScopeDomain = Scope("domain") + // The quota information applies to all accounts belonging to the server. + ScopeGlobal = Scope("global") + + // The disposition described by the disposition type was a result of an explicit instruction by the + // user rather than some sort of automatically performed action. + // + // (This might include the case when the user has manually configured her MUA to automatically + // respond to valid MDN requests.) + // + // Unless prescribed otherwise in a particular mail environment, in order to preserve the user's + // privacy, this MUST be the default for MUAs. + ActionModeManualAction = ActionMode("manual-action") + + // The disposition described by the disposition type was a result of an automatic action rather than + // an explicit instruction by the user for this message. + // + // This is typically generated by a Mail Delivery Agent (e.g., MDN generations by Sieve reject + // action [RFC5429], Fax-over-Email [RFC3249], voice message system (see Voice Profile + // for Internet Mail (VPIM) [RFC3801]), or upon delivery to a mailing list). + ActionModeAutomaticAction = ActionMode("automatic-action") + + // The user explicitly gave permission for this particular MDN to be sent. + // + // Unless prescribed otherwise in a particular mail environment, in order to preserve the + // user's privacy, this MUST be the default for MUAs. + SendingModeMdnSentManually = SendingMode("mdn-sent-manually") + + // The MDN was sent because the MUA had previously been configured to do so automatically. + SendingModeMdnSentAutomatically = SendingMode("mdn-sent-automatically") + + // The message has been deleted. + // + // The recipient may or may not have seen the message. + // + // The recipient might "undelete" the message at a later time and read the message. + DispositionTypeOptionDeleted = DispositionTypeOption("deleted") + + // The message has been sent somewhere in some manner (e.g., printed, faxed, forwarded) without + // necessarily having been previously displayed to the user. + // + // The user may or may not see the message later. + DispositionTypeOptionDispatched = DispositionTypeOption("dispatched") + + // The message has been displayed by the MUA to someone reading the recipient's mailbox. 
+ // + // There is no guarantee that the content has been read or understood. + DispositionTypeOptionDisplayed = DispositionTypeOption("displayed") + + // The message has been processed in some manner (i.e., by some sort of rules or server) + // without being displayed to the user. + // + // The user may or may not see the message later, or there may not even be a human user + // associated with the mailbox. + DispositionTypeOptionProcessed = DispositionTypeOption("processed") + + // All events are considered. + IncludeInAvailabilityAll = IncludeInAvailability("all") + // Events the user is a confirmed or tentative participant of are considered. + IncludeInAvailabilityAttending = IncludeInAvailability("attending") + // All events are ignored (but may be considered if also in another calendar). + IncludeInAvailabilityNone = IncludeInAvailability("none") +) + +var ( + ObjectTypes = []ObjectType{ + CoreType, + PushSubscriptionType, + MailboxType, + ThreadType, + EmailType, + EmailDeliveryType, + SearchSnippetType, + IdentityType, + EmailSubmissionType, + VacationResponseType, + MDNType, + QuotaType, + SieveScriptType, + PrincipalType, + ShareNotificationType, + AddressBookType, + ContactCardType, + CalendarType, + CalendarEventType, + CalendarEventNotificationType, + ParticipantIdentityType, + } + + JmapMailboxRoles = []string{ + JmapMailboxRoleInbox, + JmapMailboxRoleSent, + JmapMailboxRoleDrafts, + JmapMailboxRoleJunk, + JmapMailboxRoleTrash, + } + + CalendarEventNotificationOptionTypes = []CalendarEventNotificationTypeOption{ + CalendarEventNotificationTypeOptionCreated, + CalendarEventNotificationTypeOptionUpdated, + CalendarEventNotificationTypeOptionDestroyed, + } + + PrincipalTypeOptions = []PrincipalTypeOption{ + PrincipalTypeOptionIndividual, + PrincipalTypeOptionGroup, + PrincipalTypeOptionResource, + PrincipalTypeOptionLocation, + PrincipalTypeOptionOther, + } + + HttpDigestAlgorithms = []HttpDigestAlgorithm{ + HttpDigestAlgorithmAdler32, + HttpDigestAlgorithmCrc32c, + HttpDigestAlgorithmMd5, + HttpDigestAlgorithmSha, + HttpDigestAlgorithmSha256, + HttpDigestAlgorithmSha512, + HttpDigestAlgorithmUnixSum, + HttpDigestAlgorithmUnixcksum, + } + + ResourceTypes = []ResourceType{ + ResourceTypeCount, + ResourceTypeOctets, + } + + Scopes = []Scope{ + ScopeAccount, + ScopeDomain, + ScopeGlobal, + } + + ActionModes = []ActionMode{ + ActionModeManualAction, + ActionModeAutomaticAction, + } + + SendingModes = []SendingMode{ + SendingModeMdnSentManually, + SendingModeMdnSentAutomatically, + } + + DispositionTypeOptions = []DispositionTypeOption{ + DispositionTypeOptionDeleted, + DispositionTypeOptionDispatched, + DispositionTypeOptionDisplayed, + DispositionTypeOptionProcessed, + } + + IncludeInAvailabilities = []IncludeInAvailability{ + IncludeInAvailabilityAll, + IncludeInAvailabilityAttending, + IncludeInAvailabilityNone, + } +) + +type SessionMailAccountCapabilities struct { + // The maximum number of Mailboxes that can be can assigned to a single Email object. + // + // This MUST be an integer >= 1, or null for no limit (or rather, the limit is always + // the number of Mailboxes in the account). + MaxMailboxesPerEmail int `json:"maxMailboxesPerEmail"` + + // The maximum depth of the Mailbox hierarchy (i.e., one more than the maximum + // number of ancestors a Mailbox may have), or null for no limit. + MaxMailboxDepth int `json:"maxMailboxDepth"` + + // The maximum length, in (UTF-8) octets, allowed for the name of a Mailbox. 
+ // + // This MUST be at least 100, although it is recommended servers allow more. + MaxSizeMailboxName int `json:"maxSizeMailboxName"` + + // The maximum total size of attachments, in octets, allowed for a single Email object. + // + // A server MAY still reject the import or creation of an Email with a lower attachment size + // total (for example, if the body includes several megabytes of text, causing the size of + // the encoded MIME structure to be over some server-defined limit). + // + // Note that this limit is for the sum of unencoded attachment sizes. Users are generally + // not knowledgeable about encoding overhead, etc., nor should they need to be, so marketing + // and help materials normally tell them the “max size attachments”. + // + // This is the unencoded size they see on their hard drive, so this capability matches that + // and allows the client to consistently enforce what the user understands as the limit. + // + // The server may separately have a limit for the total size of the message [RFC5322], + // created by combining the attachments (often base64 encoded) with the message headers and bodies. + // + // For example, suppose the server advertises maxSizeAttachmentsPerEmail: 50000000 (50 MB). + // The enforced server limit may be for a message size of 70000000 octets. + // Even with base64 encoding and a 2 MB HTML body, 50 MB attachments would fit under this limit. + // + // [RFC5322]: https://www.rfc-editor.org/rfc/rfc5322.html + MaxSizeAttachmentsPerEmail int `json:"maxSizeAttachmentsPerEmail"` + + // A list of all the values the server supports for the “property” field of the Comparator + // object in an Email/query sort. + // + // This MAY include properties the client does not recognise (for example, custom properties + // specified in a vendor extension). Clients MUST ignore any unknown properties in the list. + EmailQuerySortOptions []string `json:"emailQuerySortOptions"` + + // If true, the user may create a Mailbox in this account with a null parentId. + // + // (Permission for creating a child of an existing Mailbox is given by the myRights property + // on that Mailbox.) + MayCreateTopLevelMailbox bool `json:"mayCreateTopLevelMailbox"` +} + +type SessionSubmissionAccountCapabilities struct { + // The number in seconds of the maximum delay the server supports in sending. + // + // This is 0 if the server does not support delayed send. + MaxDelayedSend int `json:"maxDelayedSend"` + + // The set of SMTP submission extensions supported by the server, which the client may use + // when creating an EmailSubmission object. + // + // Each key in the object is the ehlo-name, and the value is a list of ehlo-args. + // + // A JMAP implementation that talks to a submission server [RFC6409] SHOULD have a configuration + // setting that allows an administrator to modify the set of submission EHLO capabilities it may + // expose on this property. + // + // This allows a JMAP server to easily add access to a new submission extension without code changes. + // + // By default, the JMAP server should hide EHLO capabilities that have to do with the transport + // mechanism and thus are only relevant to the JMAP server (for example, PIPELINING, CHUNKING, or STARTTLS). 
+ // + // Examples of Submission extensions to include: + // - FUTURERELEASE [RFC4865] + // - SIZE [RFC1870] + // - DSN [RFC3461] + // - DELIVERYBY [RFC2852] + // - MT-PRIORITY [RFC6710] + // + // A JMAP server MAY advertise an extension and implement the semantics of that extension locally + // on the JMAP server even if a submission server used by JMAP doesn’t implement it. + // + // The full IANA registry of submission extensions can be found at [iana.org]. + // + // [RFC6409]: https://www.rfc-editor.org/rfc/rfc6409.html + // [RFC4865]: https://www.rfc-editor.org/rfc/rfc4865.html + // [RFC1870]: https://www.rfc-editor.org/rfc/rfc1870.html + // [RFC3461]: https://www.rfc-editor.org/rfc/rfc3461.html + // [RFC2852]: https://www.rfc-editor.org/rfc/rfc2852.html + // [RFC6710]: https://www.rfc-editor.org/rfc/rfc6710.html + // [iana.org]: https://www.iana.org/assignments/mail-parameters + SubmissionExtensions map[string][]string `json:"submissionExtensions"` +} + +// This represents support for the VacationResponse data type and associated API methods. +// +// The value of this property is an empty object in both the JMAP session capabilities +// property and an account’s accountCapabilities property. +type SessionVacationResponseAccountCapabilities struct { +} + +type SessionSieveAccountCapabilities struct { + // The maximum length, in octets, allowed for the name of a SieveScript. + // + // For compatibility with ManageSieve, this MUST be at least 512 (up to 128 Unicode characters). + MaxSizeScriptName int `json:"maxSizeScriptName"` + + // The maximum size (in octets) of a Sieve script the server is willing to store for the user, + // or null for no limit. + MaxSizeScript int `json:"maxSizeScript"` + + // The maximum number of Sieve scripts the server is willing to store for the user, or null for no limit. + MaxNumberScripts int `json:"maxNumberScripts"` + + // The maximum number of Sieve "redirect" actions a script can perform during a single evaluation, + // or null for no limit. + // + // Note that this is different from the total number of "redirect" actions a script can contain. + MaxNumberRedirects int `json:"maxNumberRedirects"` + + // A list of case-sensitive Sieve capability strings (as listed in the Sieve "require" action; + // see [RFC5228, Section 3.2]) indicating the extensions supported by the Sieve engine. + // + // [RFC5228, Section 3.2]: https://www.rfc-editor.org/rfc/rfc5228.html#section-3.2 + SieveExtensions []string `json:"sieveExtensions"` + + // A list of URI scheme parts [RFC3986] for notification methods supported by the Sieve "enotify" + // extension [RFC5435], or null if the extension is not supported by the Sieve engine. + // + // [RFC3986]: https://www.rfc-editor.org/rfc/rfc3986.html + // [RFC5435]: https://www.rfc-editor.org/rfc/rfc5435.html + NotificationMethods []string `json:"notificationMethods"` + + // A list of URI scheme parts [RFC3986] for externally stored list types supported by the + // Sieve "extlists" extension [RFC6134], or null if the extension is not supported by the Sieve engine. + // + // [RFC3986]: https://www.rfc-editor.org/rfc/rfc3986.html + // [RFC6134]: https://www.rfc-editor.org/rfc/rfc6134.html + ExternalLists []string `json:"externalLists"` +} + +type SessionBlobAccountCapabilities struct { + // The maximum size of the blob (in octets) that the server will allow to be created + // (including blobs created by concatenating multiple data sources together). + // + // Clients MUST NOT attempt to create blobs larger than this size. 
+ // + // If this value is null, then clients are not required to limit the size of the blob + // they try to create, though servers can always reject creation of blobs regardless of + // size, e.g., due to lack of disk space or per-user rate limits. + MaxSizeBlobSet int `json:"maxSizeBlobSet"` + + // The maximum number of DataSourceObjects allowed per creation in a Blob/upload. + // + // Servers MUST allow at least 64 DataSourceObjects per creation. + MaxDataSources int `json:"maxDataSources"` + + // An array of data type names that are supported for Blob/lookup. + // + // If the server does not support lookups, then this will be the empty list. + // + // Note that the supportedTypeNames list may include private types that are not in the + // "JMAP Data Types" registry defined by this document. + // + // Clients MUST ignore type names they do not recognise. + SupportedTypeNames []string `json:"supportedTypeNames"` + + // An array of supported digest algorithms that are supported for Blob/get. + // + // If the server does not support calculating blob digests, then this will be the empty + // list. + // + // Algorithms in this list MUST be present in the ["HTTP Digest Algorithm Values" registry] + // defined by [RFC3230]; however, in JMAP, they must be lowercased, e.g., "md5" rather than + // "MD5". + // + // Clients SHOULD prefer algorithms listed earlier in this list. + // + // ["HTTP Digest Algorithm Values" registry]: https://www.iana.org/assignments/http-dig-alg/http-dig-alg.xhtml + SupportedDigestAlgorithms []HttpDigestAlgorithm `json:"supportedDigestAlgorithms"` +} + +type SessionQuotaAccountCapabilities struct { +} + +type SessionContactsAccountCapabilities struct { + // The maximum number of AddressBooks that can be can assigned to a single ContactCard object. + // + // This MUST be an integer >= 1, or null for no limit (or rather, the limit is always the number of AddressBooks + // in the account). + MaxAddressBooksPerCard uint `json:"maxAddressBooksPerCard,omitzero"` + + // If true, the user may create an AddressBook in this account. + MayCreateAddressBook bool `json:"mayCreateAddressBook"` +} + +type SessionCalendarsAccountCapabilities struct { + // The maximum number of Calendars that can be assigned to a single CalendarEvent object. + // + // This MUST be an integer >= 1, or null for no limit (or rather, the limit is always the + // number of Calendars in the account). + MaxCalendarsPerEvent *uint `json:"maxCalendarsPerEvent,omitempty"` + + // The earliest date-time value the server is willing to accept for any date stored in a CalendarEvent. + MinDateTime *UTCDate `json:"minDateTime,omitempty"` + + // The latest date-time value the server is willing to accept for any date stored in a CalendarEvent. + MaxDateTime *UTCDate `json:"maxDateTime,omitempty"` + + // The maximum duration the user may query over when asking the server to expand recurrences. + MaxExpandedQueryDuration Duration `json:"maxExpandedQueryDuration,omitzero"` + + // The maximum number of participants a single event may have, or null for no limit. + MaxParticipantsPerEvent *uint `json:"maxParticipantsPerEvent,omitzero"` + + // If true, the user may create a calendar in this account. + MayCreateCalendar *bool `json:"mayCreateCalendar,omitempty"` +} + +type SessionCalendarsParseAccountCapabilities struct { +} + +type SessionTasksAccountCapabilities struct { + // The earliest date-time the server is willing to accept for any date stored in a Task. 
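+	//
+	// For example, a server might advertise `1970-01-01T00:00:00` here (an illustrative
+	// value, not mandated by the spec); note that a LocalDate carries no time-offset suffix.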
+ MinDateTime LocalDate `json:"minDateTime,omitzero"` + + // The latest date-time the server is willing to accept for any date stored in a Task. + MaxDateTime LocalDate `json:"maxDateTime,omitzero"` + + // If true, the user may create a task list in this account. + MayCreateTaskList bool `json:"mayCreateTaskList"` +} + +type SessionTasksRecurrencesAccountCapabilities struct { + // The maximum duration the user may query over when asking the server to expand recurrences. + MaxExpandedQueryDuration Duration `json:"maxExpandedQueryDuration"` +} + +type SessionTasksAssigneesAccountCapabilities struct { + // The maximum number of participants a single task may have, or null for no limit. + MaxParticipantsPerTask *uint `json:"maxParticipantsPerTask,omitzero"` +} + +type SessionTasksAlertsAccountCapabilities struct { +} + +type SessionTasksMultilingualAccountCapabilities struct { +} + +type SessionTasksCustomTimezonesAccountCapabilities struct { +} + +type SessionPrincipalsAccountCapabilities struct { + // The id of the principal in this account that corresponds to the user fetching this object, if any. + CurrentUserPrincipalId string `json:"currentUserPrincipalId,omitempty"` +} + +type SessionPrincipalsOwnerAccountCapabilities struct { + // The id of an account with the `urn:ietf:params:jmap:principals` capability that contains the + // corresponding `Principal` object. + AccountIdForPrincipal string `json:"accountIdForPrincipal,omitempty"` + + // The id of the `Principal` that owns this account. + PrincipalId string `json:"principalId,omitempty"` +} + +type SessionPrincipalAvailabilityAccountCapabilities struct { + // The maximum duration over which the server is prepared to calculate availability in a single call. + MaxAvailabilityDuration Duration `json:"maxAvailabilityDuration"` +} + +type SessionMDNAccountCapabilities struct { +} + +type SessionAccountCapabilities struct { + Mail *SessionMailAccountCapabilities `json:"urn:ietf:params:jmap:mail,omitempty"` + Submission *SessionSubmissionAccountCapabilities `json:"urn:ietf:params:jmap:submission,omitempty"` + VacationResponse *SessionVacationResponseAccountCapabilities `json:"urn:ietf:params:jmap:vacationresponse,omitempty"` + Sieve *SessionSieveAccountCapabilities `json:"urn:ietf:params:jmap:sieve,omitempty"` + Blob *SessionBlobAccountCapabilities `json:"urn:ietf:params:jmap:blob,omitempty"` + Quota *SessionQuotaAccountCapabilities `json:"urn:ietf:params:jmap:quota,omitempty"` + Contacts *SessionContactsAccountCapabilities `json:"urn:ietf:params:jmap:contacts,omitempty"` + Calendars *SessionCalendarsAccountCapabilities `json:"urn:ietf:params:jmap:calendars,omitempty"` + CalendarsParse *SessionCalendarsParseAccountCapabilities `json:"urn:ietf:params:jmap:calendars:parse,omitempty"` + Tasks *SessionTasksAccountCapabilities `json:"urn:ietf:params:jmap:tasks,omitempty"` + TasksRecurrences *SessionTasksRecurrencesAccountCapabilities `json:"urn:ietf:params:jmap:tasks:recurrences,omitempty"` + TasksAssignees *SessionTasksAssigneesAccountCapabilities `json:"urn:ietf:params:jmap:tasks:assignees,omitempty"` + TasksAlerts *SessionTasksAlertsAccountCapabilities `json:"urn:ietf:params:jmap:tasks:alerts,omitempty"` + TasksMultilingual *SessionTasksMultilingualAccountCapabilities `json:"urn:ietf:params:jmap:tasks:multilingual,omitempty"` + TasksCustomTimezones *SessionTasksCustomTimezonesAccountCapabilities `json:"urn:ietf:params:jmap:tasks:customtimezones,omitempty"` + Principals *SessionPrincipalsAccountCapabilities 
`json:"urn:ietf:params:jmap:principals,omitempty"` + PrincipalsOwner *SessionPrincipalsOwnerAccountCapabilities `json:"urn:ietf:params:jmap:principals:owner,omitempty"` + PrincipalsAvailability *SessionPrincipalAvailabilityAccountCapabilities `json:"urn:ietf:params:jmap:principals:availability,omitempty"` + MDN *SessionMDNAccountCapabilities `json:"urn:ietf:params:jmap:mdn,omitempty"` +} + +type Account struct { + // A user-friendly string to show when presenting content from this account, e.g., the email address representing the owner of the account. + Name string `json:"name,omitempty"` + // This is true if the account belongs to the authenticated user rather than a group account or a personal account of another user that has been shared with them. + IsPersonal bool `json:"isPersonal"` + // This is true if the entire account is read-only. + IsReadOnly bool `json:"isReadOnly"` + // Account capabilities. + AccountCapabilities SessionAccountCapabilities `json:"accountCapabilities"` +} + +type SessionCoreCapabilities struct { + // The maximum file size, in octets, that the server will accept for a single file upload (for any purpose) + MaxSizeUpload int `json:"maxSizeUpload"` + // The maximum number of concurrent requests the server will accept to the upload endpoint. + MaxConcurrentUpload int `json:"maxConcurrentUpload"` + // The maximum size, in octets, that the server will accept for a single request to the API endpoint. + MaxSizeRequest int `json:"maxSizeRequest"` + // The maximum number of concurrent requests the server will accept to the API endpoint. + MaxConcurrentRequests int `json:"maxConcurrentRequests"` + // The maximum number of method calls the server will accept in a single request to the API endpoint. + MaxCallsInRequest int `json:"maxCallsInRequest"` + // The maximum number of objects that the client may request in a single /get type method call. + MaxObjectsInGet int `json:"maxObjectsInGet"` + // The maximum number of objects the client may send to create, update, or destroy in a single /set type method call. + // This is the combined total, e.g., if the maximum is 10, you could not create 7 objects and destroy 6, as this would be 13 actions, + // which exceeds the limit. + MaxObjectsInSet int `json:"maxObjectsInSet"` + // A list of identifiers for algorithms registered in the collation registry, as defined in [@!RFC4790], that the server + // supports for sorting when querying records. + CollationAlgorithms []string `json:"collationAlgorithms,omitempty"` +} + +type SessionMailCapabilities struct { +} + +type SessionSubmissionCapabilities struct { +} + +type SessionVacationResponseCapabilities struct { +} + +type SessionSieveCapabilities struct { +} + +type SessionBlobCapabilities struct { +} + +type SessionQuotaCapabilities struct { +} + +type SessionWebsocketCapabilities struct { + // The wss-URI (see [Section 3 of RFC6455]) to use for initiating a JMAP-over-WebSocket + // handshake (the "WebSocket URL endpoint" colloquially). + // + // [Section 3 of RFC6455]: https://www.rfc-editor.org/rfc/rfc6455.html#section-3 + Url string `json:"url"` + + // This is true if the server supports push notifications over the WebSocket, + // as described in [Section 4.3.5 of RFC 8887]. 
+ // + // [Section 4.3.5 of RFC 8887]: https://www.rfc-editor.org/rfc/rfc8887.html#name-jmap-push-notifications + SupportsPush bool `json:"supportsPush"` +} + +type SessionContactsCapabilities struct { +} + +type SessionCalendarsCapabilities struct { +} + +type SessionCalendarsParseCapabilities struct { +} + +type SessionTasksCapabilities struct { +} + +type SessionTasksRecurrencesCapabilities struct { +} + +type SessionTasksAssigneesCapabilities struct { +} + +type SessionTasksAlertsCapabilities struct { +} + +type SessionTasksMultilingualCapabilities struct { +} + +type SessionTasksCustomTimezonesCapabilities struct { +} + +type SessionPrincipalCapabilities struct { + // Id of Account with the urn:ietf:params:jmap:calendars capability that contains the calendar data + // for this Principal, or null if either (a) there is none (e.g. the Principal is a group just used + // for permissions management), or (b) the user does not have access to any data in the account + // (with the exception of free/busy, which is governed by the "mayGetAvailability" property). + // + // The corresponding Account object can be found in the Principal's "accounts" property, as + // per Section 2 of [RFC9670]. + AccountId string `json:"accountId,omitempty"` + + // If true, the user may call the "Principal/getAvailability" method with this Principal. + MayGetAvailability *bool `json:"mayGetAvailability,omitzero"` + + // If true, the user may add this Principal as a calendar share target (by adding them to the + // "shareWith" property of a calendar, see Section 4). + MayShareWith *bool `json:"mayShareWith,omitzero"` + + // If this Principal may be added as a participant to an event, this is the calendarAddress to + // use to receive iTIP scheduling messages. + CalendarAddress string `json:"calendarAddress,omitempty"` +} + +type SessionPrincipalAvailabilityCapabilities struct { +} + +type SessionMDNCapabilities struct { +} + +type SessionCapabilities struct { + Core *SessionCoreCapabilities `json:"urn:ietf:params:jmap:core,omitempty"` + Mail *SessionMailCapabilities `json:"urn:ietf:params:jmap:mail,omitempty"` + Submission *SessionSubmissionCapabilities `json:"urn:ietf:params:jmap:submission,omitempty"` + VacationResponse *SessionVacationResponseCapabilities `json:"urn:ietf:params:jmap:vacationresponse,omitempty"` + Sieve *SessionSieveCapabilities `json:"urn:ietf:params:jmap:sieve,omitempty"` + Blob *SessionBlobCapabilities `json:"urn:ietf:params:jmap:blob,omitempty"` + Quota *SessionQuotaCapabilities `json:"urn:ietf:params:jmap:quota,omitempty"` + Websocket *SessionWebsocketCapabilities `json:"urn:ietf:params:jmap:websocket,omitempty"` + + // This represents support for the `AddressBook` and `ContactCard` data types and associated API methods. + // + // The value of this property in the JMAP Session `capabilities` property is an empty object. + Contacts *SessionContactsCapabilities `json:"urn:ietf:params:jmap:contacts,omitempty"` + + // This represents support for the `Calendar`, `CalendarEvent`, `CalendarEventNotification`, + // and `ParticipantIdentity` data types and associated API methods, except for `CalendarEvent/parse`. + // + // The value of this property in the JMAP Session `capabilities` property is an empty object. + Calendars *SessionCalendarsCapabilities `json:"urn:ietf:params:jmap:calendars,omitempty"` + + // This represents support for the `CalendarEvent/parse` method. + // + // The value of this property is an empty object in the JMAP session `capabilities` property. 
+ CalendarsParse *SessionCalendarsParseCapabilities `json:"urn:ietf:params:jmap:calendars:parse,omitempty"` + + // This represents support for the core properties and objects of the `TaskList`, + // `Task` and `TaskNotification` data types and associated API methods. + // + // The value of this property in the JMAP Session `capabilities` property is an empty object. + Tasks *SessionTasksCapabilities `json:"urn:ietf:params:jmap:tasks,omitempty"` + + // This represents support for the `recurrence` properties and objects of the `TaskList`, + // `Task` and `TaskNotification` data types and associated API methods. + // + // The value of this property in the JMAP Session `capabilities` property is an empty object. + TasksRecurrences *SessionTasksRecurrencesCapabilities `json:"urn:ietf:params:jmap:tasks:recurrences,omitempty"` + + // This represents support for the `assignee` properties and objects of the `TaskList`, + // `Task` and `TaskNotification` data types and associated API methods. + // + // The value of this property in the JMAP Session `capabilities` property is an empty object. + TasksAssignees *SessionTasksAssigneesCapabilities `json:"urn:ietf:params:jmap:tasks:assignees,omitempty"` + + // This represents support for the `alerts` properties and objects of the `TaskList`, + // `Task` and `TaskNotification` data types and associated API methods. + // + // The value of this property in the JMAP Session `capabilities` property is an empty object. + TasksAlerts *SessionTasksAlertsCapabilities `json:"urn:ietf:params:jmap:tasks:alerts,omitempty"` + + // This represents support for the multilingual properties and objects of the `TaskList`, + // `Task` and `TaskNotification` data types and associated API methods. + // + // The value of this property in the JMAP Session `capabilities` property is an empty object. + TasksMultilingual *SessionTasksMultilingualCapabilities `json:"urn:ietf:params:jmap:tasks:multilingual,omitempty"` + + // This represents support for the custom time zone properties and objects of the `TaskList`, + // `Task` and `TaskNotification` data types and associated API methods. + // + // The value of this property in the JMAP Session `capabilities` property is an empty object. + TasksCustomTimezones *SessionTasksCustomTimezonesCapabilities `json:"urn:ietf:params:jmap:tasks:customtimezones,omitempty"` + + Principals *SessionPrincipalCapabilities `json:"urn:ietf:params:jmap:principals,omitempty"` + + // Represents support for the `Principal/getAvailability` method. + // + // Any account with this capability MUST also have the `urn:ietf:params:jmap:principals` capability. + // + // The value of this property in the JMAP Session `capabilities` property is an empty object. 
+ PrincipalsAvailability *SessionPrincipalAvailabilityCapabilities `json:"urn:ietf:params:jmap:principals:availability,omitempty"` + + MDN *SessionMDNCapabilities `json:"urn:ietf:params:jmap:mdn,omitempty"` +} + +type SessionPrimaryAccounts struct { + Core string `json:"urn:ietf:params:jmap:core,omitempty"` + Mail string `json:"urn:ietf:params:jmap:mail,omitempty"` + Submission string `json:"urn:ietf:params:jmap:submission,omitempty"` + VacationResponse string `json:"urn:ietf:params:jmap:vacationresponse,omitempty"` + Sieve string `json:"urn:ietf:params:jmap:sieve,omitempty"` + Blob string `json:"urn:ietf:params:jmap:blob,omitempty"` + Quota string `json:"urn:ietf:params:jmap:quota,omitempty"` + Websocket string `json:"urn:ietf:params:jmap:websocket,omitempty"` + Task string `json:"urn:ietf:params:jmap:task,omitempty"` + Calendars string `json:"urn:ietf:params:jmap:calendars,omitempty"` + CalendarsParse string `json:"urn:ietf:params:jmap:calendars:parse,omitempty"` + Contacts string `json:"urn:ietf:params:jmap:contacts,omitempty"` + ContactsParse string `json:"urn:ietf:params:jmap:contacts:parse,omitempty"` +} + +type SessionState string + +type State string + +type Language string + +type SessionResponse struct { + Capabilities SessionCapabilities `json:"capabilities"` + + Accounts map[string]Account `json:"accounts,omitempty"` + + // A map of capability URIs (as found in accountCapabilities) to the account id that is considered to be the user’s main or default + // account for data pertaining to that capability. + // If no account being returned belongs to the user, or in any other way there is no appropriate way to determine a default account, + // there MAY be no entry for a particular URI, even though that capability is supported by the server (and in the capabilities object). + // urn:ietf:params:jmap:core SHOULD NOT be present. + PrimaryAccounts SessionPrimaryAccounts `json:"primaryAccounts"` + + // The username associated with the given credentials, or the empty string if none. + Username string `json:"username,omitempty"` + + // The URL to use for JMAP API requests. + ApiUrl string `json:"apiUrl,omitempty"` + + // The URL endpoint to use when downloading files, in URI Template (level 1) format [@!RFC6570]. + // The URL MUST contain variables called accountId, blobId, type, and name. + DownloadUrl string `json:"downloadUrl,omitempty"` + + // The URL endpoint to use when uploading files, in URI Template (level 1) format [@!RFC6570]. + // The URL MUST contain a variable called accountId. + UploadUrl string `json:"uploadUrl,omitempty"` + + // The URL to connect to for push events, as described in Section 7.3, in URI Template (level 1) format [@!RFC6570]. + // The URL MUST contain variables called types, closeafter, and ping. + EventSourceUrl string `json:"eventSourceUrl,omitempty"` + + // A (preferably short) string representing the state of this object on the server. + // If the value of any other property on the Session object changes, this string will change. + // The current value is also returned on the API Response object (see Section 3.4), allowing clients to quickly + // determine if the session information has changed (e.g., an account has been added or removed), + // so they need to refetch the object. + State SessionState `json:"state,omitempty"` +} + +// Method level error types. +const ( + // Some internal server resource was temporarily unavailable. + // + // Attempting the same operation later (perhaps after a backoff with a random factor) may succeed. 
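+	//
+	// A method-level error of this kind is returned in place of the normal method response,
+	// e.g. (illustrative): ["error", { "type": "serverUnavailable" }, "c0"]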
+ MethodLevelErrorServerUnavailable = "serverUnavailable" + + // An unexpected or unknown error occurred during the processing of the call. + // + // A description property should provide more details about the error. The method call made no changes + // to the server’s state. Attempting the same operation again is expected to fail again. + // Contacting the service administrator is likely necessary to resolve this problem if it is persistent. + MethodLevelErrorServerFail = "serverFail" + + // Some, but not all, expected changes described by the method occurred. + // + // The client MUST resynchronise impacted data to determine server state. Use of this error is strongly discouraged. + MethodLevelErrorServerPartialFail = "serverPartialFail" + + // The server does not recognise this method name. + MethodLevelErrorUnknownMethod = "unknownMethod" + + // One of the arguments is of the wrong type or is otherwise invalid, or a required argument is missing. + // + // A description property MAY be present to help debug with an explanation of what the problem was. + // This is a non-localised string, and it is not intended to be shown directly to end users. + MethodLevelErrorInvalidArguments = "invalidArguments" + + // The method used a result reference for one of its arguments, but this failed to resolve. + MethodLevelErrorInvalidResultReference = "invalidResultReference" + + // The method and arguments are valid, but executing the method would violate an Access Control List + // (ACL) or other permissions policy. + MethodLevelErrorForbidden = "forbidden" + + // The accountId does not correspond to a valid account. + MethodLevelErrorAccountNotFound = "accountNotFound" + + // The accountId given corresponds to a valid account, but the account does not support this method or data type. + MethodLevelErrorAccountNotSupportedByMethod = "accountNotSupportedByMethod" + + // This method modifies state, but the account is read-only (as returned on the corresponding Account object in + // the JMAP Session resource). + MethodLevelErrorAccountReadOnly = "accountReadOnly" +) + +// SetError type values. +const ( + // The create/update/destroy would violate an ACL or other permissions policy. + // + // (create; update; destroy). + SetErrorTypeForbidden = "forbidden" + + // The create would exceed a server-defined limit on the number or total size of objects of this type. + // + // (create; update). + SetErrorTypeOverQuota = "overQuota" + + // The create/update would result in an object that exceeds a server-defined limit for the maximum + // size of a single object of this type. + // + // (create; update). + SetErrorTypeTooLarge = "tooLarge" + + // Too many objects of this type have been created recently, and a server-defined rate limit has been reached. + // It may work if tried again later. + // + // (create). + SetErrorTypeRateLimit = "rateLimit" + + // The id given to update/destroy cannot be found. + // + // (update; destroy). + SetErrorTypeNotFound = "notFound" + + // The PatchObject given to update the record was not a valid patch (see the patch description). + // + // (update). + SetErrorTypeInvalidPatch = "invalidPatch" + + // The client requested that an object be both updated and destroyed in the same /set request, and the server + // has decided to therefore ignore the update. + // + // (update). + SetErrorTypeWillDestroy = "willDestroy" + + // The record given is invalid in some way. 
For example: + // + // - It contains properties that are invalid according to the type specification of this record type. + // - It contains a property that may only be set by the server (e.g., “id”) and is different to the current value. + // Note, to allow clients to pass whole objects back, it is not an error to include a server-set property in an + // update as long as the value is identical to the current value on the server. + // - There is a reference to another record (foreign key), and the given id does not correspond to a valid record. + // + // The SetError object SHOULD also have a property called properties of type String[] that lists all the properties + // that were invalid. + // + // Individual methods MAY specify more specific errors for certain conditions that would otherwise result in an + // invalidProperties error. If the condition of one of these is met, it MUST be returned instead of the invalidProperties error. + // + // (create; update). + SetErrorTypeInvalidProperties = "invalidProperties" + + // This is a singleton type, so you cannot create another one or destroy the existing one. + // + // (create; destroy). + SetErrorTypeSingleton = "singleton" + + // The total number of objects to create, update, or destroy exceeds the maximum number the server is + // willing to process in a single method call. + SetErrorTypeRequestTooLarge = "requestTooLarge" + + // An ifInState argument was supplied, and it does not match the current state. + SetErrorTypeStateMismatch = "stateMismatch" + + // The Email to be sent is invalid in some way. + // + // The SetError SHOULD contain a property called properties of type String[] that lists all the properties + // of the Email that were invalid. + SetErrorInvalidEmail = "invalidEmail" + + // The envelope (supplied or generated) has more recipients than the server allows. + // + // A maxRecipients UnsignedInt property MUST also be present on the SetError specifying + // the maximum number of allowed recipients. + SetErrorTooManyRecipients = "tooManyRecipients" + + // The envelope (supplied or generated) does not have any rcptTo email addresses. + SetErrorNoRecipients = "noRecipients" + + // The rcptTo property of the envelope (supplied or generated) contains at least one rcptTo value which + // is not a valid email address for sending to. + // + // An invalidRecipients String[] property MUST also be present on the SetError, which is a list of the invalid addresses. + SetErrorInvalidRecipients = "invalidRecipients" + + // The server does not permit the user to send a message with this envelope From address [RFC5321]. + // + // [RFC5321]: https://datatracker.ietf.org/doc/html/rfc5321 + SetErrorForbiddenMailFrom = "forbiddenMailFrom" + + // The server does not permit the user to send a message with the From header field [RFC5322] of the message to be sent. + // + // [RFC5322]: https://datatracker.ietf.org/doc/html/rfc5322 + SetErrorForbiddenFrom = "forbiddenFrom" + + // The user does not have permission to send at all right now for some reason. + // + // A description String property MAY be present on the SetError object to display to the user why they are not permitted. + SetErrorForbiddenToSend = "forbiddenToSend" + + // The message has the `$mdnsent` keyword already set. + SetErrorMdnAlreadySent = "mdnAlreadySent" +) + +type SetError struct { + // The type of error. + Type string `json:"type"` + + // A description of the error to help with debugging that includes an explanation of what the problem was. 
+ // + // This is a non-localised string and is not intended to be shown directly to end users. + Description string `json:"description,omitempty"` + + // Lists all the properties of the Email that were invalid. + // + // Only set for the invalidEmail error after a failed EmailSubmission/set errors. + Properties []string `json:"properties,omitempty"` + + // Specifies the maximum number of allowed recipients. + // + // Only set for the tooManyRecipients error after a failed EmailSubmission/set errors. + MaxRecipients int `json:"maxRecipients,omitzero"` + + // List of invalid addresses. + // + // Only set for the invalidRecipients error after a failed EmailSubmission/set errors. + InvalidRecipients []string `json:"invalidRecipients,omitempty"` +} + +type FilterOperatorTerm string + +const ( + And FilterOperatorTerm = "AND" + Or FilterOperatorTerm = "OR" + Not FilterOperatorTerm = "NOT" +) + +type MailboxRights struct { + // If true, the user may use this Mailbox as part of a filter in an Email/query call, + // and the Mailbox may be included in the mailboxIds property of Email objects. + // + // Email objects may be fetched if they are in at least one Mailbox with this permission. + // + // If a sub-Mailbox is shared but not the parent Mailbox, this may be false. + // + // Corresponds to IMAP ACLs lr (if mapping from IMAP, both are required for this to be true). + MayReadItems bool `json:"mayReadItems"` + + // The user may add mail to this Mailbox (by either creating a new Email or moving an existing one). + // + // Corresponds to IMAP ACL i. + MayAddItems bool `json:"mayAddItems"` + + // The user may remove mail from this Mailbox (by either changing the Mailboxes of an Email or + // destroying the Email). + // + // Corresponds to IMAP ACLs te (if mapping from IMAP, both are required for this to be true). + MayRemoveItems bool `json:"mayRemoveItems"` + + // The user may add or remove the $seen keyword to/from an Email. + // + // If an Email belongs to multiple Mailboxes, the user may only modify $seen if they have this + // permission for all of the Mailboxes. + // + // Corresponds to IMAP ACL s. + MaySetSeen bool `json:"maySetSeen"` + + // The user may add or remove any keyword other than $seen to/from an Email. + // + // If an Email belongs to multiple Mailboxes, the user may only modify keywords if they have this + // permission for all of the Mailboxes. + // + // Corresponds to IMAP ACL w. + MaySetKeywords bool `json:"maySetKeywords"` + + // The user may create a Mailbox with this Mailbox as its parent. + // + // Corresponds to IMAP ACL k. + MayCreateChild bool `json:"mayCreateChild"` + + // The user may rename the Mailbox or make it a child of another Mailbox. + // + // Corresponds to IMAP ACL x (although this covers both rename and delete permissions). + MayRename bool `json:"mayRename"` + + // The user may delete the Mailbox itself. + // + // Corresponds to IMAP ACL x (although this covers both rename and delete permissions). + MayDelete bool `json:"mayDelete"` + + // Messages may be submitted directly to this Mailbox. + // + // Corresponds to IMAP ACL p. + MaySubmit bool `json:"maySubmit"` +} + +type Mailbox struct { + // The id of the Mailbox. + Id string `json:"id,omitempty"` + + // User-visible name for the Mailbox, e.g., “Inbox”. + // + // This MUST be a Net-Unicode string [@!RFC5198] of at least 1 character in length, subject to the maximum size + // given in the capability object. + // + // There MUST NOT be two sibling Mailboxes with both the same parent and the same name. 
+ // + // Servers MAY reject names that violate server policy (e.g., names containing a slash (/) or control characters). + Name string `json:"name,omitempty"` + + // The Mailbox id for the parent of this Mailbox, or null if this Mailbox is at the top level. + // + // Mailboxes form acyclic graphs (forests) directed by the child-to-parent relationship. There MUST NOT be a loop. + ParentId string `json:"parentId,omitempty"` + + // Identifies Mailboxes that have a particular common purpose (e.g., the “inbox”), regardless of the name property + // (which may be localised). + // + // This value is shared with IMAP (exposed in IMAP via the SPECIAL-USE extension [RFC6154]). + // However, unlike in IMAP, a Mailbox MUST only have a single role, and there MUST NOT be two Mailboxes in the same + // account with the same role. + // + // Servers providing IMAP access to the same data are encouraged to enforce these extra restrictions in IMAP as well. + // Otherwise, modifying the IMAP attributes to ensure compliance when exposing the data over JMAP is implementation dependent. + // + // The value MUST be one of the Mailbox attribute names listed in the IANA IMAP Mailbox Name Attributes registry, + // as established in [RFC8457], converted to lowercase. New roles may be established here in the future. + // + // An account is not required to have Mailboxes with any particular roles. + // + // [RFC6154]: https://www.rfc-editor.org/rfc/rfc6154.html + // [RFC8457]: https://www.rfc-editor.org/rfc/rfc8457.html + Role string `json:"role,omitempty"` + + // Defines the sort order of Mailboxes when presented in the client’s UI, so it is consistent between devices. + // + // Default value: 0 + // + // The number MUST be an integer in the range 0 <= sortOrder < 2^31. + // + // A Mailbox with a lower order should be displayed before a Mailbox with a higher order + // (that has the same parent) in any Mailbox listing in the client’s UI. + // Mailboxes with equal order SHOULD be sorted in alphabetical order by name. + // The sorting should take into account locale-specific character order convention. + SortOrder *int `json:"sortOrder,omitempty"` + + // The number of Emails in this Mailbox. + TotalEmails int `json:"totalEmails"` + + // The number of Emails in this Mailbox that have neither the $seen keyword nor the $draft keyword. + UnreadEmails int `json:"unreadEmails"` + + // The number of Threads where at least one Email in the Thread is in this Mailbox. + TotalThreads int `json:"totalThreads"` + + // An indication of the number of “unread” Threads in the Mailbox. + UnreadThreads int `json:"unreadThreads"` + + // The set of rights (Access Control Lists (ACLs)) the user has in relation to this Mailbox. + // + // These are backwards compatible with IMAP ACLs, as defined in [RFC4314]. + // + // [RFC4314]: https://www.rfc-editor.org/rfc/rfc4314.html + MyRights *MailboxRights `json:"myRights,omitempty"` + + // Has the user indicated they wish to see this Mailbox in their client? + // + // This SHOULD default to false for Mailboxes in shared accounts the user has access to and true + // for any new Mailboxes created by the user themself. + // + // This MUST be stored separately per user where multiple users have access to a shared Mailbox. + // + // A user may have permission to access a large number of shared accounts, or a shared account with a very + // large set of Mailboxes, but only be interested in the contents of a few of these. 
+ // + // Clients may choose to only display Mailboxes where the isSubscribed property is set to true, and offer + // a separate UI to allow the user to see and subscribe/unsubscribe from the full set of Mailboxes. + // + // However, clients MAY choose to ignore this property, either entirely for ease of implementation or just + // for an account where isPersonal is true (indicating it is the user’s own rather than a shared account). + // + // This property corresponds to IMAP [RFC3501] Mailbox subscriptions. + // + // [RFC3501]: https://www.rfc-editor.org/rfc/rfc3501.html + IsSubscribed *bool `json:"isSubscribed,omitempty"` +} + +type MailboxChange struct { + // User-visible name for the Mailbox, e.g., “Inbox”. + // + // This MUST be a Net-Unicode string [@!RFC5198] of at least 1 character in length, subject to the maximum size + // given in the capability object. + // + // There MUST NOT be two sibling Mailboxes with both the same parent and the same name. + // + // Servers MAY reject names that violate server policy (e.g., names containing a slash (/) or control characters). + Name string `json:"name,omitempty"` + + // The Mailbox id for the parent of this Mailbox, or null if this Mailbox is at the top level. + // + // Mailboxes form acyclic graphs (forests) directed by the child-to-parent relationship. There MUST NOT be a loop. + ParentId string `json:"parentId,omitempty"` + + // Identifies Mailboxes that have a particular common purpose (e.g., the “inbox”), regardless of the name property + // (which may be localised). + // + // This value is shared with IMAP (exposed in IMAP via the SPECIAL-USE extension [RFC6154]). + // However, unlike in IMAP, a Mailbox MUST only have a single role, and there MUST NOT be two Mailboxes in the same + // account with the same role. + // + // Servers providing IMAP access to the same data are encouraged to enforce these extra restrictions in IMAP as well. + // Otherwise, modifying the IMAP attributes to ensure compliance when exposing the data over JMAP is implementation dependent. + // + // The value MUST be one of the Mailbox attribute names listed in the IANA IMAP Mailbox Name Attributes registry, + // as established in [RFC8457], converted to lowercase. New roles may be established here in the future. + // + // An account is not required to have Mailboxes with any particular roles. + // + // [RFC6154]: https://www.rfc-editor.org/rfc/rfc6154.html + // [RFC8457]: https://www.rfc-editor.org/rfc/rfc8457.html + Role string `json:"role,omitempty"` + + // Defines the sort order of Mailboxes when presented in the client’s UI, so it is consistent between devices. + // + // Default value: 0 + // + // The number MUST be an integer in the range 0 <= sortOrder < 2^31. + // + // A Mailbox with a lower order should be displayed before a Mailbox with a higher order + // (that has the same parent) in any Mailbox listing in the client’s UI. + // Mailboxes with equal order SHOULD be sorted in alphabetical order by name. + // The sorting should take into account locale-specific character order convention. + SortOrder *int `json:"sortOrder,omitempty"` + + // Has the user indicated they wish to see this Mailbox in their client? + // + // This SHOULD default to false for Mailboxes in shared accounts the user has access to and true + // for any new Mailboxes created by the user themself. + // + // This MUST be stored separately per user where multiple users have access to a shared Mailbox. 
+ // + // A user may have permission to access a large number of shared accounts, or a shared account with a very + // large set of Mailboxes, but only be interested in the contents of a few of these. + // + // Clients may choose to only display Mailboxes where the isSubscribed property is set to true, and offer + // a separate UI to allow the user to see and subscribe/unsubscribe from the full set of Mailboxes. + // + // However, clients MAY choose to ignore this property, either entirely for ease of implementation or just + // for an account where isPersonal is true (indicating it is the user’s own rather than a shared account). + // + // This property corresponds to IMAP [RFC3501] Mailbox subscriptions. + // + // [RFC3501]: https://www.rfc-editor.org/rfc/rfc3501.html + IsSubscribed *bool `json:"isSubscribed,omitempty"` +} + +func (m MailboxChange) AsPatch() PatchObject { + p := PatchObject{} + if m.Name != "" { + p["name"] = m.Name + } + if m.ParentId != "" { + p["parentId"] = m.ParentId + } + if m.Role != "" { + p["role"] = m.Role + } + if m.SortOrder != nil { + p["sortOrder"] = m.SortOrder + } + if m.IsSubscribed != nil { + p["isSubscribed"] = m.IsSubscribed + } + return p +} + +type MailboxGetCommand struct { + AccountId string `json:"accountId"` + Ids []string `json:"ids,omitempty"` +} + +type MailboxGetRefCommand struct { + AccountId string `json:"accountId"` + IdsRef *ResultReference `json:"#ids,omitempty"` +} + +type MailboxSetCommand struct { + AccountId string `json:"accountId"` + IfInState string `json:"ifInState,omitempty"` + Create map[string]MailboxChange `json:"create,omitempty"` + Update map[string]PatchObject `json:"update,omitempty"` + Destroy []string `json:"destroy,omitempty"` +} + +type MailboxSetResponse struct { + AccountId string `json:"accountId"` + OldState State `json:"oldState,omitempty"` + NewState State `json:"newState,omitempty"` + Created map[string]Mailbox `json:"created,omitempty"` + Updated map[string]Mailbox `json:"updated,omitempty"` + Destroyed []string `json:"destroyed,omitempty"` + NotCreated map[string]SetError `json:"notCreated,omitempty"` + NotUpdated map[string]SetError `json:"notUpdated,omitempty"` + NotDestroyed map[string]SetError `json:"notDestroyed,omitempty"` +} + +type MailboxChangesCommand struct { + // The id of the account to use. + AccountId string `json:"accountId"` + + // The current state of the client. + // + // This is the string that was returned as the state argument in the Mailbox/get response. + // + // The server will return the changes that have occurred since this state. + SinceState string `json:"sinceState,omitempty"` + + // The maximum number of ids to return in the response. + // + // The server MAY choose to return fewer than this value but MUST NOT return more. + // + // If not given by the client, the server may choose how many to return. + // + // If supplied by the client, the value MUST be a positive integer greater than 0. + // + // If a value outside of this range is given, the server MUST reject the call with an invalidArguments error. 
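+	//
+	// For illustration only (the account id, state string, and call id are made up), a Mailbox/changes
+	// invocation bounding the response to 50 ids might be serialised as:
+	//
+	//	[ "Mailbox/changes", { "accountId": "a1", "sinceState": "s01", "maxChanges": 50 }, "c1" ]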
+ MaxChanges uint `json:"maxChanges,omitzero"` +} + +type MailboxFilterElement interface { + _isAMailboxFilterElement() // marker method +} + +type MailboxFilterCondition struct { + ParentId string `json:"parentId,omitempty"` + Name string `json:"name,omitempty"` + Role string `json:"role,omitempty"` + HasAnyRole *bool `json:"hasAnyRole,omitempty"` + IsSubscribed *bool `json:"isSubscribed,omitempty"` +} + +func (c MailboxFilterCondition) _isAMailboxFilterElement() {} + +var _ MailboxFilterElement = &MailboxFilterCondition{} + +type MailboxFilterOperator struct { + Operator FilterOperatorTerm `json:"operator"` + Conditions []MailboxFilterElement `json:"conditions,omitempty"` +} + +func (c MailboxFilterOperator) _isAMailboxFilterElement() {} + +var _ MailboxFilterElement = &MailboxFilterOperator{} + +type MailboxComparator struct { + Property string `json:"property"` + IsAscending bool `json:"isAscending,omitempty"` + Limit int `json:"limit,omitzero"` + CalculateTotal bool `json:"calculateTotal,omitempty"` +} + +type MailboxQueryCommand struct { + AccountId string `json:"accountId"` + Filter MailboxFilterElement `json:"filter,omitempty"` + Sort []MailboxComparator `json:"sort,omitempty"` + SortAsTree bool `json:"sortAsTree,omitempty"` + FilterAsTree bool `json:"filterAsTree,omitempty"` +} + +type EmailFilterElement interface { + _isAnEmailFilterElement() // marker method + IsNotEmpty() bool +} + +type EmailFilterCondition struct { + // A Mailbox id. + // + // An Email must be in this Mailbox to match the condition. + InMailbox string `json:"inMailbox,omitempty"` + + // A list of Mailbox ids. + // + // An Email must be in at least one Mailbox not in this list to match the condition. + // + // This is to allow messages solely in trash/spam to be easily excluded from a search. + InMailboxOtherThan []string `json:"inMailboxOtherThan,omitempty"` + + // The receivedAt date-time of the Email must be before this date-time to match + // the condition. + Before time.Time `json:"before,omitzero"` // omitzero requires Go 1.24 + + // The receivedAt date-time of the Email must be the same or after this date-time + // to match the condition. + After time.Time `json:"after,omitzero"` + + // The size property of the Email must be equal to or greater than this number to match + // the condition. + MinSize int `json:"minSize,omitempty"` + + // The size property of the Email must be less than this number to match the condition. + MaxSize int `json:"maxSize,omitempty"` + + // All Emails (including this one) in the same Thread as this Email must have the given + // keyword to match the condition. + AllInThreadHaveKeyword string `json:"allInThreadHaveKeyword,omitempty"` + + // At least one Email (possibly this one) in the same Thread as this Email must have the + // given keyword to match the condition. + SomeInThreadHaveKeyword string `json:"someInThreadHaveKeyword,omitempty"` + + // All Emails (including this one) in the same Thread as this Email must not have the + // given keyword to match the condition. + NoneInThreadHaveKeyword string `json:"noneInThreadHaveKeyword,omitempty"` + + // This Email must have the given keyword to match the condition. + HasKeyword string `json:"hasKeyword,omitempty"` + + // This Email must not have the given keyword to match the condition. + NotKeyword string `json:"notKeyword,omitempty"` + + // The hasAttachment property of the Email must be identical to the value given to match + // the condition. 
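+	//
+	// For illustration only (the Mailbox id is made up), a condition matching unread Emails that carry
+	// attachments in a single Mailbox might be serialised as:
+	//
+	//	{ "inMailbox": "mailbox-1", "hasAttachment": true, "notKeyword": "$seen" }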
+ HasAttachment bool `json:"hasAttachment,omitempty"` + + // Looks for the text in Emails. + // + // The server MUST look up text in the From, To, Cc, Bcc, and Subject header fields of the + // message and SHOULD look inside any text/* or other body parts that may be converted to + // text by the server. + // + // The server MAY extend the search to any additional textual property. + Text string `json:"text,omitempty"` + + // Looks for the text in the From header field of the message. + From string `json:"from,omitempty"` + + // Looks for the text in the To header field of the message. + To string `json:"to,omitempty"` + + // Looks for the text in the Cc header field of the message. + Cc string `json:"cc,omitempty"` + + // Looks for the text in the Bcc header field of the message. + Bcc string `json:"bcc,omitempty"` + + // Looks for the text in the Subject header field of the message. + Subject string `json:"subject,omitempty"` + + // Looks for the text in one of the body parts of the message. + // + // The server MAY exclude MIME body parts with content media types other than text/* + // and message/* from consideration in search matching. + // + // Care should be taken to match based on the text content actually presented to an end user + // by viewers for that media type or otherwise identified as appropriate for search indexing. + // + // Matching document metadata uninteresting to an end user (e.g., markup tag and attribute + // names) is undesirable. + Body string `json:"body,omitempty"` + + // The array MUST contain either one or two elements. + // + // The first element is the name of the header field to match against. + // + // The second (optional) element is the text to look for in the header field value. + // + // If not supplied, the message matches simply if it has a header field of the given name. + Header []string `json:"header,omitempty"` +} + +func (f EmailFilterCondition) _isAnEmailFilterElement() { +} + +func (f EmailFilterCondition) IsNotEmpty() bool { + if !f.After.IsZero() { + return true + } + if f.AllInThreadHaveKeyword != "" { + return true + } + if len(f.Bcc) > 0 { + return true + } + if !f.Before.IsZero() { + return true + } + if f.Body != "" { + return true + } + if f.Cc != "" { + return true + } + if f.From != "" { + return true + } + if f.HasAttachment { + return true + } + if f.HasKeyword != "" { + return true + } + if len(f.Header) > 0 { + return true + } + if f.InMailbox != "" { + return true + } + if len(f.InMailboxOtherThan) > 0 { + return true + } + if f.MaxSize != 0 { + return true + } + if f.MinSize != 0 { + return true + } + if f.NoneInThreadHaveKeyword != "" { + return true + } + if f.NotKeyword != "" { + return true + } + if f.SomeInThreadHaveKeyword != "" { + return true + } + if f.Subject != "" { + return true + } + if f.Text != "" { + return true + } + if f.To != "" { + return true + } + return false +} + +var _ EmailFilterElement = &EmailFilterCondition{} + +type EmailFilterOperator struct { + Operator FilterOperatorTerm `json:"operator"` + Conditions []EmailFilterElement `json:"conditions,omitempty"` +} + +func (o EmailFilterOperator) _isAnEmailFilterElement() { +} + +func (o EmailFilterOperator) IsNotEmpty() bool { + return len(o.Conditions) > 0 +} + +var _ EmailFilterElement = &EmailFilterOperator{} + +type EmailComparator struct { + // The name of the property on the objects to compare. + Property string `json:"property,omitempty"` + + // If true, sort in ascending order. + // + // Optional; default value: true. 
+ // + // If false, reverse the comparator’s results to sort in descending order. + IsAscending bool `json:"isAscending,omitempty"` + + // The identifier, as registered in the collation registry defined in [RFC4790], + // for the algorithm to use when comparing the order of strings. + // + // Optional; default is server dependent. + // + // The algorithms the server supports are advertised in the capabilities object returned + // with the Session object. + // + // [RFC4790]: https://www.rfc-editor.org/rfc/rfc4790.html + Collation string `json:"collation,omitempty"` + + // Email-specific: keyword that must be included in the Email object. + Keyword string `json:"keyword,omitempty"` +} + +// If an anchor argument is given, the anchor is looked for in the results after filtering +// and sorting. +// +// If found, the anchorOffset is then added to its index. If the resulting index is now negative, +// it is clamped to 0. This index is now used exactly as though it were supplied as the position +// argument. If the anchor is not found, the call is rejected with an anchorNotFound error. +// +// If an anchor is specified, any position argument supplied by the client MUST be ignored. +// If no anchor is supplied, any anchorOffset argument MUST be ignored. +// +// A client can use anchor instead of position to find the index of an id within a large set of results. +type EmailQueryCommand struct { + // The id of the account to use. + AccountId string `json:"accountId"` + + // Determines the set of Emails returned in the results. + // + // If null, all objects in the account of this type are included in the results. + Filter EmailFilterElement `json:"filter,omitempty"` + + // Lists the names of properties to compare between two Email records, and how to compare + // them, to determine which comes first in the sort. + // + // If two Email records have an identical value for the first comparator, the next comparator + // will be considered, and so on. If all comparators are the same (this includes the case + // where an empty array or null is given as the sort argument), the sort order is server + // dependent, but it MUST be stable between calls to Email/query. + Sort []EmailComparator `json:"sort,omitempty"` + + // If true, Emails in the same Thread as a previous Email in the list (given the + // filter and sort order) will be removed from the list. + // + // This means only one Email at most will be included in the list for any given Thread. + CollapseThreads bool `json:"collapseThreads,omitempty"` + + // The zero-based index of the first id in the full list of results to return. + // + // If a negative value is given, it is an offset from the end of the list. + // Specifically, the negative value MUST be added to the total number of results given + // the filter, and if still negative, it’s clamped to 0. This is now the zero-based + // index of the first id to return. + // + // If the index is greater than or equal to the total number of objects in the results + // list, then the ids array in the response will be empty, but this is not an error. + Position int `json:"position,omitempty"` + + // An Email id. + // + // If supplied, the position argument is ignored. + // The index of this id in the results will be used in combination with the anchorOffset + // argument to determine the index of the first result to return. + Anchor string `json:"anchor,omitempty"` + + // The index of the first result to return relative to the index of the anchor, + // if an anchor is given. + // + // Default: 0. 
+ // + // This MAY be negative. + // + // For example, -1 means the Email immediately preceding the anchor is the first result in + // the list returned. + AnchorOffset int `json:"anchorOffset,omitzero"` + + // The maximum number of results to return. + // + // If null, no limit presumed. + // The server MAY choose to enforce a maximum limit argument. + // In this case, if a greater value is given (or if it is null), the limit is clamped + // to the maximum; the new limit is returned with the response so the client is aware. + // + // If a negative value is given, the call MUST be rejected with an invalidArguments error. + Limit *uint `json:"limit,omitempty"` + + // Does the client wish to know the total number of results in the query? + // + // This may be slow and expensive for servers to calculate, particularly with complex filters, + // so clients should take care to only request the total when needed. + CalculateTotal bool `json:"calculateTotal,omitempty"` +} + +type EmailGetCommand struct { + // The ids of the Email objects to return. + // + // If null, then all records of the data type are returned, if this is supported for that + // data type and the number of records does not exceed the maxObjectsInGet limit. + Ids []string `json:"ids,omitempty"` + + // The id of the account to use. + AccountId string `json:"accountId"` + + // If supplied, only the properties listed in the array are returned for each Email object. + // + // If null, the following properties are returned: + // + // [ "id", "blobId", "threadId", "mailboxIds", "keywords", "size", + // "receivedAt", "messageId", "inReplyTo", "references", "sender", "from", + // "to", "cc", "bcc", "replyTo", "subject", "sentAt", "hasAttachment", + // "preview", "bodyValues", "textBody", "htmlBody", "attachments" ] + // + // The id property of the object is always returned, even if not explicitly requested. + // + // If an invalid property is requested, the call MUST be rejected with an invalidArguments error. + Properties []string `json:"properties,omitempty"` + + // A list of properties to fetch for each EmailBodyPart returned. + // + // If omitted, this defaults to: + // + // [ "partId", "blobId", "size", "name", "type", "charset", "disposition", "cid", "language", "location" ] + // + BodyProperties []string `json:"bodyProperties,omitempty"` + + // (default: false) If true, the bodyValues property includes any text/* part in the textBody property. + FetchTextBodyValues bool `json:"fetchTextBodyValues,omitzero"` + + // (default: false) If true, the bodyValues property includes any text/* part in the htmlBody property. + FetchHTMLBodyValues bool `json:"fetchHTMLBodyValues,omitzero"` + + // (default: false) If true, the bodyValues property includes any text/* part in the bodyStructure property. + FetchAllBodyValues bool `json:"fetchAllBodyValues,omitzero"` + + // If greater than zero, the value property of any EmailBodyValue object returned in bodyValues + // MUST be truncated if necessary so it does not exceed this number of octets in size. + // + // If 0 (the default), no truncation occurs. + // + // The server MUST ensure the truncation results in valid UTF-8 and does not occur mid-codepoint. + // + // If the part is of type text/html, the server SHOULD NOT truncate inside an HTML tag, e.g., in + // the middle of
<a href="https://example.com">.
+	//
+	// There is no requirement for the truncated form to be a balanced tree or valid HTML (indeed, the original
+	// source may well be neither of these things).
+	MaxBodyValueBytes uint `json:"maxBodyValueBytes,omitempty"`
+}
+
+// Reference to Previous Method Results
+//
+// To allow clients to make more efficient use of the network and avoid round trips, an argument to one method
+// can be taken from the result of a previous method call in the same request.
+//
+// To do this, the client prefixes the argument name with # (an [octothorpe]).
+//
+// When processing a method call, the server MUST first check the arguments object for any names beginning with #.
+//
+// If found, the result reference should be resolved and the value used as the “real” argument.
+//
+// The method is then processed as normal.
+//
+// If any result reference fails to resolve, the whole method MUST be rejected with an invalidResultReference error.
+//
+// If an arguments object contains the same argument name in normal and referenced form (e.g., foo and #foo),
+// the method MUST return an invalidArguments error.
+//
+// To resolve:
+//
+// 1. Find the first response with a method call id identical to the resultOf property of the ResultReference
+// in the methodResponses array from previously processed method calls in the same request.
+// If none, evaluation fails.
+// 2. If the response name is not identical to the name property of the ResultReference, evaluation fails.
+// 3. Apply the path to the arguments object of the response (the second item in the response array)
+// following the JSON Pointer algorithm [RFC6901], except with the following addition in “Evaluation” (see Section 4):
+// 4. If the currently referenced value is a JSON array, the reference token may be exactly the single character *,
+// making the new referenced value the result of applying the rest of the JSON Pointer tokens to every item in the
+// array and returning the results in the same order in a new array.
+// 5. If the result of applying the rest of the pointer tokens to each item was itself an array, the contents of this
+// array are added to the output rather than the array itself (i.e., the result is flattened from an array of
+// arrays to a single array).
+//
+// [octothorpe]: https://en.wiktionary.org/wiki/octothorpe
+// [RFC6901]: https://datatracker.ietf.org/doc/html/rfc6901
+type ResultReference struct {
+	// The method call id of a previous method call in the current request.
+	ResultOf string `json:"resultOf"`
+
+	// The required name of a response to that method call.
+	Name Command `json:"name"`
+
+	// A pointer into the arguments of the response selected via the name and resultOf properties.
+	//
+	// This is a JSON Pointer [RFC6901], except it also allows the use of * to map through an array.
+	//
+	// [RFC6901]: https://datatracker.ietf.org/doc/html/rfc6901
+	Path string `json:"path,omitempty"`
+}
+
+type EmailGetRefCommand struct {
+	// The ids of the Email objects to return.
+	//
+	// If null, then all records of the data type are returned, if this is supported for that
+	// data type and the number of records does not exceed the maxObjectsInGet limit.
+	IdsRef *ResultReference `json:"#ids,omitempty"`
+
+	// The id of the account to use.
+	AccountId string `json:"accountId"`
+
+	// If supplied, only the properties listed in the array are returned for each Email object.
+	//
+	// If null, the following properties are returned:
+	//
+	// [ "id", "blobId", "threadId", "mailboxIds", "keywords", "size",
+	//   "receivedAt", "messageId", "inReplyTo", "references", "sender", "from",
+	//   "to", "cc", "bcc", "replyTo", "subject", "sentAt", "hasAttachment",
+	//   "preview", "bodyValues", "textBody", "htmlBody", "attachments" ]
+	//
+	// The id property of the object is always returned, even if not explicitly requested.
+	//
+	// If an invalid property is requested, the call MUST be rejected with an invalidArguments error.
+	Properties []string `json:"properties,omitempty"`
+
+	// A list of properties to fetch for each EmailBodyPart returned.
+	//
+	// If omitted, this defaults to:
+	//
+	// [ "partId", "blobId", "size", "name", "type", "charset", "disposition", "cid", "language", "location" ]
+	//
+	BodyProperties []string `json:"bodyProperties,omitempty"`
+
+	// (default: false) If true, the bodyValues property includes any text/* part in the textBody property.
+	FetchTextBodyValues bool `json:"fetchTextBodyValues,omitzero"`
+
+	// (default: false) If true, the bodyValues property includes any text/* part in the htmlBody property.
+	FetchHTMLBodyValues bool `json:"fetchHTMLBodyValues,omitzero"`
+
+	// (default: false) If true, the bodyValues property includes any text/* part in the bodyStructure property.
+	FetchAllBodyValues bool `json:"fetchAllBodyValues,omitzero"`
+
+	// If greater than zero, the value property of any EmailBodyValue object returned in bodyValues
+	// MUST be truncated if necessary so it does not exceed this number of octets in size.
+	//
+	// If 0 (the default), no truncation occurs.
+	//
+	// The server MUST ensure the truncation results in valid UTF-8 and does not occur mid-codepoint.
+	//
+	// If the part is of type text/html, the server SHOULD NOT truncate inside an HTML tag, e.g., in
+	// the middle of <a href="https://example.com">.
+	//
+	// There is no requirement for the truncated form to be a balanced tree or valid HTML (indeed, the original
+	// source may well be neither of these things).
+	MaxBodyValueBytes uint `json:"maxBodyValueBytes,omitempty"`
+}
+
+type EmailChangesCommand struct {
+	// The id of the account to use.
+	AccountId string `json:"accountId"`
+
+	// The current state of the client.
+	//
+	// This is the string that was returned as the state argument in the Email/get response.
+	// The server will return the changes that have occurred since this state.
+	SinceState State `json:"sinceState,omitzero"`
+
+	// The maximum number of ids to return in the response.
+	//
+	// The server MAY choose to return fewer than this value but MUST NOT return more.
+	// If not given by the client, the server may choose how many to return.
+	// If supplied by the client, the value MUST be a positive integer greater than 0.
+	MaxChanges uint `json:"maxChanges,omitzero"`
+}
+
+type EmailAddress struct {
+	// The display-name of the mailbox [RFC5322].
+	//
+	// If this is a quoted-string:
+	// 1. The surrounding DQUOTE characters are removed.
+	// 2. Any quoted-pair is decoded.
+	// 3. White space is unfolded, and then any leading and trailing white space is removed.
+	// If there is no display-name but there is a comment immediately following the addr-spec, the value of this
+	// SHOULD be used instead. Otherwise, this property is null.
+	//
+	// [RFC5322]: https://www.rfc-editor.org/rfc/rfc5322.html
+	//
+	// example: $emailAddressName
+	Name string `json:"name,omitempty"`
+
+	// The addr-spec of the mailbox [RFC5322].
+ // + // Any syntactically correct encoded sections [RFC2047] with a known encoding MUST be decoded, + // following the same rules as for the Text form. + // + // Parsing SHOULD be best effort in the face of invalid structure to accommodate invalid messages and + // semi-complete drafts. EmailAddress objects MAY have an email property that does not conform to the + // addr-spec form (for example, may not contain an @ symbol). + // + // [RFC2047]: https://www.rfc-editor.org/rfc/rfc2047.html + // [RFC5322]: https://www.rfc-editor.org/rfc/rfc5322.html + // + // example: $emailAddressEmail + Email string `json:"email,omitempty"` +} + +type EmailAddressGroup struct { + // The display-name of the group [RFC5322], or null if the addresses are not part of a group. + // + // If this is a quoted-string, it is processed the same as the name in the EmailAddress type. + // + // [RFC5322]: https://www.rfc-editor.org/rfc/rfc5322.html + + Name string `json:"name,omitempty"` + + // The mailbox values that belong to this group, represented as EmailAddress objects. + Addresses []EmailAddress `json:"addresses,omitempty"` +} + +type EmailHeader struct { + // The header field name as defined in [RFC5322], with the same capitalization that it has in the message. + // + // [RFC5322]: https://www.rfc-editor.org/rfc/rfc5322.html + Name string `json:"name"` + + // The header field value as defined in [RFC5322], in Raw form. + // + // [RFC5322]: https://www.rfc-editor.org/rfc/rfc5322.html + Value string `json:"value"` +} + +// Email body part. +// +// The client may specify a `partId` OR a `blobId`, but not both. +// If a `partId` is given, this `partId` MUST be present in the `bodyValues` property. +// +// The `charset` property MUST be omitted if a `partId` is given (the part’s content is included +// in `bodyValues`, and the server may choose any appropriate encoding). +// +// The `size` property MUST be omitted if a `partId` is given. If a `blobId` is given, it may be +// included but is ignored by the server (the size is actually calculated from the blob content +// itself). +// +// A `Content-Transfer-Encoding` header field MUST NOT be given. +type EmailBodyPart struct { + // Identifies this part uniquely within the Email. + // + // This is scoped to the `emailId` and has no meaning outside of the JMAP Email object representation. + // This is null if, and only if, the part is of type `multipart/*`. + // + // example: $attachmentPartId + PartId string `json:"partId,omitempty"` + + // The id representing the raw octets of the contents of the part, after decoding any known + // `Content-Transfer-Encoding` (as defined in [RFC2045]), or null if, and only if, the part is of type `multipart/*`. + // + // Note that two parts may be transfer-encoded differently but have the same blob id if their decoded octets are identical + // and the server is using a secure hash of the data for the blob id. + // If the transfer encoding is unknown, it is treated as though it had no transfer encoding. + // + // [RFC2045]: https://www.rfc-editor.org/rfc/rfc2045.html + // + // example: $blobId + BlobId string `json:"blobId,omitempty"` + + // The size, in octets, of the raw data after content transfer decoding (as referenced by the `blobId`, i.e., + // the number of octets in the file the user would download). + // + // example: 31219 + Size int `json:"size,omitempty"` + + // This is a list of all header fields in the part, in the order they appear in the message. + // + // The values are in Raw form. 
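+	//
+	// For illustration only, a Subject header field might appear here in Raw form as follows
+	// (note the preserved leading space and the undecoded encoded-word):
+	//
+	//	{ "name": "Subject", "value": " =?UTF-8?Q?Hello_world?=" }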
+ Headers []EmailHeader `json:"headers,omitempty"` + + // This is the decoded filename parameter of the `Content-Disposition` header field per [RFC2231], or + // (for compatibility with existing systems) if not present, then it’s the decoded name parameter of + // the `Content-Type` header field per [RFC2047]. + // + // [RFC2231]: https://www.rfc-editor.org/rfc/rfc2231.html + // [RFC2047]: https://www.rfc-editor.org/rfc/rfc2047.html + // + // name: $attachmentName + Name string `json:"name,omitempty"` + + // The value of the `Content-Type` header field of the part, if present; otherwise, the implicit type as per + // the MIME standard (`text/plain` or `message/rfc822` if inside a `multipart/digest`). + // + // [CFWS] is removed and any parameters are stripped. + // + // [CFWS]: https://www.rfc-editor.org/rfc/rfc5322#section-3.2.2 + // + // example: $attachmentType + Type string `json:"type,omitempty"` + + // The value of the `charset` parameter of the `Content-Type` header field, if present, or null if the header + // field is present but not of type `text/*`. + // + // If there is no `Content-Type` header field, or it exists and is of type `text/*` but has no `charset` parameter, + // this is the implicit charset as per the MIME standard: `us-ascii`. + // + // example: $attachmentCharset + Charset string `json:"charset,omitempty"` + + // The value of the `Content-Disposition` header field of the part, if present; + // otherwise, it’s null. + // + // [CFWS] is removed and any parameters are stripped. + // + // [CFWS]: https://www.rfc-editor.org/rfc/rfc5322#section-3.2.2 + // + // example: $attachmentDisposition + Disposition string `json:"disposition,omitempty"` + + // The value of the `Content-Id` header field of the part, if present; otherwise it’s null. + // + // [CFWS] and surrounding angle brackets (`<>`) are removed. + // + // This may be used to reference the content from within a `text/html` body part HTML using the `cid:` protocol, + // as defined in [RFC2392]. + // + // [RFC2392]: https://www.rfc-editor.org/rfc/rfc2392.html + // [CFWS]: https://www.rfc-editor.org/rfc/rfc5322#section-3.2.2 + // + // example: $attachmentCid + Cid string `json:"cid,omitempty"` + + // The list of language tags, as defined in [RFC3282], in the `Content-Language` header field of the part, + // if present. + // + // [RFC3282]: https://www.rfc-editor.org/rfc/rfc3282.html + Language string `json:"language,omitempty"` + + // The URI, as defined in [RFC2557], in the `Content-Location` header field of the part, if present. + // + // [RFC2557]: https://www.rfc-editor.org/rfc/rfc2557.html + Location string `json:"location,omitempty"` + + // If the type is `multipart/*`, this contains the body parts of each child. + SubParts []EmailBodyPart `json:"subParts,omitempty"` +} + +type EmailBodyValue struct { + // The value of the body part after decoding `Content-Transfer-Encoding` and the `Content-Type` charset, + // if both known to the server, and with any CRLF replaced with a single LF. + // + // The server MAY use heuristics to determine the charset to use for decoding if the charset is unknown, + // no charset is given, or it believes the charset given is incorrect. + // + // Decoding is best effort; the server SHOULD insert the unicode replacement character (`U+FFFD`) and continue + // when a malformed section is encountered. 
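+	//
+	// For illustration only (the part id and content are made up), a decoded plain-text part might be
+	// returned as:
+	//
+	//	"bodyValues": { "1": { "value": "Hi,\nsee you tomorrow.", "isEncodingProblem": false, "isTruncated": false } }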
+ // + // Note that due to the charset decoding and line ending normalisation, the length of this string will + // probably not be exactly the same as the size property on the corresponding EmailBodyPart. + Value string `json:"value,omitempty"` + + // This is true if malformed sections were found while decoding the charset, + // or the charset was unknown, or the content-transfer-encoding was unknown. + // + // Default value is false. + IsEncodingProblem bool `json:"isEncodingProblem,omitzero"` + + // This is true if the value has been truncated. + // + // Default value is false. + IsTruncated bool `json:"isTruncated,omitzero"` +} + +const ( + EmailPropertyId = "id" + EmailPropertyBlodId = "blobId" + EmailPropertyThreadId = "threadId" + EmailPropertyMailboxIds = "mailboxIds" + EmailPropertyKeywords = "keywords" + EmailPropertySize = "size" + EmailPropertyReceivedAt = "receivedAt" + EmailPropertyHeaders = "headers" + EmailPropertyMessageId = "messageId" + EmailPropertyInReplyTo = "inReplyTo" + EmailPropertyReferences = "references" + EmailPropertySender = "sender" + EmailPropertyFrom = "from" + EmailPropertyTo = "to" + EmailPropertyCc = "cc" + EmailPropertyBcc = "bcc" + EmailPropertyReplyTo = "replyTo" + EmailPropertySubject = "subject" + EmailPropertySentAt = "sentAt" + EmailPropertyBodyStructure = "bodyStructure" + EmailPropertyBodyValues = "bodyValues" + EmailPropertyTextBody = "textBody" + EmailPropertyHtmlBody = "htmlBody" + EmailPropertyAttachments = "attachments" + EmailPropertyHasAttachment = "hasAttachment" + EmailPropertyPreview = "preview" +) + +var EmailProperties = []string{ + EmailPropertyId, + EmailPropertyBlodId, + EmailPropertyThreadId, + EmailPropertyMailboxIds, + EmailPropertyKeywords, + EmailPropertySize, + EmailPropertyReceivedAt, + EmailPropertyHeaders, + EmailPropertyMessageId, + EmailPropertyInReplyTo, + EmailPropertyReferences, + EmailPropertySender, + EmailPropertyFrom, + EmailPropertyTo, + EmailPropertyCc, + EmailPropertyBcc, + EmailPropertyReplyTo, + EmailPropertySubject, + EmailPropertySentAt, + EmailPropertyBodyStructure, + EmailPropertyBodyValues, + EmailPropertyTextBody, + EmailPropertyHtmlBody, + EmailPropertyAttachments, + EmailPropertyHasAttachment, + EmailPropertyPreview, +} + +// An Email. +// +// swagger:model +type Email struct { + // The id of the Email object. + // + // Note that this is the JMAP object id, NOT the `Message-ID` header field value of the message [RFC5322]. + // + // [RFC5322]: https://www.rfc-editor.org/rfc/rfc5322.html + // + // required: true + // example: $emailId + Id string `json:"id,omitempty"` + + // The id representing the raw octets of the message [RFC5322] for this Email. + // + // This may be used to download the raw original message or to attach it directly to another Email, etc. + // + // [RFC5322]: https://www.rfc-editor.org/rfc/rfc5322.html + // + // example: $blobId + BlobId string `json:"blobId,omitempty"` + + // The id of the Thread to which this Email belongs. + // + // example: $threadId + ThreadId string `json:"threadId,omitempty"` + + // The number of emails (this one included) that are in the thread this email is in. + // Note that this is not part of the JMAP specification, and is only calculated when requested. + ThreadSize int `json:"threadSize,omitzero"` + + // The account ID this email belongs to. + // Note that this is not part of the JMAP specification, and is only contained in all-account operations. + AccountId string `json:"accountId,omitempty"` + + // The set of Mailbox ids this Email belongs to. 
+ // + // An Email in the mail store MUST belong to one or more Mailboxes at all times (until it is destroyed). + // The set is represented as an object, with each key being a Mailbox id. + // + // The value for each key in the object MUST be true. + // + // example: {"a": true} + MailboxIds map[string]bool `json:"mailboxIds,omitempty"` + + // A set of keywords that apply to the Email. + // + // The set is represented as an object, with the keys being the keywords. + // + // The value for each key in the object MUST be true. + // + // Keywords are shared with IMAP. + // + // The six system keywords from IMAP get special treatment. + // + // The following four keywords have their first character changed from \ in IMAP to $ in JMAP and have particular semantic meaning: + // + // - $draft: The Email is a draft the user is composing. + // - $seen: The Email has been read. + // - $flagged: The Email has been flagged for urgent/special attention. + // - $answered: The Email has been replied to. + // + // The IMAP \Recent keyword is not exposed via JMAP. The IMAP \Deleted keyword is also not present: IMAP uses a delete+expunge model, + // which JMAP does not. Any message with the \Deleted keyword MUST NOT be visible via JMAP (and so are not counted in the + // “totalEmails”, “unreadEmails”, “totalThreads”, and “unreadThreads” Mailbox properties). + // + // Users may add arbitrary keywords to an Email. + // For compatibility with IMAP, a keyword is a case-insensitive string of 1–255 characters in the ASCII subset + // %x21–%x7e (excludes control chars and space), and it MUST NOT include any of these characters: + // + // ( ) { ] % * " \ + // + // Because JSON is case sensitive, servers MUST return keywords in lowercase. + // + // The [IMAP and JMAP Keywords] registry as established in [RFC5788] assigns semantic meaning to some other + // keywords in common use. + // + // New keywords may be established here in the future. In particular, note: + // + // - $forwarded: The Email has been forwarded. + // - $phishing: The Email is highly likely to be phishing. + // Clients SHOULD warn users to take care when viewing this Email and disable links and attachments. + // - $junk: The Email is definitely spam. + // Clients SHOULD set this flag when users report spam to help train automated spam-detection systems. + // - $notjunk: The Email is definitely not spam. + // Clients SHOULD set this flag when users indicate an Email is legitimate, to help train automated spam-detection systems. + // + // [IMAP and JMAP Keywords]: https://www.iana.org/assignments/imap-jmap-keywords/ + // [RFC5788]: https://www.rfc-editor.org/rfc/rfc5788.html + Keywords map[string]bool `json:"keywords,omitempty"` + + // The size, in octets, of the raw data for the message [RFC5322] + // (as referenced by the blobId, i.e., the number of octets in the file the user would download). + // + // [RFC5322]: https://www.rfc-editor.org/rfc/rfc5322.html + Size int `json:"size,omitzero"` + + // The date the Email was received by the message store. + // + // This is the internal date in IMAP [RFC3501]. + // + // [RFC3501]: https://www.rfc-editor.org/rfc/rfc3501.html + ReceivedAt time.Time `json:"receivedAt,omitzero"` + + // This is a list of all header fields [RFC5322], in the same order they appear in the message. + // + // [RFC5322]: https://www.rfc-editor.org/rfc/rfc5322.html + Headers []EmailHeader `json:"headers,omitempty"` + + // The value is identical to the value of header:Message-ID:asMessageIds. 
+ // + // For messages conforming to [RFC5322] this will be an array with a single entry. + // + // [RFC5322]: https://www.rfc-editor.org/rfc/rfc5322.html + MessageId []string `json:"messageId,omitempty"` + + // The value is identical to the value of header:In-Reply-To:asMessageIds. + InReplyTo []string `json:"inReplyTo,omitempty"` + + // The value is identical to the value of header:References:asMessageIds. + References []string `json:"references,omitempty"` + + // The value is identical to the value of header:Sender:asAddresses. + Sender []EmailAddress `json:"sender,omitempty"` + + // The value is identical to the value of header:From:asAddresses. + From []EmailAddress `json:"from,omitempty"` + + // The value is identical to the value of header:To:asAddresses. + To []EmailAddress `json:"to,omitempty"` + + // The value is identical to the value of header:Cc:asAddresses. + Cc []EmailAddress `json:"cc,omitempty"` + + // The value is identical to the value of header:Bcc:asAddresses. + Bcc []EmailAddress `json:"bcc,omitempty"` + + // The value is identical to the value of header:Reply-To:asAddresses. + ReplyTo []EmailAddress `json:"replyTo,omitempty"` + + // The value is identical to the value of header:Subject:asText. + Subject string `json:"subject,omitempty"` + + // The value is identical to the value of header:Date:asDate. + SentAt time.Time `json:"sentAt,omitzero"` + + // This is the full MIME structure of the message body, without recursing into message/rfc822 or message/global parts. + // + // Note that EmailBodyParts may have subParts if they are of type multipart/*. + BodyStructure *EmailBodyPart `json:"bodyStructure,omitzero"` + + // This is a map of partId to an EmailBodyValue object for none, some, or all text/* parts. + // + // Which parts are included and whether the value is truncated is determined by various arguments to Email/get and Email/parse. + BodyValues map[string]EmailBodyValue `json:"bodyValues,omitempty"` + + // A list of text/plain, text/html, image/*, audio/*, and/or video/* parts to display (sequentially) as the + // message body, with a preference for text/plain when alternative versions are available. + TextBody []EmailBodyPart `json:"textBody,omitempty"` + + // A list of text/plain, text/html, image/*, audio/*, and/or video/* parts to display (sequentially) as the + // message body, with a preference for text/html when alternative versions are available. + HtmlBody []EmailBodyPart `json:"htmlBody,omitempty"` + + // A list, traversing depth-first, of all parts in bodyStructure. + // + // They must satisfy either of the following conditions: + // + // - not of type multipart/* and not included in textBody or htmlBody + // - of type image/*, audio/*, or video/* and not in both textBody and htmlBody + // + // None of these parts include subParts, including message/* types. + // + // Attached messages may be fetched using the Email/parse method and the blobId. + // + // Note that a text/html body part HTML may reference image parts in attachments by using cid: + // links to reference the Content-Id, as defined in [RFC2392], or by referencing the Content-Location. + // + // [RFC2392]: https://www.rfc-editor.org/rfc/rfc2392.html + Attachments []EmailBodyPart `json:"attachments,omitempty"` + + // This is true if there are one or more parts in the message that a client UI should offer as downloadable. + // + // A server SHOULD set hasAttachment to true if the attachments list contains at least one item that + // does not have Content-Disposition: inline. 
+ // + // The server MAY ignore parts in this list that are processed automatically in some way or are referenced + // as embedded images in one of the text/html parts of the message. + // + // The server MAY set hasAttachment based on implementation-defined or site-configurable heuristics. + HasAttachment bool `json:"hasAttachment,omitempty"` + + // A plaintext fragment of the message body. + // + // This is intended to be shown as a preview line when listing messages in the mail store and may be truncated + // when shown. + // + // The server may choose which part of the message to include in the preview; skipping quoted sections and + // salutations and collapsing white space can result in a more useful preview. + // + // This MUST NOT be more than 256 characters in length. + // + // As this is derived from the message content by the server, and the algorithm for doing so could change over + // time, fetching this for an Email a second time MAY return a different result. + // However, the previous value is not considered incorrect, and the change SHOULD NOT cause the Email object + // to be considered as changed by the server. + Preview string `json:"preview,omitempty"` +} + +type AddressParameters struct { + HoldUntil time.Time `json:"HOLDUNTIL,omitzero"` + HoldForSeconds uint `json:"HOLDFOR,omitzero"` +} + +type Address struct { + // The email address being represented by the object. + // + // This is a “Mailbox” as used in the Reverse-path or Forward-path of the MAIL FROM or RCPT TO command in [RFC5321]. + // + // [RFC5321]: https://datatracker.ietf.org/doc/html/rfc5321 + Email string `json:"email,omitempty"` + + // Any parameters to send with the email address (either mail-parameter or rcpt-parameter as appropriate, + // as specified in [RFC5321]). + // + // If supplied, each key in the object is a parameter name, and the value is either the parameter value (type String) + // or null if the parameter does not take a value. + // + // [RFC5321]: https://datatracker.ietf.org/doc/html/rfc5321 + Parameters *AddressParameters `json:"parameters,omitempty"` +} + +// Information for use when sending via SMTP. +type Envelope struct { + // The email address to use as the return address in the SMTP submission, + // plus any parameters to pass with the MAIL FROM address. + MailFrom Address `json:"mailFrom"` + + // The email addresses to send the message to, and any RCPT TO parameters to pass with the recipient. + RcptTo []Address `json:"rcptTo"` +} + +type EmailSubmissionUndoStatus string + +const ( + // It may be possible to cancel this submission. + UndoStatusPending EmailSubmissionUndoStatus = "pending" + + // The message has been relayed to at least one recipient in a manner that cannot be recalled. + // It is no longer possible to cancel this submission. + UndoStatusFinal EmailSubmissionUndoStatus = "final" + + // The submission was canceled and will not be delivered to any recipient. + UndoStatusCanceled EmailSubmissionUndoStatus = "canceled" +) + +type DeliveryStatusDelivered string + +const ( + // The message is in a local mail queue and status will change once it exits the local mail + // queues. + // The smtpReply property may still change. + DeliveredQueued DeliveryStatusDelivered = "queued" + + // The message was successfully delivered to the mail store of the recipient. + // The smtpReply property is final. + DeliveredYes DeliveryStatusDelivered = "yes" + + // Delivery to the recipient permanently failed. + // The smtpReply property is final. 
+ DeliveredNo DeliveryStatusDelivered = "no" + + // The final delivery status is unknown, (e.g., it was relayed to an external machine + // and no further information is available). + // + // The smtpReply property may still change if a DSN arrives. + DeliveredUnknown DeliveryStatusDelivered = "unknown" +) + +type DeliveryStatusDisplayed string + +const ( + // The display status is unknown. + // + // This is the initial value. + DisplayedUnknown DeliveryStatusDisplayed = "unknown" + + // The recipient’s system claims the message content has been displayed to the recipient. + // + // Note that there is no guarantee that the recipient has noticed, read, or understood the content. + DisplayedYes DeliveryStatusDisplayed = "yes" +) + +type DeliveryStatus struct { + // The SMTP reply string returned for this recipient when the server last tried to + // relay the message, or in a later Delivery Status Notification (DSN, as defined in + // [RFC3464]) response for the message. + // + // This SHOULD be the response to the RCPT TO stage, unless this was accepted and the + // message as a whole was rejected at the end of the DATA stage, in which case the + // DATA stage reply SHOULD be used instead. + // + // [RFC3464]: https://datatracker.ietf.org/doc/html/rfc3464 + SmtpReply string `json:"smtpReply"` + + // Represents whether the message has been successfully delivered to the recipient. + // + // This MUST be one of the following values: + // - queued: The message is in a local mail queue and status will change once it exits + // the local mail queues. The smtpReply property may still change. + // - yes: The message was successfully delivered to the mail store of the recipient. + // The smtpReply property is final. + // - no: Delivery to the recipient permanently failed. The smtpReply property is final. + // - unknown: The final delivery status is unknown, (e.g., it was relayed to an external + // machine and no further information is available). + // The smtpReply property may still change if a DSN arrives. + Delivered DeliveryStatusDelivered `json:"delivered"` + + // Represents whether the message has been displayed to the recipient. + // + // This MUST be one of the following values: + // - unknown: The display status is unknown. This is the initial value. + // - yes: The recipient’s system claims the message content has been displayed to the recipient. + // Note that there is no guarantee that the recipient has noticed, read, or understood the content. + Displayed DeliveryStatusDisplayed `json:"displayed"` +} + +type EmailSubmission struct { + // The id of the EmailSubmission (server-set). + Id string `json:"id"` + + // The id of the Identity to associate with this submission. + IdentityId string `json:"identityId"` + + // The id of the Email to send. + // + // The Email being sent does not have to be a draft, for example, when “redirecting” an existing Email + // to a different address. + EmailId string `json:"emailId"` + + // The Thread id of the Email to send (server-set). + // + // This is set by the server to the threadId property of the Email referenced by the emailId. + ThreadId string `json:"threadId"` + + // Information for use when sending via SMTP. + // + // If the envelope property is null or omitted on creation, the server MUST generate this from the + // referenced Email as follows: + // + // - mailFrom: The email address in the Sender header field, if present; otherwise, + // it’s the email address in the From header field, if present. + // In either case, no parameters are added. 
+ // - rcptTo: The deduplicated set of email addresses from the To, Cc, and Bcc header fields, + // if present, with no parameters for any of them. + Envelope *Envelope `json:"envelope,omitempty"` + + // The date the submission was/will be released for delivery (server-set). + SendAt time.Time `json:"sendAt,omitzero"` + + // This represents whether the submission may be canceled (server-set). + // + // This is server set on create and MUST be one of the following values: + // + // - pending: It may be possible to cancel this submission. + // - final: The message has been relayed to at least one recipient in a manner that cannot be + // recalled. It is no longer possible to cancel this submission. + // - canceled: The submission was canceled and will not be delivered to any recipient. + UndoStatus EmailSubmissionUndoStatus `json:"undoStatus"` + + // This represents the delivery status for each of the submission’s recipients, if known (server-set). + // + // This property MAY not be supported by all servers, in which case it will remain null. + // + // Servers that support it SHOULD update the EmailSubmission object each time the status of any of + // the recipients changes, even if some recipients are still being retried. + // + // This value is a map from the email address of each recipient to a DeliveryStatus object. + DeliveryStatus map[string]DeliveryStatus `json:"deliveryStatus"` + + // A list of blob ids for DSNs [RFC3464] received for this submission, + // in order of receipt, oldest first (server-set) . + // + // The blob is the whole MIME message (with a top-level content-type of multipart/report), as received. + // + // [RFC3464]: https://datatracker.ietf.org/doc/html/rfc3464 + DsnBlobIds []string `json:"dsnBlobIds,omitempty"` + + // A list of blob ids for MDNs [RFC8098] received for this submission, + // in order of receipt, oldest first (server-set). + // + // The blob is the whole MIME message (with a top-level content-type of multipart/report), as received. + // + // [RFC8098]: https://datatracker.ietf.org/doc/html/rfc8098 + MdnBlobIds []string `json:"mdnBlobIds,omitempty"` +} + +type EmailSubmissionGetCommand struct { + // The id of the account to use. + AccountId string `json:"accountId"` + + // The ids of the EmailSubmission objects to return. + // + // If null, then all records of the data type are returned, if this is supported for that data + // type and the number of records does not exceed the maxObjectsInGet limit. + Ids []string `json:"ids,omitempty"` + + // If supplied, only the properties listed in the array are returned for each EmailSubmission object. + // + // If null, all properties of the object are returned. The id property of the object is always returned, + // even if not explicitly requested. If an invalid property is requested, the call MUST be rejected + // with an invalidArguments error. + Properties []string `json:"properties,omitempty"` +} + +type EmailSubmissionGetRefCommand struct { + // The id of the account to use. + AccountId string `json:"accountId"` + + // The ids of the EmailSubmission objects to return. + // + // If null, then all records of the data type are returned, if this is supported for that data + // type and the number of records does not exceed the maxObjectsInGet limit. + IdRef *ResultReference `json:"#ids,omitempty"` + + // If supplied, only the properties listed in the array are returned for each EmailSubmission object. + // + // If null, all properties of the object are returned. 
The id property of the object is always returned,
+	// even if not explicitly requested. If an invalid property is requested, the call MUST be rejected
+	// with an invalidArguments error.
+	Properties []string `json:"properties,omitempty"`
+}
+
+type EmailSubmissionGetResponse struct {
+	// The id of the account used for the call.
+	AccountId string `json:"accountId"`
+
+	// A (preferably short) string representing the state on the server for all the data
+	// of this type in the account (not just the objects returned in this call).
+	//
+	// If the data changes, this string MUST change. If the EmailSubmission data is unchanged,
+	// servers SHOULD return the same state string on subsequent requests for this data type.
+	//
+	// When a client receives a response with a different state string to a previous call,
+	// it MUST either throw away all currently cached objects for the type or call
+	// EmailSubmission/changes to get the exact changes.
+	State State `json:"state"`
+
+	// An array of the EmailSubmission objects requested.
+	//
+	// This is the empty array if no objects were found or if the ids argument passed in
+	// was also an empty array.
+	//
+	// The results MAY be in a different order to the ids in the request arguments.
+	// If an identical id is included more than once in the request, the server MUST only
+	// include it once in either the list or the notFound argument of the response.
+	List []EmailSubmission `json:"list,omitempty"`
+
+	// This array contains the ids passed to the method for records that do not exist.
+	//
+	// The array is empty if all requested ids were found or if the ids argument passed in was
+	// either null or an empty array.
+	NotFound []string `json:"notFound,omitempty"`
+}
+
+// Patch Object.
+//
+// Example: the following patch moves an Email out of the drafts folder (which has
+// Mailbox id “7cb4e8ee-df87-4757-b9c4-2ea1ca41b38e”) into the sent folder (which we
+// presume has Mailbox id “73dbcb4b-bffc-48bd-8c2a-a2e91ca672f6”) and removes the
+// $draft flag:
+//
+//	{
+//	  "mailboxIds/7cb4e8ee-df87-4757-b9c4-2ea1ca41b38e": null,
+//	  "mailboxIds/73dbcb4b-bffc-48bd-8c2a-a2e91ca672f6": true,
+//	  "keywords/$draft": null
+//	}
+type PatchObject map[string]any
+
+// EmailSubmissionCreate is the same as EmailSubmission, but without the server-set attributes.
+type EmailSubmissionCreate struct {
+	// The id of the Identity to associate with this submission.
+	IdentityId string `json:"identityId"`
+
+	// The id of the Email to send.
+	//
+	// The Email being sent does not have to be a draft, for example, when “redirecting” an existing
+	// Email to a different address.
+	EmailId string `json:"emailId"`
+
+	// Information for use when sending via SMTP.
+	Envelope *Envelope `json:"envelope,omitempty"`
+}
+
+type EmailSubmissionSetCommand struct {
+	AccountId string `json:"accountId"`
+	Create map[string]EmailSubmissionCreate `json:"create,omitempty"`
+	OldState State `json:"oldState,omitempty"`
+	NewState State `json:"newState,omitempty"`
+
+	// A map of EmailSubmission id to an object containing properties to update on the Email object
+	// referenced by the EmailSubmission if the create/update/destroy succeeds.
+	//
+	// (For references to EmailSubmissions created in the same “/set” invocation, this is equivalent
+	// to a creation-reference, so the id will be the creation id prefixed with a #.)
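+	//
+	// Illustrative sketch (creation id and Mailbox ids are hypothetical): once the submission
+	// created as "#sub1" succeeds, move the sent Email out of Drafts into Sent and clear its
+	// $draft keyword:
+	//
+	//	"onSuccessUpdateEmail": {
+	//	  "#sub1": {
+	//	    "mailboxIds/MB-drafts": null,
+	//	    "mailboxIds/MB-sent": true,
+	//	    "keywords/$draft": null
+	//	  }
+	//	}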
+	OnSuccessUpdateEmail map[string]PatchObject `json:"onSuccessUpdateEmail,omitempty"`
+
+	// A list of EmailSubmission ids for which the Email with the corresponding emailId should be destroyed
+	// if the create/update/destroy succeeds.
+	//
+	// (For references to EmailSubmission creations, this is equivalent to a creation-reference so the
+	// id will be the creation id prefixed with a #.)
+	OnSuccessDestroyEmail []string `json:"onSuccessDestroyEmail,omitempty"`
+}
+
+type CreatedEmailSubmission struct {
+	Id string `json:"id"`
+}
+
+type EmailSubmissionSetResponse struct {
+	// The id of the account used for the call.
+	AccountId string `json:"accountId"`
+
+	// The state string that would have been returned by EmailSubmission/get before making the
+	// requested changes, or null if the server doesn’t know what the previous state string was.
+	OldState State `json:"oldState"`
+
+	// The state string that will now be returned by EmailSubmission/get.
+	NewState State `json:"newState"`
+
+	// If true, the client may call EmailSubmission/changes again with the newState returned to get further
+	// updates.
+	//
+	// If false, newState is the current server state.
+	HasMoreChanges bool `json:"hasMoreChanges"`
+
+	// A map of the creation id to an object containing any server-set properties of the created
+	// EmailSubmission (such as the id), or null if no records were created.
+	Created map[string]CreatedEmailSubmission `json:"created,omitempty"`
+
+	// A map of the creation id to a SetError object for each record that failed to be created, or
+	// null if all successful.
+	NotCreated map[string]SetError `json:"notCreated,omitempty"`
+
+	// TODO(pbleser-oc) add updated and destroyed when they are needed
+}
+
+type Command string
+
+type Invocation struct {
+	Command Command
+	Parameters any
+	Tag string
+}
+
+func invocation(command Command, parameters any, tag string) Invocation {
+	return Invocation{
+		Command: command,
+		Parameters: parameters,
+		Tag: tag,
+	}
+}
+
+type TypeOfRequest string
+
+const RequestType = TypeOfRequest("Request")
+
+type Request struct {
+	// The set of capabilities the client wishes to use.
+	//
+	// The client MAY include capability identifiers even if the method calls it makes do not utilise those capabilities.
+	// The server advertises the set of specifications it supports in the Session object (see [Section 2]), as keys on
+	// the capabilities property.
+	//
+	// [Section 2]: https://jmap.io/spec-core.html#the-jmap-session-resource
+	Using []string `json:"using"`
+
+	// An array of method calls to process on the server.
+	//
+	// The method calls MUST be processed sequentially, in order.
+	MethodCalls []Invocation `json:"methodCalls"`
+
+	// A map of a (client-specified) creation id to the id the server assigned when a record was successfully created (optional).
+	CreatedIds map[string]string `json:"createdIds,omitempty"`
+
+	// This MUST be the string "Request".
+	// The specification extends the Request object with two additional arguments when used over a WebSocket.
+	Type TypeOfRequest `json:"@type,omitempty"`
+
+	// A client-specified identifier for the request to be echoed back in the response to this request (optional).
+	Id string `json:"id,omitempty"`
+}
+
+type TypeOfResponse string
+
+const ResponseType = TypeOfResponse("Response")
+
+type Response struct {
+	// An array of responses, in the same format as the methodCalls on the Request object.
+	// The output of the methods MUST be added to the methodResponses array in the same order that the methods are processed.
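+	//
+	// On the wire, each entry is the usual JMAP invocation triple of method name, arguments
+	// object and method call id. A hypothetical sketch (values are illustrative only):
+	//
+	//	"methodResponses": [
+	//	  [ "Email/get", { "accountId": "a1", "state": "s-17", "list": [], "notFound": [] }, "0" ]
+	//	]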
+ MethodResponses []Invocation `json:"methodResponses"` + + // A map of a (client-specified) creation id to the id the server assigned when a record was successfully created. + // + // Optional; only returned if given in the request. + // + // This MUST include all creation ids passed in the original createdIds parameter of the Request object, as well as any + // additional ones added for newly created records. + CreatedIds map[string]string `json:"createdIds,omitempty"` + + // The current value of the “state” string on the Session object, as described in [Section 2]. + // Clients may use this to detect if this object has changed and needs to be refetched. + // + // [Section 2]: https://jmap.io/spec-core.html#the-jmap-session-resource + SessionState SessionState `json:"sessionState"` + + // This MUST be the string "Response". + // The specification extends the Response object with two additional arguments when used over a WebSocket. + Type TypeOfResponse `json:"@type,omitempty"` + + // MUST be returned if an identifier is included in the request (optional). + RequestId string `json:"requestId,omitempty"` +} + +type EmailQueryResponse struct { + // The id of the account used for the call. + AccountId string `json:"accountId"` + + // A string encoding the current state of the query on the server. + // + // This string MUST change if the results of the query (i.e., the matching ids and their sort order) have changed. + // The queryState string MAY change if something has changed on the server, which means the results may have changed + // but the server doesn’t know for sure. + // + // The queryState string only represents the ordered list of ids that match the particular query (including its sort/filter). + // There is no requirement for it to change if a property on an object matching the query changes but the query results are unaffected + // (indeed, it is more efficient if the queryState string does not change in this case). + // + // The queryState string only has meaning when compared to future responses to a query with the same type/sort/filter or when used with + // /queryChanges to fetch changes. + // + // Should a client receive back a response with a different queryState string to a previous call, it MUST either throw away the currently + // cached query and fetch it again (note, this does not require fetching the records again, just the list of ids) or call + // Email/queryChanges to get the difference. + QueryState State `json:"queryState"` + + // This is true if the server supports calling Email/queryChanges with these filter/sort parameters. + // + // Note, this does not guarantee that the Email/queryChanges call will succeed, as it may only be possible for a limited time + // afterwards due to server internal implementation details. + CanCalculateChanges bool `json:"canCalculateChanges"` + + // The zero-based index of the first result in the ids array within the complete list of query results. + Position uint `json:"position"` + + // The list of ids for each Email in the query results, starting at the index given by the position argument of this + // response and continuing until it hits the end of the results or reaches the limit number of ids. + // + // If position is >= total, this MUST be the empty list. + Ids []string `json:"ids"` + + // The total number of Emails in the results (given the filter). + // + // Only if requested. + // + // This argument MUST be omitted if the calculateTotal request argument is not true. 
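+	//
+	// Worked example (numbers are hypothetical): for a query sent with calculateTotal set to
+	// true, position 0 and limit 50 against 1000 matching Emails, the response would contain
+	// 50 ids together with "position": 0 and "total": 1000.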
+ Total uint `json:"total,omitempty,omitzero"` + + // The limit enforced by the server on the maximum number of results to return (if set by the server). + // + // This is only returned if the server set a limit or used a different limit than that given in the request. + Limit uint `json:"limit,omitempty,omitzero"` +} + +type EmailGetResponse struct { + // The id of the account used for the call. + AccountId string `json:"accountId"` + + // A (preferably short) string representing the state on the server for all the data of this type + // in the account (not just the objects returned in this call). + // + // If the data changes, this string MUST change. + // If the Email data is unchanged, servers SHOULD return the same state string on subsequent requests for this data type. + State State `json:"state"` + + // An array of the Email objects requested. + // + // This is the empty array if no objects were found or if the ids argument passed in was also an empty array. + // + // The results MAY be in a different order to the ids in the request arguments. + // + // If an identical id is included more than once in the request, the server MUST only include it once in either + // the list or the notFound argument of the response. + List []Email `json:"list"` + + // This array contains the ids passed to the method for records that do not exist. + // + // The array is empty if all requested ids were found or if the ids argument passed in was either null or an empty array. + NotFound []string `json:"notFound"` +} + +type EmailChangesResponse struct { + // The id of the account used for the call. + AccountId string `json:"accountId"` + + // This is the sinceState argument echoed back; it’s the state from which the server is returning changes. + OldState State `json:"oldState"` + + // This is the state the client will be in after applying the set of changes to the old state. + NewState State `json:"newState"` + + // If true, the client may call Email/changes again with the newState returned to get further updates. + // If false, newState is the current server state. + HasMoreChanges bool `json:"hasMoreChanges"` + + // An array of ids for records that have been created since the old state. + Created []string `json:"created,omitempty"` + + // An array of ids for records that have been updated since the old state. + Updated []string `json:"updated,omitempty"` + + // An array of ids for records that have been destroyed since the old state. + Destroyed []string `json:"destroyed,omitempty"` +} + +type MailboxGetResponse struct { + // The id of the account used for the call. + AccountId string `json:"accountId"` + + // A (preferably short) string representing the state on the server for all the data of this type in the account + // (not just the objects returned in this call). + // If the data changes, this string MUST change. + // If the Mailbox data is unchanged, servers SHOULD return the same state string on subsequent requests for this data type. + // When a client receives a response with a different state string to a previous call, it MUST either throw away all currently + // cached objects for the type or call Foo/changes to get the exact changes. + State State `json:"state"` + + // An array of the Mailbox objects requested. + // This is the empty array if no objects were found or if the ids argument passed in was also an empty array. + // The results MAY be in a different order to the ids in the request arguments. 
+ // If an identical id is included more than once in the request, the server MUST only include it once in either + // the list or the notFound argument of the response. + List []Mailbox `json:"list"` + + // This array contains the ids passed to the method for records that do not exist. + // The array is empty if all requested ids were found or if the ids argument passed in was either null or an empty array. + NotFound []any `json:"notFound"` +} + +type MailboxChangesResponse struct { + // The id of the account used for the call. + AccountId string `json:"accountId"` + + // This is the sinceState argument echoed back; it’s the state from which the server is returning changes. + OldState State `json:"oldState"` + + // This is the state the client will be in after applying the set of changes to the old state. + NewState State `json:"newState"` + + // If true, the client may call Mailbox/changes again with the newState returned to get further updates. + // + // If false, newState is the current server state. + HasMoreChanges bool `json:"hasMoreChanges"` + + // An array of ids for records that have been created since the old state. + Created []string `json:"created,omitempty"` + + // An array of ids for records that have been updated since the old state. + Updated []string `json:"updated,omitempty"` + + // An array of ids for records that have been destroyed since the old state. + Destroyed []string `json:"destroyed,omitempty"` + + // If only the “totalEmails”, “unreadEmails”, “totalThreads”, and/or “unreadThreads” Mailbox properties have + // changed since the old state, this will be the list of properties that may have changed. + // + // If the server is unable to tell if only counts have changed, it MUST just be null. + UpdatedProperties []string `json:"updatedProperties,omitempty"` +} + +type MailboxQueryResponse struct { + // The id of the account used for the call. + AccountId string `json:"accountId"` + + // A string encoding the current state of the query on the server. + // + // This string MUST change if the results of the query (i.e., the matching ids and their sort order) have changed. + // The queryState string MAY change if something has changed on the server, which means the results may have + // changed but the server doesn’t know for sure. + // + // The queryState string only represents the ordered list of ids that match the particular query (including its + // sort/filter). There is no requirement for it to change if a property on an object matching the query changes + // but the query results are unaffected (indeed, it is more efficient if the queryState string does not change + // in this case). The queryState string only has meaning when compared to future responses to a query with the + // same type/sort/filter or when used with /queryChanges to fetch changes. + // + // Should a client receive back a response with a different queryState string to a previous call, it MUST either + // throw away the currently cached query and fetch it again (note, this does not require fetching the records + // again, just the list of ids) or call Mailbox/queryChanges to get the difference. + QueryState State `json:"queryState"` + + // This is true if the server supports calling Mailbox/queryChanges with these filter/sort parameters. + // + // Note, this does not guarantee that the Mailbox/queryChanges call will succeed, as it may only be possible for + // a limited time afterwards due to server internal implementation details. 
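+	//
+	// Sketch of the intended client flow (argument values are hypothetical): when this is true,
+	// a client holding a cached query result can refresh it incrementally with
+	//
+	//	[ "Mailbox/queryChanges", { "accountId": "a1", "sinceQueryState": "q-42" }, "0" ]
+	//
+	// instead of re-running the full Mailbox/query.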
+ CanCalculateChanges bool `json:"canCalculateChanges"` + + // The zero-based index of the first result in the ids array within the complete list of query results. + Position int `json:"position"` + + // The list of ids for each Mailbox in the query results, starting at the index given by the position argument + // of this response and continuing until it hits the end of the results or reaches the limit number of ids. + // + // If position is >= total, this MUST be the empty list. + Ids []string `json:"ids"` + + // The total number of Mailbox in the results (given the filter) (only if requested). + // + // This argument MUST be omitted if the calculateTotal request argument is not true. + Total int `json:"total,omitzero"` + + // The limit enforced by the server on the maximum number of results to return (if set by the server). + // + // This is only returned if the server set a limit or used a different limit than that given in the request. + Limit int `json:"limit,omitzero"` +} + +type EmailCreate struct { + // The set of Mailbox ids this Email belongs to. + // + // An Email in the mail store MUST belong to one or more Mailboxes at all times + // (until it is destroyed). + // + // The set is represented as an object, with each key being a Mailbox id. + // The value for each key in the object MUST be true. + MailboxIds map[string]bool `json:"mailboxIds,omitempty"` + + // A set of keywords that apply to the Email. + // + // The set is represented as an object, with the keys being the keywords. + // The value for each key in the object MUST be true. + Keywords map[string]bool `json:"keywords,omitempty"` + + // This is a list of all header fields [RFC5322], in the same order they appear in the message. + // + // [RFC5322]: https://www.rfc-editor.org/rfc/rfc5322.html + Headers []EmailHeader `json:"headers,omitempty"` + + // The value is identical to the value of header:In-Reply-To:asMessageIds. + InReplyTo []string `json:"inReplyTo,omitempty"` + + // The value is identical to the value of header:References:asMessageIds. + References []string `json:"references,omitempty"` + + // The value is identical to the value of header:Sender:asAddresses. + Sender []EmailAddress `json:"sender,omitempty"` + + // The ["From:" field] specifies the author(s) of the message, that is, the mailbox(es) + // of the person(s) or system(s) responsible for the writing of the message + // + // ["From:" field]: https://www.rfc-editor.org/rfc/rfc5322.html#section-3.6.2 + From []EmailAddress `json:"from,omitempty"` + + // The value is identical to the value of header:To:asAddresses. + To []EmailAddress `json:"to,omitempty"` + + // The value is identical to the value of header:Cc:asAddresses. + Cc []EmailAddress `json:"cc,omitempty"` + + // The value is identical to the value of header:Bcc:asAddresses. + Bcc []EmailAddress `json:"bcc,omitempty"` + + // The value is identical to the value of header:Reply-To:asAddresses. + ReplyTo []EmailAddress `json:"replyTo,omitempty"` + + // The "Subject:" field contains a short string identifying the topic of the message. + Subject string `json:"subject,omitempty"` + + // The date the Email was received by the message store. + // + // (default: time of most recent Received header, or time of import on server if none). + ReceivedAt time.Time `json:"receivedAt,omitzero"` + + // The origination date specifies the date and time at which the creator of the message indicated that + // the message was complete and ready to enter the mail delivery system. 
+ // + // For instance, this might be the time that a user pushes the "send" or "submit" button in an + // application program. + // + // In any case, it is specifically not intended to convey the time that the message is actually transported, + // but rather the time at which the human or other creator of the message has put the message into its final + // form, ready for transport. + // + // (For example, a portable computer user who is not connected to a network might queue a message for delivery. + // The origination date is intended to contain the date and time that the user queued the message, not the time + // when the user connected to the network to send the message.) + SentAt time.Time `json:"sentAt,omitzero"` + + // This is the full MIME structure of the message body, without recursing into message/rfc822 or message/global parts. + // + // Note that EmailBodyParts may have subParts if they are of type multipart/*. + BodyStructure *EmailBodyPart `json:"bodyStructure,omitempty"` + + // This is a map of partId to an EmailBodyValue object for none, some, or all text/* parts. + BodyValues map[string]EmailBodyValue `json:"bodyValues,omitempty"` + + // A list of text/plain, text/html, image/*, audio/*, and/or video/* parts to display (sequentially) as the + // message body, with a preference for text/plain when alternative versions are available. + TextBody []EmailBodyPart `json:"textBody,omitempty"` + + // A list of text/plain, text/html, image/*, audio/*, and/or video/* parts to display (sequentially) as the + // message body, with a preference for text/html when alternative versions are available. + HtmlBody []EmailBodyPart `json:"htmlBody,omitempty"` + + // A list, traversing depth-first, of all parts in bodyStructure. + // + // They must satisfy either of the following conditions: + // + // - not of type multipart/* and not included in textBody or htmlBody + // - of type image/*, audio/*, or video/* and not in both textBody and htmlBody + // + // None of these parts include subParts, including message/* types. + // + // Attached messages may be fetched using the Email/parse method and the blobId. + // + // Note that a text/html body part HTML may reference image parts in attachments by using cid: + // links to reference the Content-Id, as defined in [RFC2392], or by referencing the Content-Location. + // + // [RFC2392]: https://www.rfc-editor.org/rfc/rfc2392.html + Attachments []EmailBodyPart `json:"attachments,omitempty"` +} + +type EmailUpdate map[string]any + +type EmailSetCommand struct { + // The id of the account to use. + AccountId string `json:"accountId"` + + // This is a state string as returned by the `Email/get` method. + // + // If supplied, the string must match the current state; otherwise, the method will be aborted and a + // `stateMismatch` error returned. + // + // If null, any changes will be applied to the current state. + IfInState string `json:"ifInState,omitempty"` + + // A map of a creation id (a temporary id set by the client) to Email objects, + // or null if no objects are to be created. + // + // The Email object type definition may define default values for properties. + // + // Any such property may be omitted by the client. + // + // The client MUST omit any properties that may only be set by the server. + Create map[string]EmailCreate `json:"create,omitempty"` + + // A map of an id to a `Patch` object to apply to the current Email object with that id, + // or null if no objects are to be updated. 
+	//
+	// A `PatchObject` is of type `String[*]` and represents an unordered set of patches.
+	//
+	// The keys are a path in JSON Pointer Format [@!RFC6901], with an implicit leading `/` (i.e., prefix each key
+	// with `/` before applying the JSON Pointer evaluation algorithm).
+	//
+	// All paths MUST also conform to the following restrictions; if there is any violation, the update
+	// MUST be rejected with an `invalidPatch` error:
+	// !- The pointer MUST NOT reference inside an array (i.e., you MUST NOT insert/delete from an array; the array MUST be replaced in its entirety instead).
+	// !- All parts prior to the last (i.e., the value after the final slash) MUST already exist on the object being patched.
+	// !- There MUST NOT be two patches in the `PatchObject` where the pointer of one is the prefix of the pointer of the other, e.g., `"alerts/1/offset"` and `"alerts"`.
+	//
+	// The value associated with each pointer determines how to apply that patch:
+	// !- If null, set to the default value if specified for this property; otherwise, remove the property from the patched object. If the key is not present in the parent, this is a no-op.
+	// !- Anything else: The value to set for this property (this may be a replacement or addition to the object being patched).
+	//
+	// Any server-set properties MAY be included in the patch if their value is identical to the current server value
+	// (before applying the patches to the object). Otherwise, the update MUST be rejected with an `invalidProperties` `SetError`.
+	//
+	// This patch definition is designed such that an entire Email object is also a valid `PatchObject`.
+	//
+	// The client may choose to optimise network usage by just sending the diff or may send the whole object; the server
+	// processes it the same either way.
+	Update map[string]EmailUpdate `json:"update,omitempty"`
+
+	// A list of ids for Email objects to permanently delete, or null if no objects are to be destroyed.
+	Destroy []string `json:"destroy,omitempty"`
+}
+
+type EmailSetResponse struct {
+	// The id of the account used for the call.
+	AccountId string `json:"accountId"`
+
+	// The state string that would have been returned by Email/get before making the
+	// requested changes, or null if the server doesn’t know what the previous state
+	// string was.
+	OldState State `json:"oldState,omitempty"`
+
+	// The state string that will now be returned by Email/get.
+	NewState State `json:"newState"`
+
+	// A map of the creation id to an object containing any properties of the created Email object
+	// that were not sent by the client.
+	//
+	// This includes all server-set properties (such as the id in most object types) and any properties
+	// that were omitted by the client and thus set to a default by the server.
+	//
+	// This argument is null if no Email objects were successfully created.
+	Created map[string]*Email `json:"created,omitempty"`
+
+	// The keys in this map are the ids of all Emails that were successfully updated.
+	//
+	// The value for each id is an Email object containing any property that changed in a way not
+	// explicitly requested by the PatchObject sent to the server, or null if none.
+	//
+	// This lets the client know of any changes to server-set or computed properties.
+	//
+	// This argument is null if no Email objects were successfully updated.
+	Updated map[string]*Email `json:"updated,omitempty"`
+
+	// A list of Email ids for records that were successfully destroyed, or null if none.
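+	//
+	// Illustrative fragment (ids and error type are hypothetical) of a partially successful destroy:
+	//
+	//	"destroyed": [ "M-123" ],
+	//	"notDestroyed": { "M-456": { "type": "notFound" } }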
+ Destroyed []string `json:"destroyed,omitempty"` + + // A map of the creation id to a SetError object for each record that failed to be created, + // or null if all successful. + NotCreated map[string]SetError `json:"notCreated,omitempty"` + + // A map of the Email id to a SetError object for each record that failed to be updated, + // or null if all successful. + NotUpdated map[string]SetError `json:"notUpdated,omitempty"` + + // A map of the Email id to a SetError object for each record that failed to be destroyed, + // or null if all successful. + NotDestroyed map[string]SetError `json:"notDestroyed,omitempty"` +} + +const ( + EmailMimeType = "message/rfc822" +) + +type EmailImport struct { + // The id of the blob containing the raw message [RFC5322]. + // + // [RFC5322]: https://www.rfc-editor.org/rfc/rfc5322.html + BlobId string `json:"blobId"` + + // The ids of the Mailboxes to assign this Email to. + // + // At least one Mailbox MUST be given. + MailboxIds map[string]bool `json:"mailboxIds"` + + // The keywords to apply to the Email. + Keywords map[string]bool `json:"keywords"` + + // (default: time of most recent Received header, or time of import + // on server if none) The receivedAt date to set on the Email. + ReceivedAt time.Time `json:"receivedAt"` +} + +type EmailImportCommand struct { + AccountId string `json:"accountId"` + + // This is a state string as returned by the Email/get method. + // + // If supplied, the string must match the current state of the account referenced + // by the accountId; otherwise, the method will be aborted and a stateMismatch + // error returned. + // + // If null, any changes will be applied to the current state. + IfInState string `json:"ifInState,omitempty"` + + // A map of creation id (client specified) to EmailImport objects. + Emails map[string]EmailImport `json:"emails"` +} + +// Successfully imported Email. +type ImportedEmail struct { + // Id of the successfully imported Email. + Id string `json:"id"` + + // Blob id of the successfully imported Email. + BlobId string `json:"blobId"` + + // Thread id of the successfully imported Email. + ThreadId string `json:"threadId"` + + // Size of the successfully imported Email. + Size int `json:"size"` +} + +type EmailImportResponse struct { + // The id of the account used for this call. + AccountId string `json:"accountId"` + + // The state string that would have been returned by Email/get on this account + // before making the requested changes, or null if the server doesn’t know + // what the previous state string was. + OldState State `json:"oldState"` + + // The state string that will now be returned by Email/get on this account. + NewState State `json:"newState"` + + // A map of the creation id to an object containing the id, blobId, threadId, + // and size properties for each successfully imported Email, or null if none. + Created map[string]ImportedEmail `json:"created"` + + // A map of the creation id to a SetError object for each Email that failed to + // be created, or null if all successful. + NotCreated map[string]SetError `json:"notCreated"` +} + +// Replies are grouped together with the original message to form a Thread. +// +// In JMAP, a Thread is simply a flat list of Emails, ordered by date. +// +// Every Email MUST belong to a Thread, even if it is the only Email in the Thread. +type Thread struct { + // The id of the Thread. + Id string + + // The ids of the Emails in the Thread, sorted by the receivedAt date of the Email, + // oldest first. 
+ // + // If two Emails have an identical date, the sort is server dependent but MUST be + // stable (sorting by id is recommended). + EmailIds []string +} + +type ThreadGetCommand struct { + AccountId string `json:"accountId"` + Ids []string `json:"ids,omitempty"` +} + +type ThreadGetRefCommand struct { + AccountId string `json:"accountId"` + IdsRef *ResultReference `json:"#ids,omitempty"` +} + +type ThreadGetResponse struct { + AccountId string + State State + List []Thread + NotFound []any +} + +type IdentityGetCommand struct { + AccountId string `json:"accountId"` + Ids []string `json:"ids,omitempty"` +} + +type IdentitySetCommand struct { + AccountId string `json:"accountId"` + IfInState string `json:"ifInState,omitempty"` + Create map[string]Identity `json:"create,omitempty"` + Update map[string]PatchObject `json:"update,omitempty"` + Destroy []string `json:"destroy,omitempty"` +} + +type IdentitySetResponse struct { + AccountId string `json:"accountId"` + OldState State `json:"oldState,omitempty"` + NewState State `json:"newState,omitempty"` + Created map[string]Identity `json:"created,omitempty"` + Updated map[string]Identity `json:"updated,omitempty"` + Destroyed []string `json:"destroyed,omitempty"` + NotCreated map[string]SetError `json:"notCreated,omitempty"` + NotUpdated map[string]SetError `json:"notUpdated,omitempty"` + NotDestroyed map[string]SetError `json:"notDestroyed,omitempty"` +} + +type Identity struct { + // The id of the Identity. + Id string `json:"id,omitempty"` + + // The “From” name the client SHOULD use when creating a new Email from this Identity. + Name string `json:"name,omitempty"` + + // The “From” email address the client MUST use when creating a new Email from this Identity. + // + // If the mailbox part of the address (the section before the “@”) is the single character + // * (e.g., *@example.com) then the client may use any valid address ending in that domain + // (e.g., foo@example.com). + Email string `json:"email,omitempty"` + + // The Reply-To value the client SHOULD set when creating a new Email from this Identity. + ReplyTo string `json:"replyTo,omitempty"` + + // The Bcc value the client SHOULD set when creating a new Email from this Identity. + Bcc *[]EmailAddress `json:"bcc,omitempty"` + + // A signature the client SHOULD insert into new plaintext messages that will be sent from + // this Identity. + // + // Clients MAY ignore this and/or combine this with a client-specific signature preference. + TextSignature *string `json:"textSignature,omitempty"` + + // A signature the client SHOULD insert into new HTML messages that will be sent from this + // Identity. + // + // This text MUST be an HTML snippet to be inserted into the section of the HTML. + // + // Clients MAY ignore this and/or combine this with a client-specific signature preference. + HtmlSignature *string `json:"htmlSignature,omitempty"` + + // Is the user allowed to delete this Identity? + // + // Servers may wish to set this to false for the user’s username or other default address. + // + // Attempts to destroy an Identity with mayDelete: false will be rejected with a standard + // forbidden SetError. 
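+	//
+	// Illustrative fragment (the Identity id is hypothetical) of such a rejection in an
+	// Identity/set response:
+	//
+	//	"notDestroyed": { "I-1": { "type": "forbidden" } }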
+ MayDelete bool `json:"mayDelete,omitzero"` +} + +func (i Identity) AsPatch() PatchObject { + p := PatchObject{} + if i.Name != "" { + p["name"] = i.Name + } + if i.Email != "" { + p["email"] = i.Email + } + if i.ReplyTo != "" { + p["replyTo"] = i.ReplyTo + } + if i.Bcc != nil { + p["bcc"] = i.Bcc + } + if i.TextSignature != nil { + p["textSignature"] = i.TextSignature + } + if i.HtmlSignature != nil { + p["htmlSignature"] = i.HtmlSignature + } + return p +} + +type IdentityGetResponse struct { + AccountId string `json:"accountId"` + State State `json:"state"` + List []Identity `json:"list,omitempty"` + NotFound []string `json:"notFound,omitempty"` +} + +type VacationResponseGetCommand struct { + AccountId string `json:"accountId"` +} + +// Vacation Response +// +// A vacation response sends an automatic reply when a message is delivered to the mail store, +// informing the original sender that their message may not be read for some time. +// +// Automated message sending can produce undesirable behaviour. +// To avoid this, implementors MUST follow the recommendations set forth in [RFC3834]. +// +// The VacationResponse object represents the state of vacation-response-related settings for an account. +// +// [RFC3834]: https://www.rfc-editor.org/rfc/rfc3834.html +type VacationResponse struct { + // The id of the object. + // There is only ever one VacationResponse object, and its id is "singleton" + Id string `json:"id,omitempty"` + + // Should a vacation response be sent if a message arrives between the "fromDate" and "toDate"? + IsEnabled bool `json:"isEnabled"` + + // If "isEnabled" is true, messages that arrive on or after this date-time (but before the "toDate" if defined) should receive the + // user's vacation response. If null, the vacation response is effective immediately. + FromDate time.Time `json:"fromDate,omitzero"` + + // If "isEnabled" is true, messages that arrive before this date-time but on or after the "fromDate" if defined) should receive the + // user's vacation response. If null, the vacation response is effective indefinitely. + ToDate time.Time `json:"toDate,omitzero"` + + // The subject that will be used by the message sent in response to messages when the vacation response is enabled. + // If null, an appropriate subject SHOULD be set by the server. + Subject string `json:"subject,omitempty"` + + // The plaintext body to send in response to messages when the vacation response is enabled. + // If this is null, the server SHOULD generate a plaintext body part from the "htmlBody" when sending vacation responses + // but MAY choose to send the response as HTML only. If both "textBody" and "htmlBody" are null, an appropriate default + // body SHOULD be generated for responses by the server. + TextBody string `json:"textBody,omitempty"` + + // The HTML body to send in response to messages when the vacation response is enabled. + // If this is null, the server MAY choose to generate an HTML body part from the "textBody" when sending vacation responses + // or MAY choose to send the response as plaintext only. + HtmlBody string `json:"htmlBody,omitempty"` +} + +type VacationResponseGetResponse struct { + // The identifier of the account this response pertains to. + AccountId string `json:"accountId"` + + // A string representing the state on the server for all the data of this type in the account + // (not just the objects returned in this call). + // + // If the data changes, this string MUST change. 
If the data is unchanged, servers SHOULD return the same state string + // on subsequent requests for this data type. + State State `json:"state,omitempty"` + + // An array of VacationResponse objects. + List []VacationResponse `json:"list,omitempty"` + + // Contains identifiers of requested objects that were not found. + NotFound []any `json:"notFound,omitempty"` +} + +type VacationResponseSetCommand struct { + AccountId string `json:"accountId"` + IfInState string `json:"ifInState,omitempty"` + Create map[string]VacationResponse `json:"create,omitempty"` + Update map[string]PatchObject `json:"update,omitempty"` + Destroy []string `json:"destroy,omitempty"` +} + +type VacationResponseSetResponse struct { + AccountId string `json:"accountId"` + OldState State `json:"oldState,omitempty"` + NewState State `json:"newState,omitempty"` + Created map[string]VacationResponse `json:"created,omitempty"` + Updated map[string]VacationResponse `json:"updated,omitempty"` + Destroyed []string `json:"destroyed,omitempty"` + NotCreated map[string]SetError `json:"notCreated,omitempty"` + NotUpdated map[string]SetError `json:"notUpdated,omitempty"` + NotDestroyed map[string]SetError `json:"notDestroyed,omitempty"` +} + +// One of these attributes must be set, but not both. +type DataSourceObject struct { + DataAsText string `json:"data:asText,omitempty"` + DataAsBase64 string `json:"data:asBase64,omitempty"` +} + +type UploadObject struct { + Data []DataSourceObject `json:"data"` + Type string `json:"type,omitempty"` +} + +type BlobUploadCommand struct { + AccountId string `json:"accountId"` + Create map[string]UploadObject `json:"create"` +} + +type BlobUploadCreateResult struct { + Id string `json:"id"` + Type string `json:"type,omitempty"` + Size int `json:"size"` +} + +type BlobUploadResponse struct { + AccountId string `json:"accountId"` + Created map[string]BlobUploadCreateResult `json:"created"` +} + +const ( + BlobPropertyDataAsText = "data:asText" + BlobPropertyDataAsBase64 = "data:asBase64" + // Returns data:asText if the selected octets are valid UTF-8 or data:asBase64. + BlobPropertyData = "data" + BlobPropertySize = "size" + // https://www.iana.org/assignments/http-digest-hash-alg/http-digest-hash-alg.xhtml + BlobPropertyDigestSha256 = "digest:sha256" + // https://www.iana.org/assignments/http-digest-hash-alg/http-digest-hash-alg.xhtml + BlobPropertyDigestSha512 = "digest:sha512" +) + +type BlobGetCommand struct { + AccountId string `json:"accountId"` + Ids []string `json:"ids,omitempty"` + Properties []string `json:"properties,omitempty"` + Offset int `json:"offset,omitzero"` + Length int `json:"length,omitzero"` +} + +type BlobGetRefCommand struct { + AccountId string `json:"accountId"` + IdRef *ResultReference `json:"#ids,omitempty"` + Properties []string `json:"properties,omitempty"` + Offset int `json:"offset,omitzero"` + Length int `json:"length,omitzero"` +} + +type Blob struct { + // The unique identifier of the blob. + Id string `json:"id"` + + // (raw octets, must be UTF-8) + DataAsText string `json:"data:asText,omitempty"` + + // (base64 representation of octets) + DataAsBase64 string `json:"data:asBase64,omitempty"` + + // The base64 encoding of the digest of the octets in the selected range, + // calculated using the SHA-256 algorithm. + DigestSha256 string `json:"digest:sha256,omitempty"` + + // The base64 encoding of the digest of the octets in the selected range, + // calculated using the SHA-512 algorithm. 
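+	//
+	// Hypothetical sketch of a Blob/get request that asks only for the size and digests of a
+	// blob (ids are illustrative):
+	//
+	//	{ "accountId": "a1", "ids": [ "B-abc" ], "properties": [ "size", "digest:sha256", "digest:sha512" ] }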
+ DigestSha512 string `json:"digest:sha512,omitempty"` + + // If an encoding problem occured. + // + // The data fields contain a representation of the octets within the selected range + // that are present in the blob. + // + // If the octets selected are not valid UTF-8 (including truncating in the middle of a + // multi-octet sequence) and data or data:asText was requested, then the key isEncodingProblem + // MUST be set to true, and the data:asText response value MUST be null. + // + // In the case where data was requested and the data is not valid UTF-8, then data:asBase64 + // MUST be returned. + IsEncodingProblem bool `json:"isEncodingProblem,omitzero"` + + // When requesting a range: the isTruncated property in the result MUST be + // set to true to tell the client that the requested range could not be fully satisfied. + IsTruncated bool `json:"isTruncated,omitzero"` + + // The number of octets in the entire blob. + Size int `json:"size"` +} + +// Picks the best digest if available, or "" +func (b *Blob) Digest() string { + if b.DigestSha512 != "" { + return b.DigestSha512 + } else if b.DigestSha256 != "" { + return b.DigestSha256 + } else { + return "" + } +} + +type BlobGetResponse struct { + // The id of the account used for the call. + AccountId string `json:"accountId"` + + // A string representing the state on the server for all the data of this type in the + // account (not just the objects returned in this call). + // + // If the data changes, this string MUST change. If the Blob data is unchanged, servers + // SHOULD return the same state string on subsequent requests for this data type. + // + // When a client receives a response with a different state string to a previous call, + // it MUST either throw away all currently cached objects for the type or call + // Blob/changes to get the exact changes. + State State `json:"state,omitempty"` + + // An array of the Blob objects requested. + // + // This is the empty array if no objects were found or if the ids argument passed in + // was also an empty array. The results MAY be in a different order to the ids in the + // request arguments. If an identical id is included more than once in the request, + // the server MUST only include it once in either the list or the notFound argument of the response. + List []Blob `json:"list,omitempty"` + + // This array contains the ids passed to the method for records that do not exist. + // + // The array is empty if all requested ids were found or if the ids argument passed + // in was either null or an empty array. + NotFound []any `json:"notFound,omitempty"` +} + +type BlobDownload struct { + Body io.ReadCloser + Size int + Type string + ContentDisposition string + CacheControl string +} + +type UploadedBlob struct { + BlobId string `json:"blobId"` + Size int `json:"size,omitzero"` + Type string `json:"type,omitempty"` +} + +// When doing a search on a String property, the client may wish to show the relevant +// section of the body that matches the search as a preview and to highlight any +// matching terms in both this and the subject of the Email. +// +// Search snippets represent this data. +// +// What is a relevant section of the body for preview is server defined. If the server is +// unable to determine search snippets, it MUST return null for both the subject and preview +// properties. +// +// Note that unlike most data types, a SearchSnippet DOES NOT have a property called id. +type SearchSnippet struct { + // The Email id the snippet applies to. 
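+	//
+	// Hypothetical example of a whole SearchSnippet for a search on the word "budget"
+	// (ids and text are illustrative; RFC 8621 wraps matches in <mark> tags):
+	//
+	//	{ "emailId": "M-123", "subject": "Q3 <mark>budget</mark> review", "preview": "the draft <mark>budget</mark> is attached" }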
+ EmailId string `json:"emailId"` + + // If text from the filter matches the subject, this is the subject of the Email + // with the following transformations: + // + // 1. Any instance of the following three characters MUST be replaced by an + // appropriate HTML entity: & (ampersand), < (less-than sign), and > (greater-than sign) + // HTML. Other characters MAY also be replaced with an HTML entity form. + // 2. The matching words/phrases from the filter are wrapped in HTML tags. + // + // If the subject does not match text from the filter, this property is null. + Subject string `json:"subject,omitempty"` + + // If text from the filter matches the plaintext or HTML body, this is the + // relevant section of the body (converted to plaintext if originally HTML), + // with the same transformations as the subject property. + // + // It MUST NOT be bigger than 255 octets in size. + // + // If the body does not contain a match for the text from the filter, this property is null. + Preview string `json:"preview,omitempty"` +} + +type SearchSnippetGetRefCommand struct { + // The id of the account to use. + AccountId string `json:"accountId"` + + // The same filter as passed to Email/query. + Filter EmailFilterElement `json:"filter,omitempty"` + + // The ids of the Emails to fetch snippets for. + EmailIdRef *ResultReference `json:"#emailIds,omitempty"` +} + +type SearchSnippetGetResponse struct { + AccountId string `json:"accountId"` + List []SearchSnippet `json:"list,omitempty"` + NotFound []string `json:"notFound,omitempty"` +} + +type StateChangeType string + +const TypeOfStateChange = StateChangeType("StateChange") + +type StateChange struct { + // This MUST be the string "StateChange". + Type StateChangeType `json:"@type"` + + // A map of an "account id" to an object encoding the state of data types that have + // changed for that account since the last StateChange object was pushed, for each + // of the accounts to which the user has access and for which something has changed. + // + // The value is a map. The keys are the type name "Foo" e.g., "Mailbox" or "Email"), + // and the value is the "state" property that would currently be returned by a call to + // "Foo/get". + // + // The client can compare the new state strings with its current values to see whether + // it has the current data for these types. If not, the changes can then be efficiently + // fetched in a single standard API request (using the /changes type methods). + Changed map[string]map[ObjectType]string `json:"changed"` + + // A (preferably short) string that encodes the entire server state visible to the user + // (not just the objects returned in this call). + // + // The purpose of the "pushState" token is to allow a client to immediately get any changes + // that occurred while it was disconnected. If the server does not support "pushState" tokens, + // the client will have to issue a series of "/changes" requests upon reconnection to update + // its state to match that of the server. + PushState State `json:"pushState"` +} + +type AddressBookRights struct { + // The user may fetch the ContactCards in this AddressBook. + MayRead bool `json:"mayRead"` + + // The user may create, modify or destroy all ContactCards in this AddressBook, or move them to or from this AddressBook. + MayWrite bool `json:"mayWrite"` + + // The user may modify the “shareWith” property for this AddressBook. + MayAdmin bool `json:"mayAdmin"` + + // The user may delete the AddressBook itself. 
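+	//
+	// Illustrative shareWith entry (the principal id is hypothetical) granting read-only access
+	// to an AddressBook:
+	//
+	//	"shareWith": { "P-alice": { "mayRead": true, "mayWrite": false, "mayAdmin": false, "mayDelete": false } }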
+ MayDelete bool `json:"mayDelete"` +} + +// An AddressBook is a named collection of ContactCards. +// +// All ContactCards are associated with one or more AddressBook. +type AddressBook struct { + // The id of the AddressBook (immutable; server-set). + Id string `json:"id"` + + // The user-visible name of the AddressBook. + // + // This may be any UTF-8 string of at least 1 character in length and maximum 255 octets in size. + Name string `json:"name"` + + // An optional longer-form description of the AddressBook, to provide context in shared environments + // where users need more than just the name. + Description string `json:"description,omitempty"` + + // Defines the sort order of AddressBooks when presented in the client’s UI, so it is consistent between devices. + // + // The number MUST be an integer in the range 0 <= sortOrder < 2^31. + // + // An AddressBook with a lower order should be displayed before a AddressBook with a higher order in any list + // of AddressBooks in the client’s UI. + // + // AddressBooks with equal order SHOULD be sorted in alphabetical order by name. + // + // The sorting should take into account locale-specific character order convention. + // + // Default: 0 + SortOrder uint `json:"sortOrder,omitzero"` + + // This SHOULD be true for exactly one AddressBook in any account, and MUST NOT be true for more than one + // AddressBook within an account. + // + // The default AddressBook should be used by clients whenever they need to choose an AddressBook for the user + // within this account, and they do not have any other information on which to make a choice. + // + // For example, if the user creates a new contact card, the client may automatically set the card as belonging + // to the default AddressBook from the user’s primary account. + IsDefault bool `json:"isDefault,omitzero"` + + // True if the user has indicated they wish to see this AddressBook in their client. + // + // This SHOULD default to false for AddressBooks in shared accounts the user has access to and true for any + // new AddressBooks created by the user themself. + // + // If false, the AddressBook and its contents SHOULD only be displayed when the user explicitly requests it + // or to offer it for the user to subscribe to. + IsSubscribed bool `json:"isSubscribed"` + + // A map of Principal id to rights for principals this AddressBook is shared with. + // + // The principal to which this AddressBook belongs MUST NOT be in this set. + // + // This is null if the AddressBook is not shared with anyone. + // + // May be modified only if the user has the mayAdmin right. + // + // The account id for the principals may be found in the urn:ietf:params:jmap:principals:owner capability + // of the Account to which the AddressBook belongs. + ShareWith map[string]AddressBookRights `json:"shareWith,omitempty"` + + // The set of access rights the user has in relation to this AddressBook (server-set). + MyRights AddressBookRights `json:"myRights"` +} + +type CalendarRights struct { + // The user may read the free-busy information for this calendar. + MayReadFreeBusy bool `json:"mayReadFreeBusy"` + + // The user may fetch the events in this calendar. + MayReadItems bool `json:"mayReadItems"` + + // The user may create, modify or destroy all events in this calendar, or move events + // to or from this calendar. + // + // If this is `true`, the `mayWriteOwn`, `mayUpdatePrivate` and `mayRSVP` + // properties MUST all also be `true`. 
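+	//
+	// Illustrative example (values are hypothetical) of a consistent rights object granting full
+	// write access but no admin rights:
+	//
+	//	{ "mayReadFreeBusy": true, "mayReadItems": true, "mayWriteAll": true, "mayWriteOwn": true,
+	//	  "mayUpdatePrivate": true, "mayRSVP": true, "mayAdmin": false, "mayDelete": false }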
+ MayWriteAll bool `json:"mayWriteAll"` + + // The user may create, modify or destroy an event on this calendar if either they are + // the owner of the event or the event has no owner. + // + // This means the user may also transfer ownership by updating an event so they are no longer an owner. + MayWriteOwn bool `json:"mayWriteOwn"` + + // The user may modify per-user properties on all events in the calendar, even if they would + // not otherwise have permission to modify that event. + // + // These properties MUST all be stored per-user, and changes do not affect any other user of the calendar. + // + // The user may also modify these properties on a per-occurrence basis for recurring events + // (updating the `recurrenceOverrides` property of the event to do so). + MayUpdatePrivate bool `json:"mayUpdatePrivate"` + + // The user may modify the following properties of any `Participant` object that corresponds + // to one of the user's `ParticipantIdentity` objects in the account, even if they would not + // otherwise have permission to modify that event. + // + // !- `participationStatus` + // !- `participationComment` + // !- `expectReply` + // !- `scheduleAgent` + // !- `scheduleSequence` + // !- `scheduleUpdated` + // + // If the event has its `mayInviteSelf` property set to `true`, then the user may also add a + // new `Participant` to the event with `scheduleId`/`sendTo` properties that are the same as + // the `scheduleId`/`sendTo` properties of one of the user's `ParticipantIdentity` objects in + // the account. + // + // The `roles` property of the participant MUST only contain `attendee`. + // + // If the event has its `mayInviteOthers` property set to `true` and there is an existing + // `Participant` in the event corresponding to one of the user's `ParticipantIdentity` objects + // in the account, then the user may also add new participants. + // + // The `roles` property of any new participant MUST only contain `attendee`. + // + // The user may also do all of the above on a per-occurrence basis for recurring events + // (updating the recurrenceOverrides property of the event to do so). + MayRSVP bool `json:"mayRSVP"` + + // The user may modify the `shareWith` property for this calendar. + MayAdmin bool `json:"mayAdmin"` + + // The user may delete the calendar itself. + MayDelete bool `json:"mayDelete"` +} + +// A Calendar is a named collection of events. +// +// All events are associated with at least one calendar. +// +// The user is an owner for an event if the `CalendarEvent` object has a `participants` +// property, and one of the `Participant` objects both: +// 1. Has the `owner` role. +// 2. Corresponds to one of the user's `ParticipantIdentity` objects in the account. +// +// An event has no owner if its `participants` property is null or omitted, or if none +// of the `Participant` objects have the `owner` role. +type Calendar struct { + // The id of the calendar (immutable; server-set). + Id string `json:"id"` + + // The user-visible name of the calendar. + // + // This may be any UTF-8 string of at least 1 character in length and maximum 255 octets in size. + Name string `json:"name"` + + // An optional longer-form description of the calendar, to provide context in shared environments + // where users need more than just the name. + Description string `json:"description,omitempty"` + + // A color to be used when displaying events associated with the calendar. 
+ // + // If not null, the value MUST be a case-insensitive color name taken from the set of names + // defined in Section 4.3 of CSS Color Module Level 3 COLORS, or an RGB value in hexadecimal + // notation, as defined in Section 4.2.1 of CSS Color Module Level 3. + // + // The color SHOULD have sufficient contrast to be used as text on a white background. + Color string `json:"color,omitempty"` + + // Defines the sort order of calendars when presented in the client’s UI, so it is consistent + // between devices. + // + // The number MUST be an integer in the range 0 <= sortOrder < 2^31. + // + // A calendar with a lower order should be displayed before a calendar with a higher order in any + // list of calendars in the client’s UI. + // + // Calendars with equal order SHOULD be sorted in alphabetical order by name. + // + // The sorting should take into account locale-specific character order convention. + SortOrder uint `json:"sortOrder,omitzero"` + + // True if the user has indicated they wish to see this Calendar in their client. + // + // This SHOULD default to `false` for Calendars in shared accounts the user has access to and `true` + // for any new Calendars created by the user themself. + // + // If false, the calendar SHOULD only be displayed when the user explicitly requests it or to offer + // it for the user to subscribe to. + // + // For example, a company may have a large number of shared calendars which all employees have + // permission to access, but you would only subscribe to the ones you care about and want to be able + // to have normally accessible. + IsSubscribed bool `json:"isSubscribed"` + + // Should the calendar’s events be displayed to the user at the moment? + // + // Clients MUST ignore this property if `isSubscribed` is false. + // + // If an event is in multiple calendars, it should be displayed if `isVisible` is `true` + // for any of those calendars. + // + // default: true + IsVisible bool `json:"isVisible"` + + // This SHOULD be true for exactly one calendar in any account, and MUST NOT be true for more + // than one calendar within an account (server-set). + // + // The default calendar should be used by clients whenever they need to choose a calendar + // for the user within this account, and they do not have any other information on which to make + // a choice. + // + // For example, if the user creates a new event, the client may automatically set the event as + // belonging to the default calendar from the user’s primary account. + IsDefault bool `json:"isDefault,omitzero"` + + // Should the calendar’s events be used as part of availability calculation? + // + // This MUST be one of: + // !- `all``: all events are considered. + // !- `attending``: events the user is a confirmed or tentative participant of are considered. + // !- `none``: all events are ignored (but may be considered if also in another calendar). + // + // This should default to “all” for the calendars in the user’s own account, and “none” for calendars shared with the user. + IncludeInAvailability IncludeInAvailability `json:"includeInAvailability,omitempty"` + + // A map of alert ids to Alert objects (see [@!RFC8984], Section 4.5.2) to apply for events + // where `showWithoutTime` is `false` and `useDefaultAlerts` is `true`. + // + // Ids MUST be unique across all default alerts in the account, including those in other + // calendars; a UUID is recommended. 
+ // + // The "trigger" MUST NOT be an `AbsoluteTrigger`, as this would fire for every event at the same + // time and so does not make sense for a default alert. + // + // If omitted on creation, the default is server dependent. + // + // For example, servers may choose to always default to null, or may copy the alerts from the default calendar. + DefaultAlertsWithTime map[string]jscalendar.Alert `json:"defaultAlertsWithTime,omitempty"` + + // A map of alert ids to Alert objects (see [@!RFC8984], Section 4.5.2) to apply for events where + // `showWithoutTime` is `true` and `useDefaultAlerts` is `true`. + // + // Ids MUST be unique across all default alerts in the account, including those in other + // calendars; a UUID is recommended. + // + // The "trigger" MUST NOT be an `AbsoluteTrigger`, as this would fire for every event at the + // same time and so does not make sense for a default alert. + // + // If omitted on creation, the default is server dependent. + // + // For example, servers may choose to always default to null, or may copy the alerts from the default calendar. + DefaultAlertsWithoutTime map[string]jscalendar.Alert `json:"defaultAlertsWithoutTime,omitempty"` + + // The time zone to use for events without a time zone when the server needs to resolve them into + // absolute time, e.g., for alerts or availability calculation. + // + // The value MUST be a time zone id from the IANA Time Zone Database TZDB. + // + // If null, the `timeZone` of the account’s associated `Principal` will be used. + // + // Clients SHOULD use this as the default for new events in this calendar if set. + TimeZone string `json:"timeZone,omitempty"` + + // A map of `Principal` id to rights for principals this calendar is shared with. + // + // The principal to which this calendar belongs MUST NOT be in this set. + // + // This is null if the calendar is not shared with anyone. + // + // May be modified only if the user has the `mayAdmin` right. + // + // The account id for the principals may be found in the `urn:ietf:params:jmap:principals:owner` + // capability of the `Account` to which the calendar belongs. + ShareWith map[string]CalendarRights `json:"shareWith,omitempty"` + + // The set of access rights the user has in relation to this Calendar. + // + // If any event is in multiple calendars, the user has the following rights: + // !- The user may fetch the event if they have the mayReadItems right on any calendar the event is in. + // !- The user may remove an event from a calendar (by modifying the event’s “calendarIds” property) if the user + // has the appropriate permission for that calendar. + // !- The user may make other changes to the event if they have the right to do so in all calendars to which the + // event belongs. + MyRights *CalendarRights `json:"myRights,omitempty"` +} + +// A CalendarEvent object contains information about an event, or recurring series of events, +// that takes place at a particular time. +// +// It is a JSCalendar Event object, as defined in [@!RFC8984], with additional properties. +type CalendarEvent struct { + + // The id of the CalendarEvent (immutable; server-set). + // + // The id uniquely identifies a JSCalendar Event with a particular `uid` and + // `recurrenceId` within a particular account. + Id string `json:"id,omitempty"` + + // This is only defined if the `id` property is a synthetic id, generated by the + // server to represent a particular instance of a recurring event (immutable; server-set). 
+ //
+ // This property gives the id of the "real" `CalendarEvent` this was generated from.
+ BaseEventId string `json:"baseEventId,omitempty"`
+
+ // The set of Calendar ids this event belongs to.
+ //
+ // An event MUST belong to one or more Calendars at all times (until it is destroyed).
+ //
+ // The set is represented as an object, with each key being a Calendar id.
+ //
+ // The value for each key in the object MUST be `true`.
+ CalendarIds map[string]bool `json:"calendarIds,omitempty"`
+
+ // If true, this event is to be considered a draft.
+ //
+ // The server will not send any scheduling messages to participants or send push notifications
+ // for alerts.
+ //
+ // This may only be set to `true` upon creation.
+ //
+ // Once set to `false`, the value cannot be updated to `true`.
+ //
+ // This property MUST NOT appear in `recurrenceOverrides`.
+ IsDraft bool `json:"isDraft,omitzero"`
+
+ // Is this the authoritative source for this event (i.e., does it control scheduling for
+ // this event; the event has not been added as a result of an invitation from another calendar system)?
+ //
+ // This is true if, and only if:
+ // !- the event’s `replyTo` property is null; or
+ // !- the account will receive messages sent to at least one of the methods specified in the `replyTo` property of the event.
+ IsOrigin bool `json:"isOrigin,omitzero"`
+
+ // For simple clients that do not implement time zone support.
+ //
+ // Clients should only use this if also asking the server to expand recurrences, as you cannot accurately
+ // expand a recurrence without the original time zone.
+ //
+ // This property is calculated at fetch time by the server.
+ //
+ // Time zones are political and they can and do change at any time.
+ //
+ // Fetching exactly the same property again may return a different result if the time zone data has been updated on the server.
+ //
+ // Time zone data changes are not considered `updates` to the event.
+ //
+ // If set, the server will convert the UTC date to the event's current time zone and store the local time.
+ //
+ // This property is not included in `CalendarEvent/get` responses by default and must be requested explicitly.
+ //
+ // Floating events (events without a time zone) will be interpreted as per the time zone given as a `CalendarEvent/get` argument.
+ //
+ // Note that it is not possible to accurately calculate the expansion of recurrence rules or recurrence overrides with the
+ // `utcStart` property rather than the local start time. Even simple recurrences such as "repeat weekly" may cross a
+ // daylight-savings boundary and end up at a different UTC time. Clients that wish to use `utcStart` are RECOMMENDED to
+ // request that the server expand recurrences.
+ UtcStart UTCDate `json:"utcStart,omitzero"`
+
+ // The server calculates the end time in UTC from the start/timeZone/duration properties of the event.
+ //
+ // This property is not included by default and must be requested explicitly.
+ //
+ // Like `utcStart`, it is calculated at fetch time if requested and may change due to time zone data changes.
+ //
+ // Floating events will be interpreted as per the time zone given as a `CalendarEvent/get` argument.
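+ //
+ // For example (illustrative values): an event with start 2025-01-15T09:00:00, timeZone
+ // Europe/Berlin (UTC+1 on that date) and duration PT1H would be returned with
+ // utcStart 2025-01-15T08:00:00Z and utcEnd 2025-01-15T09:00:00Z.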
+ UtcEnd UTCDate `json:"utcEnd,omitzero"` + + jscalendar.Event +} + +const ( + CalendarEventPropertyId = "id" + CalendarEventPropertyBaseEventId = "baseEventId" + CalendarEventPropertyCalendarIds = "calendarIds" + CalendarEventPropertyIsDraft = "isDraft" + CalendarEventPropertyIsOrigin = "isOrigin" + CalendarEventPropertyUtcStart = "utcStart" + CalendarEventPropertyUtcEnd = "utcEnd" + CalendarEventPropertyType = "type" + CalendarEventPropertyStart = "start" + CalendarEventPropertyDuration = "duration" + CalendarEventPropertyStatus = "status" + CalendarEventPropertyRelatedTo = "relatedTo" + CalendarEventPropertySequence = "sequence" + CalendarEventPropertyShowWithoutTime = "showWithoutTime" + CalendarEventPropertyLocations = "locations" + CalendarEventPropertyVirtualLocations = "virtualLocations" + CalendarEventPropertyRecurrenceId = "recurrenceId" + CalendarEventPropertyRecurrenceIdTimeZone = "recurrenceIdTimeZone" + CalendarEventPropertyRecurrenceRules = "recurrenceRules" + CalendarEventPropertyExcludedRecurrenceRules = "excludedRecurrenceRules" + CalendarEventPropertyRecurrenceOverrides = "recurrenceOverrides" + CalendarEventPropertyExcluded = "excluded" + CalendarEventPropertyPriority = "priority" + CalendarEventPropertyFreeBusyStatus = "freeBusyStatus" + CalendarEventPropertyPrivacy = "privacy" + CalendarEventPropertyReplyTo = "replyTo" + CalendarEventPropertySentBy = "sentBy" + CalendarEventPropertyParticipants = "participants" + CalendarEventPropertyRequestStatus = "requestStatus" + CalendarEventPropertyUseDefaultAlerts = "useDefaultAlerts" + CalendarEventPropertyAlerts = "alerts" + CalendarEventPropertyLocalizations = "localizations" + CalendarEventPropertyTimeZone = "timeZone" + CalendarEventPropertyMayInviteSelf = "mayInviteSelf" + CalendarEventPropertyMayInviteOthers = "mayInviteOthers" + CalendarEventPropertyHideAttendees = "hideAttendees" + CalendarEventPropertyUid = "uid" + CalendarEventPropertyProdId = "prodId" + CalendarEventPropertyCreated = "created" + CalendarEventPropertyUpdated = "updated" + CalendarEventPropertyTitle = "title" + CalendarEventPropertyDescription = "description" + CalendarEventPropertyDescriptionContentType = "descriptionContentType" + CalendarEventPropertyLinks = "links" + CalendarEventPropertyLocale = "locale" + CalendarEventPropertyKeywords = "keywords" + CalendarEventPropertyCategories = "categories" + CalendarEventPropertyColor = "color" + CalendarEventPropertyTimeZones = "timeZones" +) + +var CalendarEventProperties = []string{ + CalendarEventPropertyId, + CalendarEventPropertyBaseEventId, + CalendarEventPropertyCalendarIds, + CalendarEventPropertyIsDraft, + CalendarEventPropertyIsOrigin, + CalendarEventPropertyUtcStart, + CalendarEventPropertyUtcEnd, + CalendarEventPropertyType, + CalendarEventPropertyStart, + CalendarEventPropertyDuration, + CalendarEventPropertyStatus, + CalendarEventPropertyRelatedTo, + CalendarEventPropertySequence, + CalendarEventPropertyShowWithoutTime, + CalendarEventPropertyLocations, + CalendarEventPropertyVirtualLocations, + CalendarEventPropertyRecurrenceId, + CalendarEventPropertyRecurrenceIdTimeZone, + CalendarEventPropertyRecurrenceRules, + CalendarEventPropertyExcludedRecurrenceRules, + CalendarEventPropertyRecurrenceOverrides, + CalendarEventPropertyExcluded, + CalendarEventPropertyPriority, + CalendarEventPropertyFreeBusyStatus, + CalendarEventPropertyPrivacy, + CalendarEventPropertyReplyTo, + CalendarEventPropertySentBy, + CalendarEventPropertyParticipants, + CalendarEventPropertyRequestStatus, + 
CalendarEventPropertyUseDefaultAlerts, + CalendarEventPropertyAlerts, + CalendarEventPropertyLocalizations, + CalendarEventPropertyTimeZone, + CalendarEventPropertyMayInviteSelf, + CalendarEventPropertyMayInviteOthers, + CalendarEventPropertyHideAttendees, + CalendarEventPropertyUid, + CalendarEventPropertyProdId, + CalendarEventPropertyCreated, + CalendarEventPropertyUpdated, + CalendarEventPropertyTitle, + CalendarEventPropertyDescription, + CalendarEventPropertyDescriptionContentType, + CalendarEventPropertyLinks, + CalendarEventPropertyLocale, + CalendarEventPropertyKeywords, + CalendarEventPropertyCategories, + CalendarEventPropertyColor, + CalendarEventPropertyTimeZones, +} + +// A ParticipantIdentity stores information about a URI that represents the user within that account in an event’s participants. +type ParticipantIdentity struct { + // The id of the ParticipantIdentity (immutable; server-set). + Id string `json:"id"` + + // The display name of the participant to use when adding this participant to an event, e.g. "Joe Bloggs". + // + // default: + Name string `json:"name,omitempty"` + + // The URI that represents this participant for scheduling. + // + // This URI MAY also be the URI for one of the sendTo methods. + ScheduleId string `json:"scheduleId"` + + // Represents methods by which the participant may receive invitations and updates to an event. + // + // The keys in the property value are the available methods and MUST only contain ASCII alphanumeric + // characters (`A-Za-z0-9`). + // + // The value is a URI for the method specified in the key. + SendTo map[string]string `json:"sendTo,omitempty"` + + // This SHOULD be true for exactly one participant identity in any account, and MUST NOT be true for more + // than one participant identity within an account (server-set). + // + // The default identity should be used by clients whenever they need to choose an identity for the user + // within this account, and they do not have any other information on which to make a choice. + // + // For example, if creating a scheduled event in this account, the default identity may be automatically + // added as an owner. (But the client may ignore this if, for example, it has its own feature to allow + // users to choose which identity to use based on the invitees.) + IsDefault bool `json:"isDefault,omitzero"` +} + +type CalendarAlert struct { + // This MUST be the string `CalendarAlert`. + Type TypeOfCalendarAlert `json:"@type,omitempty"` + + // The account id for the calendar in which the alert triggered. + AccountId string `json:"accountId"` + + // The CalendarEvent id for the alert that triggered. + // + // Note, for a recurring event this is the id of the base event, never a synthetic id for a particular instance. + CalendarEventId string `json:"calendarEventId"` + + // The uid property of the CalendarEvent for the alert that triggered. + Uid string `json:"uid"` + + // The `recurrenceId` for the instance of the event for which this alert is being + // triggered, or null if the event is not recurring. + RecurrenceId LocalDate `json:"recurrenceId,omitzero"` + + // The id for the alert that triggered. + AlertId string `json:"alertId"` +} + +type Person struct { + // The name of the person who made the change. + Name string `json:"name"` + + // The email of the person who made the change, or null if no email is available. + Email string `json:"email,omitempty"` + + // The id of the `Principal` corresponding to the person who made the change, if any. 
+ //
+ // This will be null if the change was due to receiving an iTIP message.
+ PrincipalId string `json:"principalId,omitempty"`
+
+ // The `scheduleId` URI of the person who made the change, if any.
+ //
+ // This will normally be set if the change was made due to receiving an iTIP message.
+ ScheduleId string `json:"scheduleId,omitempty"`
+}
+
+type CalendarEventNotification struct {
+ // The id of the `CalendarEventNotification`.
+ Id string `json:"id"`
+
+ // The time this notification was created.
+ Created UTCDate `json:"created,omitzero"`
+
+ // Who made the change.
+ ChangedBy *Person `json:"person,omitempty"`
+
+ // Comment sent along with the change by the user that made it.
+ //
+ // (e.g. `COMMENT` property in an iTIP message), if any.
+ Comment string `json:"comment,omitempty"`
+
+ // `CalendarEventNotification` type.
+ //
+ // This MUST be one of
+ // !- `created`
+ // !- `updated`
+ // !- `destroyed`
+ Type CalendarEventNotificationTypeOption `json:"type"`
+
+ // The id of the CalendarEvent that this notification is about.
+ //
+ // If the change only affects a single instance of a recurring event, the server MAY set the
+ // `event` and `eventPatch` properties for just that instance; the `calendarEventId` MUST
+ // still be for the base event.
+ CalendarEventId string `json:"calendarEventId"`
+
+ // Is this event a draft? (created/updated only)
+ IsDraft bool `json:"isDraft,omitzero"`
+
+ // The data before the change (if updated or destroyed),
+ // or the data after creation (if created).
+ Event *jscalendar.Event `json:"event,omitempty"`
+
+ // A patch encoding the change between the data in the event property,
+ // and the data after the update (updated only).
+ EventPatch PatchObject `json:"eventPatch,omitempty"`
+}
+
+// Denotes the task list has a special purpose.
+//
+// This MUST be one of the following:
+// !- `inbox`: This is the principal’s default task list;
+// !- `trash`: This task list holds messages the user has discarded;
+type TaskListRole string
+
+const (
+ // This is the principal’s default task list.
+ TaskListRoleInbox = TaskListRole("inbox")
+ // This task list holds messages the user has discarded.
+ TaskListRoleTrash = TaskListRole("trash")
+)
+
+var (
+ DefaultWorkflowStatuses = []string{
+ "completed",
+ "failed",
+ "in-process",
+ "needs-action",
+ "cancelled",
+ "pending",
+ }
+)
+
+type TaskRights struct {
+ // The user may fetch the tasks in this task list.
+ MayReadItems bool `json:"mayReadItems"`
+
+ // The user may create, modify or destroy all tasks in this task list,
+ // or move tasks to or from this task list.
+ //
+ // If this is `true`, the `mayWriteOwn`, `mayUpdatePrivate` and `mayRSVP` properties
+ // MUST all also be `true`.
+ MayWriteAll bool `json:"mayWriteAll"`
+
+ // The user may create, modify or destroy a task on this task list if either they are
+ // the owner of the task (see below) or the task has no owner.
+ //
+ // This means the user may also transfer ownership by updating a task so they are no longer
+ // an owner.
+ MayWriteOwn bool `json:"mayWriteOwn"`
+
+ // The user may modify the following properties on all tasks in the task list, even
+ // if they would not otherwise have permission to modify that task.
+ //
+ // These properties MUST all be stored per-user, and changes do not affect any other user of the task list.
+ //
+ // The user may also modify the above on a per-occurrence basis for recurring tasks
+ // (updating the `recurrenceOverrides` property of the task to do so).
+ MayUpdatePrivate bool `json:"mayUpdatePrivate"` + + // The user may modify the following properties of any `Participant` object that corresponds + // to one of the user’s `ParticipantIdentity` objects in the account, even if they would not + // otherwise have permission to modify that task + // !- `participationStatus` + // !- `participationComment` + // !- `expectReply` + // + // If the task has its `mayInviteSelf` property set to true, then the user may also add a new + // `Participant` to the task with a `sendTo` property that is the same as the `sendTo` property + // of one of the user’s `ParticipantIdentity` objects in the account. + // The `roles` property of the participant MUST only contain `attendee`. + // + // If the task has its `mayInviteOthers` property set to `true` and there is an existing + // `Participant` in the task corresponding to one of the user’s `ParticipantIdentity` objects + // in the account, then the user may also add new participants. + // The `roles` property of any new participant MUST only contain `attendee`. + // + // The user may also do all of the above on a per-occurrence basis for recurring tasks + // (updating the `recurrenceOverrides` property of the task to do so). + MayRSVP bool `json:"mayRSVP"` + + // The user may modify sharing for this task list. + MayAdmin bool `json:"mayAdmin"` + + // The user may delete the task list itself (server-set). + // + // This property MUST be false if the account to which this task list belongs has the `isReadOnly` + // property set to true. + MayDelete bool `json:"mayDelete"` +} + +type TaskList struct { + // The id of the task list (immutable; server-set). + Id string `json:"id,omitempty"` + + // Denotes the task list has a special purpose. + // + // This MUST be one of the following: + // !- `inbox`: This is the principal’s default task list; + // !- `trash`: This task list holds messages the user has discarded; + Role TaskListRole `json:"role,omitempty"` + + // The user-visible name of the task list. + // + // This may be any UTF-8 string of at least 1 character in length and maximum 255 octets in size. + Name string `json:"name,omitempty"` + + // An optional longer-form description of the task list, to provide context in shared environments + // where users need more than just the name. + Description string `json:"description,omitempty"` + + // A color to be used when displaying tasks associated with the task list. + // + // If not null, the value MUST be a case-insensitive color name taken from the set of names defined + // in Section 4.3 of CSS Color Module Level 3 COLORS, or an RGB value in hexadecimal notation, + // as defined in Section 4.2.1 of CSS Color Module Level 3. + // + // The color SHOULD have sufficient contrast to be used as text on a white background. + Color string `json:"color,omitempty"` + + // A map of keywords to the colors used when displaying the keywords associated to a task. + // + // The same considerations, as for `color` above, apply. + KeywordColors map[string]string `json:"keywordColors,omitempty"` + + // A map of categories to the colors used when displaying the categories associated to a task. + // + // The same considerations, as for `color` above, apply. + CategoryColors map[string]string `json:"categoryColors,omitempty"` + + // Defines the sort order of task lists when presented in the client’s UI, so it is consistent + // between devices. + // + // The number MUST be an integer in the range 0 ≤ sortOrder < 2^31. 
+ //
+ // A task list with a lower order should be displayed before a list with a higher order in any list
+ // of task lists in the client’s UI.
+ //
+ // Task lists with equal order SHOULD be sorted in alphabetical order by name.
+ //
+ // The sorting should take into account locale-specific character order convention.
+ SortOrder uint `json:"sortOrder,omitzero"`
+
+ // Has the user indicated they wish to see this task list in their client?
+ //
+ // This SHOULD default to false for task lists in shared accounts the user has access to,
+ // and true for any new task list created by the user themselves.
+ //
+ // If false, the task list should only be displayed when the user explicitly
+ // requests it or to offer it for the user to subscribe to.
+ IsSubscribed bool `json:"isSubscribed,omitzero"`
+
+ // The time zone to use for tasks without a time zone when the server needs to resolve them
+ // into absolute time, e.g., for alerts or availability calculation.
+ //
+ // The value MUST be a time zone id from the IANA Time Zone Database TZDB.
+ //
+ // If null, the timeZone of the account’s associated Principal will be used.
+ //
+ // Clients SHOULD use this as the default for new tasks in this task list, if set.
+ TimeZone string `json:"timeZone,omitempty"`
+
+ // Defines the allowed values for `workflowStatus`.
+ //
+ // The default values are based on the values defined within [@!RFC8984], Section 5.2.5, plus `pending`.
+ //
+ // `pending` indicates the task has been created and accepted, but it is currently on hold.
+ //
+ // As naming and workflows differ between systems, mapping the status correctly to the present values
+ // of the `Task` can be challenging. In the simplest case, a task system may support merely two states, `done`
+ // and `not-done`.
+ //
+ // On the other hand, statuses and their semantic meaning can differ between systems or task lists (e.g. projects).
+ //
+ // In case of uncertainty, here are some recommendations for mapping commonly observed values that can help
+ // during implementation:
+ // !- `completed`: `done` (simplest case), `closed`, `verified`, …
+ // !- `in-process`: `in-progress`, `active`, `assigned`, …
+ // !- `needs-action`: `not-done` (simplest case), `not-started`, `new`, …
+ // !- `pending`: `waiting`, `deferred`, `on-hold`, `paused`, …
+ WorkflowStatuses []string `json:"workflowStatuses,omitempty"`
+
+ // A map of `Principal` id to rights for principals this task list is shared with.
+ //
+ // The principal to which this task list belongs MUST NOT be in this set.
+ //
+ // This is null if the task list is not shared with anyone.
+ //
+ // May be modified only if the user has the `mayAdmin` right.
+ //
+ // The account id for the principals may be found in the `urn:ietf:params:jmap:principals:owner` capability
+ // of the `Account` to which the task list belongs.
+ ShareWith map[string]TaskRights `json:"shareWith,omitempty"`
+
+ // The set of access rights the user has in relation to this `TaskList`.
+ //
+ // The user may fetch the task if they have the `mayReadItems` right on any task list the task is in.
+ //
+ // The user may remove a task from a task list (by modifying the task’s `taskListId` property) if the user has the
+ // appropriate permission for that task list.
+ //
+ // The user may make other changes to the task if they have the right to do so in all task lists to which the task belongs.
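+ //
+ // For example (illustrative), a sharee whose rights for this task list contain only
+ // `mayReadItems` may fetch its tasks but may not modify or destroy them.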
+ MyRights *TaskRights `json:"myRights,omitempty"` + + // A map of alert ids to `Alert` objects (see [@!RFC8984], Section 4.5.2) to apply for tasks + // where `showWithoutTime` is `false` and `useDefaultAlerts` is `true`. + // + // Ids MUST be unique across all default alerts in the account, including those in other task + // lists; a UUID is recommended. + // + // If omitted on creation, the default is server dependent. + // + // For example, servers may choose to always default to null, or may copy the alerts from the default task list. + DefaultAlertsWithTime map[string]jscalendar.Alert `json:"defaultAlertsWithTime,omitempty"` + + // A map of alert ids to `Alert` objects (see [@!RFC8984], Section 4.5.2) to apply for tasks + // where `showWithoutTime` is `true` and `useDefaultAlerts` is `true`. + // + // Ids MUST be unique across all default alerts in the account, including those in other task + // lists; a UUID is recommended. + // + // If omitted on creation, the default is server dependent. For example, servers may choose to always + // default to `null`, or may copy the alerts from the default task list. + DefaultAlertsWithoutTime map[string]jscalendar.Alert `json:"defaultAlertsWithoutTime,omitempty"` +} + +type TypeOfChecklist string +type TypeOfCheckItem string +type TypeOfTaskPerson string +type TypeOfComment string + +type TaskNotificationTypeOption string + +const ChecklistType = TypeOfChecklist("Checklist") +const CheckItemType = TypeOfCheckItem("CheckItem") +const TaskPersonType = TypeOfTaskPerson("Person") +const CommentType = TypeOfComment("Comment") +const TaskNotificationTypeOptionCreated = TaskNotificationTypeOption("created") +const TaskNotificationTypeOptionUpdated = TaskNotificationTypeOption("updated") +const TaskNotificationTypeOptionDestroyed = TaskNotificationTypeOption("destroyed") + +// The Person object has the following properties of which either principalId or uri MUST be defined. +type TaskPerson struct { + // Specifies the type of this object, this MUST be `Person`. + Type TypeOfTaskPerson `json:"@type,omitempty"` + + // The name of the person. + Name string `json:"name,omitempty"` + + // A URI value that identifies the person. + // + // This SHOULD be the `scheduleId` of the participant that this item was assigned to. + Uri string `json:"uri,omitempty"` + + // The id of the Principal corresponding to the person, if any. + PrincipalId string `json:"principalId,omitempty"` +} + +type Comment struct { + // Specifies the type of this object, this MUST be `Comment`. + Type TypeOfComment `json:"@type,omitempty"` + + // The free text value of this comment. + Message string `json:"message"` + + // The date and time when this note was created. + Created UTCDate `json:"created,omitzero"` + + // The date and time when this note was updated. + Updated UTCDate `json:"updated,omitzero"` + + // The author of this comment. + Author *TaskPerson `json:"author,omitempty"` +} + +type CheckItem struct { + // Specifies the type of this object, this MUST be `CheckItem`. + Type TypeOfCheckItem `json:"@type,omitempty"` + + // Title of the item. + Title string `json:"title,omitempty"` + + // Defines the sort order of `CheckItem` when presented in the client’s UI. + // + // The number MUST be an integer in the range 0 <= sortOrder < 2^31. + // + // An item with a lower order should be displayed before an item with a higher order. + // + // Items with equal order SHOULD be sorted in alphabetical order by name. 
+ // + // The sorting should take into account locale-specific character order convention. + SortOrder uint `json:"sortOrder,omitzero"` + + // The date and time when this item was updated. + Updated UTCDate `json:"updated,omitzero"` + + IsComplete bool `json:"isComplete,omitzero"` + + // The person that this item is assigned to. + // + // The `Person` object has the following properties of which either `principalId` or `uri` + // MUST be defined. + Assignee *TaskPerson `json:"assignee,omitempty"` + + // Free-text comments associated with this task. + Comments map[string]Comment `json:"comments,omitempty"` +} + +type Checklist struct { + // Specifies the type of this object, this MUST be `Checklist`. + Type TypeOfChecklist `json:"@type,omitempty"` + + // Title of the list. + Title string `json:"title,omitempty"` + + // The items of the check list. + CheckItems []CheckItem `json:"checkItems,omitempty"` +} + +// A `Task` object contains information about a task. +// +// It is a JSTask object, as defined in [@!RFC8984]. However, as use-cases of task systems vary, this +// Section defines relevant parts of the JSTask object to implement the core task capability as well +// as several extensions to it. +// +// Only the core capability MUST be implemented by any task system. +// +// Implementers can choose the extensions that fit their own use case. +// +// For example, the recurrence extension allows having a `Task` object represent a series of recurring `Task`s. +type Task struct { + + // The id of the Task. + // + // This property is immutable. + // + // The id uniquely identifies a JSTask with a particular `uid` and `recurrenceId` within a particular account. + Id string `json:"id"` + + // The `TaskList` id this task belongs to. + // + // A task MUST belong to exactly one `TaskList` at all times (until it is destroyed). + TaskListId string `json:"taskListId"` + + // If `true`, this task is to be considered a draft. + // + // The server will not send any push notifications for alerts. + // + // This may only be set to true upon creation. + // + // Once set to `false`, the value cannot be updated to `true`. + // + // This property MUST NOT appear in `recurrenceOverrides`. + IsDraft bool `json:"isDraft,omitzero"` + + UtcStart UTCDate `json:"utcStart,omitzero"` + + UtcDue UTCDate `json:"utcDue,omitzero"` + + SortOrder uint `json:"sortOrder,omitzero"` + + WorkflowStatus string `json:"workflowStatus,omitempty"` + + jscalendar.Task + + // This specifies the estimated amount of work the task takes to complete. + // + // In Agile software development or Scrum, it is known as complexity or story points. + // + // The number has no actual unit, but a larger number means more work. + EstimatedWork uint `json:"estimatedWork,omitzero"` + + // This specifies the impact or severity of the task, but does not say anything + // about the actual prioritization. + // + // Some examples are: `minor`, `trivial`, `major` or `block`. + // + // Usually, the priority of a task is based upon its impact and urgency. + Impact string `json:"impact,omitempty"` + + // A map of Checklist IDs to Checklist objects, containing checklist items. + Checklists map[string]Checklist `json:"checklists,omitempty"` + + // This is only defined if the id property is a synthetic id, generated by the server + // to represent a particular instance of a recurring Task (immutable; server-set). + // + // This property gives the id of the “real” Task this was generated from. 
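+ //
+ // For example (illustrative), when the server expands a recurring task with id "task-7",
+ // each generated instance gets a synthetic id of its own and a `baseTaskId` of "task-7".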
+ BaseTaskId string `json:"baseTaskId,omitempty"` + + // Is this the authoritative source for this task (i.e., does it control scheduling + // for this task; the task has not been added as a result of an invitation from another + // task management system)? + // + // This is `true` if, and only if: + // !- the task’s “replyTo” property is null; or + // !- the account will receive messages sent to at least one of the methods specified in + // the `replyTo` property of the task. + IsOrigin bool `json:"isOrigin,omitzero"` + + // If true, any user that has access to the task may add themselves to it as a participant + // with the `attendee` role. + // + // This property MUST NOT be altered in the `recurrenceOverrides`; it may only be set on the master object. + // + // This indicates the task will accept “party crasher” RSVPs via iTIP, subject to any other domain-specific + // restrictions, and users may add themselves to the task via JMAP as long as they have the `mayRSVP` + // permission for the task list. + // + // default: false + MayInviteSelf bool `json:"mayInviteSelf,omitzero"` + + // If true, any current participant with the `attendee` role may add new participants with + // the `attendee` role to the task. + // + // This property MUST NOT be altered in the `recurrenceOverrides`; it may only be set on the master object. + // + // default: false + MayInviteOthers bool `json:"mayInviteOthers,omitzero"` + + // If true, only the owners of the task may see the full set of participants. + // + // Other sharees of the task may only see the owners and themselves. + // + // This property MUST NOT be altered in the `recurrenceOverrides`; it may only be set on the master object. + HideAttendees bool `json:"hideAttendees,omitzero"` +} + +// The `TaskNotification` data type records changes made by external entities to tasks in task lists +// the user is subscribed to. +// +// Notifications are stored in the same `Account` as the `Task` that was changed. +type TaskNotification struct { + // The id of the `TaskNotification`. + Id string `json:"id"` + + // The time this notification was created. + Created UTCDate `json:"created,omitzero"` + + // Who made the change. + ChangedBy *TaskPerson `json:"changedBy,omitempty"` + + // Comment sent along with the change by the user that made it. + // + // (e.g. `COMMENT` property in an iTIP message), if any. + Comment string `json:"comment,omitempty"` + + // This MUST be one of + // !- `created` + // !- `updated` + // !- `destroyed` + Type TaskNotificationTypeOption `json:"type"` + + // The id of the Task that this notification is about. + TaskId string `json:"taskId"` + + // Is this task a draft? (created/updated only) + IsDraft bool `json:"isDraft,omitzero"` + + // The data before the change (if updated or destroyed), or the data after creation (if created). + Task *jscalendar.Task `json:"task,omitempty"` + + // A patch encoding the change between the data in the task property, and the data after the update updated only). + TaskPatch PatchObject `json:"taskPatch,omitempty"` +} + +// A Principal represents an individual, group, location (e.g. a room), resource (e.g. a projector) or other entity +// in a collaborative environment. +// +// Sharing in JMAP is generally configured by assigning rights to certain data within an account to other principals, +// for example a user may assign permission to read their calendar to a principal representing another user, or their team. 
+// +// In a shared environment such as a workplace, a user may have access to a large number of principals. +// +// In most systems the user will have access to a single `Account` containing `Principal` objects, but they may +// have access to multiple if, for example, aggregating data from different places. +type Principal struct { + // The id of the principal. + Id string `json:"id"` + + // `Principal` type. + // + // This MUST be one of the following values: + // !- `individual`: This represents a single person. + // !- `group`: This represents a group of people. + // !- `resource`: This represents some resource, e.g. a projector. + // !- `location`: This represents a location. + // !- `other`: This represents some other undefined principal. + Type PrincipalTypeOption `json:"type"` + + // The name of the principal, e.g. `"Jane Doe"`, or `"Room 4B"`. + Name string `json:"name"` + + // A longer description of the principal, for example details about the + // facilities of a resource, or null if no description available. + Description string `json:"description,omitempty"` + + // An email address for the principal, or null if no email is available. + Email string `json:"email,omitempty"` + + // The time zone for this principal, if known. + // + // If not null, the value MUST be a time zone id from the IANA Time Zone Database TZDB. + TimeZone string `json:"timeZone,omitempty"` + + // A map of JMAP capability URIs to domain specific information about the principal in relation + // to that capability, as defined in the document that registered the capability. + Capabilities map[string]any `json:"capabilities,omitempty"` + + // A map of account id to `Account` object for each JMAP Account containing data for + // this principal that the user has access to, or null if none. + Accounts map[string]Account `json:"accounts,omitempty"` +} + +type ShareChangePerson struct { + // The name of the person who made the change. + Name string `json:"name"` + // The email of the person who made the change, or null if no email is available. + Email string `json:"email,omitempty"` + // The id of the Principal corresponding to the person who made the change, or null if no associated principal. + PrincipalId string `json:"principalId,omitempty"` +} + +type ShareNotification struct { + // The id of the `ShareNotification`. + Id string `json:"id"` + + // The time this notification was created. + Created UTCDate `json:"created,omitzero"` + + // Who made the change. + ChangedBy ShareChangePerson `json:"changedBy"` + + // The name of the data type for the object whose permissions have changed, e.g. `Calendar` or `Mailbox`. + ObjectType ObjectType `json:"objectType"` + + // The id of the account where this object exists. + ObjectAccountId string `json:"objectAccountId"` + + // The id of the object that this notification is about. + ObjectId string `json:"objectId"` + + // The name of the object at the time the notification was made. + Name string `json:"name"` + + // The `myRights` property of the object for the user before the change. + OldRights map[string]bool `json:"oldRights,omitempty"` + + // The `myRights` property of the object for the user after the change. + NewRights map[string]bool `json:"newRights,omitempty"` +} + +// TODO unused +type Shareable struct { + // Has the user indicated they wish to see this data? + // + // The initial value for this when data is shared by another user is implementation dependent, + // although data types may give advice on appropriate defaults. 
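+ //
+ // For example, the `Calendar` and `TaskList` types above default this to `false` for
+ // collections shared by another user and to `true` for collections the user created themselves.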
+ IsSubscribed bool `json:"isSubscribed,omitzero"` + + // The set of permissions the user currently has. + // + // Appropriate permissions are domain specific and must be defined per data type. + MyRights map[string]bool `json:"myRights,omitempty"` + + // A map of principal id to rights to give that principal, or null if not shared with anyone. + // + // The account id for the principal id can be found in the capabilities of the `Account` this object is in. + // + // Users with appropriate permission may set this property to modify who the data is shared with. + // + // The principal that owns the account this data is in MUST NOT be in the set of sharees; their rights are implicit. + ShareWith map[string]map[string]bool `json:"shareWith,omitempty"` +} + +// The Quota is an object that displays the limit set to an account usage. +// +// It then shows as well the current usage in regard to that limit. +type Quota struct { + // The unique identifier for this object. + Id string `json:"id"` + + // The resource type of the quota. + ResourceType ResourceType `json:"resourceType"` + + // The current usage of the defined quota, using the `resourceType` defined as unit of measure. + // + // Computation of this value is handled by the server. + Used uint `json:"used"` + + // The hard limit set by this quota, using the `resourceType` defined as unit of measure. + // + // Objects in scope may not be created or updated if this limit is reached. + HardLimit uint `json:"hardLimit"` + + // The Scope data type is used to represent the entities the quota applies to. + // + // It is defined as a "String" with values from the following set: + // !- `account`: The quota information applies to just the client's account. + // !- `domain`: The quota information applies to all accounts sharing this domain. + // !- `global`: The quota information applies to all accounts belonging to the server. + Scope Scope `json:"scope"` + + // The name of the quota. + // + // Useful for managing quotas and using queries for searching. + Name string `json:"name"` + + // A list of all the type names as defined in the "JMAP Types Names" registry + // (e.g., `Email`, `Calendar`, etc.) to which this quota applies. + // + // This allows the quotas to be assigned to distinct or shared data types. + // + // The server MUST filter out any types for which the client did not request the associated capability + // in the `using` section of the request. + // + // Further, the server MUST NOT return Quota objects for which there are no types recognized by the client. + Types []ObjectType `json:"types,omitempty"` + + // The warn limit set by this quota, using the `resourceType` defined as unit of measure. + // + // It can be used to send a warning to an entity about to reach the hard limit soon, but with no + // action taken yet. + // + // If set, it SHOULD be lower than the `softLimit` (if present and different from null) and the `hardLimit`. + WarnLimit uint `json:"warnLimit,omitzero"` + + // The soft limit set by this quota, using the `resourceType` defined as unit of measure. + // + // It can be used to still allow some operations but refuse some others. + // + // What is allowed or not is up to the server. + // + // For example, it could be used for blocking outgoing events of an entity (sending emails, creating + // calendar events, etc.) while still receiving incoming events (receiving emails, receiving calendars + // events, etc.). 
+ // + // If set, it SHOULD be higher than the `warnLimit` (if present and different from null) but lower + // than the `hardLimit`. + SoftLimit uint `json:"softLimit,omitzero"` + + // Arbitrary, free, human-readable description of this quota. + // + // It might be used to explain where the different limits come from and explain the entities and data + // types this quota applies to. + // + // The description MUST be encoded in UTF-8 [RFC3629] as described in [RFC8620], Section 1.5, and + // selected based on an `Accept-Language` header in the request (as defined in [RFC9110], Section 12.5.4) + // or out-of-band information about the user's language or locale. + Description string `json:"description,omitempty"` +} + +// See [RFC8098] for the exact meaning of these different fields. +// +// These fields are defined as case insensitive in [RFC8098] but are case sensitive in this RFC +// and MUST be converted to lowercase by "MDN/parse". +type Disposition struct { + ActionMode ActionMode `json:"actionMode,omitempty"` + SendingMode SendingMode `json:"sendingMode,omitempty"` + Type DispositionTypeOption `json:"type,omitempty"` +} + +// Message Disposition Notifications (MDNs) are defined in [RFC8098] and are used as "read receipts", +// "acknowledgements", or "receipt notifications". +// +// A client can come across MDNs in different ways: +// 1. When receiving an email message, an MDN can be sent to the sender. This specification defines an `MDN/send` method to cover this case. +// 2. When sending an email message, an MDN can be requested. This must be done with the help of a header field, as already specified by [RFC8098]; +// the header field can already be handled by guidance in [RFC8621]. +// 3. When receiving an MDN, the MDN could be related to an existing sent message. This is already covered by [RFC8621] in the +// `EmailSubmission` object. A client might want to display detailed information about a received MDN. +// This specification defines an `MDN/parse` method to cover this case. +type MDN struct { + // The `Email` id of the received message to which this MDN is related. + // + // This property MUST NOT be null for `MDN/send` but MAY be null in the response from the `MDN/parse` method. + ForEmailId string `json:"forEmailId,omitempty"` + + // The subject used as `Subject` header field for this MDN. + Subject string `json:"subject,omitempty"` + + // The human-readable part of the MDN, as plain text. + TextBody string `json:"textBody,omitempty"` + + // If true, the content of the original message will appear in the third component of the `multipart/report` generated + // for the MDN. + // + // See [RFC8098] for details and security considerations. + IncludeOriginalMessage bool `json:"includeOriginalMessage,omitzero"` + + // The name of the Mail User Agent (MUA) creating this MDN. + // + // It is used to build the MDN report part of the MDN. + // + // Note that a null value may have better privacy properties. + ReportingUA string `json:"reportingUA,omitempty"` + + // The object containing the diverse MDN disposition options. + Disposition Disposition `json:"disposition"` + + // The name of the gateway or Message Transfer Agent (MTA) that translated a foreign (non-Internet) + // message disposition notification into this MDN (server-set). + MdnGateway string `json:"mdnGateway,omitempty"` + + // The original recipient address as specified by the sender of the message for which the MDN is being issued (server-set). 
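+ //
+ // For example (illustrative address), `rfc822;someone@example.org`, using the
+ // address-type prefix from [RFC8098].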
+ OriginalRecipient string `json:"originalRecipient,omitempty"` + + // The recipient for which the MDN is being issued. + // + // If set, it overrides the value that would be calculated by the server from the `Identity` defined + // in the `MDN/send` method, unless explicitly set by the client. + FinalRecipient string `json:"finalRecipient,omitempty"` + + // The `Message-ID` header field [RFC5322] (not the JMAP id) of the message for which the MDN is being issued. + OriginalMessageId string `json:"originalMessageId,omitempty"` + + // Additional information in the form of text messages when the `error` disposition modifier appears. + Error []string `json:"error,omitempty"` + + // The object where keys are extension-field names, and values are extension-field values (see [RFC8098], Section 3.3). + ExtensionFields map[string]string `json:"extensionFields,omitempty"` +} + +type SendMDN struct { + // The id of the account to use. + AccountId string `json:"accountId"` + + // The id of the `Identity` to associate with these MDNs. + // + // The server will use this identity to define the sender of the MDNs and to set the `finalRecipient` field. + IdentityId string `json:"identityId"` + + // A map of the creation id (client specified) to MDN objects. + Send map[string]MDN `json:"send,omitempty"` + + // A map of the id to an object containing properties to update on the `Email` object referenced by the `MDN/send` + // if the sending succeeds. + // + // This will always be a backward reference to the creation id. + OnSuccessUpdateEmail map[string]PatchObject `json:"onSuccessUpdateEmail,omitempty"` +} + +type QuotaGetCommand struct { + AccountId string `json:"accountId"` + Ids []string `json:"ids,omitempty"` +} + +type QuotaGetResponse struct { + AccountId string `json:"accountId"` + State State `json:"state,omitempty"` + List []Quota `json:"list,omitempty"` + NotFound []string `json:"notFound,omitempty"` +} + +type AddressBookGetCommand struct { + AccountId string `json:"accountId"` + Ids []string `json:"ids,omitempty"` +} + +type AddressBookGetResponse struct { + AccountId string `json:"accountId"` + State State `json:"state,omitempty"` + List []AddressBook `json:"list,omitempty"` + NotFound []string `json:"notFound,omitempty"` +} + +type ContactCardComparator struct { + // The name of the property on the objects to compare. + Property string `json:"property,omitempty"` + + // If true, sort in ascending order. + // + // Optional; default value: true. + // + // If false, reverse the comparator’s results to sort in descending order. + IsAscending bool `json:"isAscending,omitempty"` + + // The identifier, as registered in the collation registry defined in [RFC4790], + // for the algorithm to use when comparing the order of strings. + // + // Optional; default is server dependent. + // + // The algorithms the server supports are advertised in the capabilities object returned + // with the Session object. + // + // [RFC4790]: https://www.rfc-editor.org/rfc/rfc4790.html + Collation string `json:"collation,omitempty"` + + // ContactCard-specific: The “created” date on the ContactCard. + Created time.Time `json:"created,omitzero"` + + // ContactCard-specific: The "updated” date on the ContactCard. + Updated time.Time `json:"updated,omitzero"` +} + +type ContactCardFilterElement interface { + _isAContactCardFilterElement() // marker method + IsNotEmpty() bool +} + +type ContactCardFilterCondition struct { + // An AddressBook id. + // + // A card must be in this address book to match the condition. 
+ InAddressBook string `json:"inAddressBook,omitempty"` + + // A card must have this string exactly as its uid to match. + Uid string `json:"uid,omitempty"` + + // A card must have a “members” property that contains this string as one of the uids in the set to match. + HasMember string `json:"hasMember,omitempty"` + + // A card must have a type property that equals this string exactly to match. + Kind string `json:"kind,omitempty"` + + // The “created” date-time of the ContactCard must be before this date-time to match the condition. + CreatedBefore UTCDate `json:"createdBefore,omitzero"` + + // The “created” date-time of the ContactCard must be the same or after this date-time to match the condition. + CreatedAfter UTCDate `json:"createdAfter,omitzero"` + + // The “updated” date-time of the ContactCard must be before this date-time to match the condition. + UpdatedBefore UTCDate `json:"updatedBefore,omitzero"` + + // The “updated” date-time of the ContactCard must be the same or after this date-time to match the condition. + UpdatedAfter UTCDate `json:"updatedAfter,omitzero"` + + // A card matches this condition if the text matches with text in the card. + Text string `json:"text,omitempty"` + + // A card matches this condition if the value of any NameComponent in the “name” property, or the + // “full” property in the “name” property of the card matches the value. + Name string `json:"name,omitempty"` + + // A card matches this condition if the value of a NameComponent with kind “given” inside the “name” property of + // the card matches the value. + NameGiven string `json:"name/given,omitempty"` + + // A card matches this condition if the value of a NameComponent with kind “surname” inside the “name” property + // of the card matches the value. + NameSurname string `json:"name/surname,omitempty"` + + // A card matches this condition if the value of a NameComponent with kind “surname2” inside the “name” property + // of the card matches the value. + NameSurname2 string `json:"name/surname2,omitempty"` + + // A card matches this condition if the “name” of any NickName in the “nickNames” property of the card matches the value. + NickName string `json:"nickName,omitempty"` + + // A card matches this condition if the “name” of any Organization in the “organizations” property of the card + // matches the value. + Organization string `json:"organization,omitempty"` + + // A card matches this condition if the “address” or “label” of any EmailAddress in the “emails” property of the + // card matches the value. + Email string `json:"email,omitempty"` + + // A card matches this condition if the “number” or “label” of any Phone in the “phones” property of the card + // matches the value. + Phone string `json:"phone,omitempty"` + + // A card matches this condition if the “service”, “uri”, “user”, or “label” of any OnlineService in the + // “onlineServices” property of the card matches the value. + OnlineService string `json:"onlineService,omitempty"` + + // A card matches this condition if the value of any StreetComponent in the “street” property, or the “locality”, + // “region”, “country”, or “postcode” property in any Address in the “addresses” property of the card matches the value. + Address string `json:"address,omitempty"` + + // A card matches this condition if the “note” of any Note in the “notes” property of the card matches the value. 
+ Note string `json:"note,omitempty"` +} + +func (f ContactCardFilterCondition) _isAContactCardFilterElement() { +} + +func (f ContactCardFilterCondition) IsNotEmpty() bool { + if len(f.InAddressBook) != 0 { + return true + } + if f.Uid != "" { + return true + } + if f.HasMember != "" { + return true + } + if f.Kind != "" { + return true + } + if f.CreatedBefore != "" { + return true + } + if f.CreatedAfter != "" { + return true + } + if f.UpdatedBefore != "" { + return true + } + if f.UpdatedAfter != "" { + return true + } + if f.Text != "" { + return true + } + if f.Name != "" { + return true + } + if f.NameGiven != "" { + return true + } + if f.NameSurname != "" { + return true + } + if f.NameSurname2 != "" { + return true + } + if f.NickName != "" { + return true + } + if f.Organization != "" { + return true + } + if f.Email != "" { + return true + } + if f.Phone != "" { + return true + } + if f.OnlineService != "" { + return true + } + if f.Address != "" { + return true + } + if f.Note != "" { + return true + } + return false +} + +var _ ContactCardFilterElement = &ContactCardFilterCondition{} + +type ContactCardFilterOperator struct { + Operator FilterOperatorTerm `json:"operator"` + Conditions []ContactCardFilterElement `json:"conditions,omitempty"` +} + +func (o ContactCardFilterOperator) _isAContactCardFilterElement() { +} + +func (o ContactCardFilterOperator) IsNotEmpty() bool { + return len(o.Conditions) > 0 +} + +var _ ContactCardFilterElement = &ContactCardFilterOperator{} + +type ContactCardQueryCommand struct { + AccountId string `json:"accountId"` + + Filter ContactCardFilterElement `json:"filter,omitempty"` + + Sort []ContactCardComparator `json:"sort,omitempty"` + + // The zero-based index of the first id in the full list of results to return. + // + // If a negative value is given, it is an offset from the end of the list. + // Specifically, the negative value MUST be added to the total number of results given + // the filter, and if still negative, it’s clamped to 0. This is now the zero-based + // index of the first id to return. + // + // If the index is greater than or equal to the total number of objects in the results + // list, then the ids array in the response will be empty, but this is not an error. + Position uint `json:"position,omitempty"` + + // An Email id. + // + // If supplied, the position argument is ignored. + // The index of this id in the results will be used in combination with the anchorOffset + // argument to determine the index of the first result to return. + Anchor string `json:"anchor,omitempty"` + + // The index of the first result to return relative to the index of the anchor, + // if an anchor is given. + // + // Default: 0. + // + // This MAY be negative. + // + // For example, -1 means the Email immediately preceding the anchor is the first result in + // the list returned. + AnchorOffset int `json:"anchorOffset,omitzero"` + + // The maximum number of results to return. + // + // If null, no limit presumed. + // The server MAY choose to enforce a maximum limit argument. + // In this case, if a greater value is given (or if it is null), the limit is clamped + // to the maximum; the new limit is returned with the response so the client is aware. + // + // If a negative value is given, the call MUST be rejected with an invalidArguments error. + Limit uint `json:"limit,omitempty"` + + // Does the client wish to know the total number of results in the query? 
+ //
+ // This may be slow and expensive for servers to calculate, particularly with complex filters,
+ // so clients should take care to only request the total when needed.
+ CalculateTotal bool `json:"calculateTotal,omitempty"`
+}
+
+type ContactCardQueryResponse struct {
+ // The id of the account used for the call.
+ AccountId string `json:"accountId"`
+
+ // A string encoding the current state of the query on the server.
+ //
+ // This string MUST change if the results of the query (i.e., the matching ids and their sort order) have changed.
+ // The queryState string MAY change if something has changed on the server, which means the results may have changed
+ // but the server doesn’t know for sure.
+ //
+ // The queryState string only represents the ordered list of ids that match the particular query (including its sort/filter).
+ // There is no requirement for it to change if a property on an object matching the query changes but the query results are unaffected
+ // (indeed, it is more efficient if the queryState string does not change in this case).
+ //
+ // The queryState string only has meaning when compared to future responses to a query with the same type/sort/filter or when used with
+ // /queryChanges to fetch changes.
+ //
+ // Should a client receive back a response with a different queryState string to a previous call, it MUST either throw away the currently
+ // cached query and fetch it again (note, this does not require fetching the records again, just the list of ids) or call
+ // ContactCard/queryChanges to get the difference.
+ QueryState State `json:"queryState"`
+
+ // This is true if the server supports calling ContactCard/queryChanges with these filter/sort parameters.
+ //
+ // Note, this does not guarantee that the ContactCard/queryChanges call will succeed, as it may only be possible for a limited time
+ // afterwards due to server internal implementation details.
+ CanCalculateChanges bool `json:"canCalculateChanges"`
+
+ // The zero-based index of the first result in the ids array within the complete list of query results.
+ Position uint `json:"position"`
+
+ // The list of ids for each ContactCard in the query results, starting at the index given by the position argument of this
+ // response and continuing until it hits the end of the results or reaches the limit number of ids.
+ //
+ // If position is >= total, this MUST be the empty list.
+ Ids []string `json:"ids"`
+
+ // The total number of ContactCards in the results (given the filter).
+ //
+ // Only if requested.
+ //
+ // This argument MUST be omitted if the calculateTotal request argument is not true.
+ Total uint `json:"total,omitempty,omitzero"`
+
+ // The limit enforced by the server on the maximum number of results to return (if set by the server).
+ //
+ // This is only returned if the server set a limit or used a different limit than that given in the request.
+ Limit uint `json:"limit,omitempty,omitzero"`
+}
+
+type ContactCardGetCommand struct {
+ // The ids of the ContactCard objects to return.
+ //
+ // If null, then all records of the data type are returned, if this is supported for that
+ // data type and the number of records does not exceed the maxObjectsInGet limit.
+ Ids []string `json:"ids,omitempty"`
+
+ // The id of the account to use.
+ AccountId string `json:"accountId"`
+
+ // If supplied, only the properties listed in the array are returned for each ContactCard object.
+ //
+ // The id property of the object is always returned, even if not explicitly requested.
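+ //
+ // For example (illustrative), requesting `["name", "emails"]` returns just those two
+ // properties plus `id` for each card.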
+ // + // If an invalid property is requested, the call MUST be rejected with an invalidArguments error. + Properties []string `json:"properties,omitempty"` +} + +type ContactCardGetRefCommand struct { + // The ids of the ContactCard objects to return. + // + // If null, then all records of the data type are returned, if this is supported for that + // data type and the number of records does not exceed the maxObjectsInGet limit. + IdsRef *ResultReference `json:"#ids,omitempty"` + + // The id of the account to use. + AccountId string `json:"accountId"` + + // If supplied, only the properties listed in the array are returned for each ContactCard object. + // + // The id property of the object is always returned, even if not explicitly requested. + // + // If an invalid property is requested, the call MUST be rejected with an invalidArguments error. + Properties []string `json:"properties,omitempty"` +} + +type ContactCardGetResponse struct { + // The id of the account used for the call. + AccountId string `json:"accountId"` + + // A (preferably short) string representing the state on the server for all the data of this type + // in the account (not just the objects returned in this call). + // + // If the data changes, this string MUST change. + // If the Email data is unchanged, servers SHOULD return the same state string on subsequent requests for this data type. + State State `json:"state"` + + // An array of the ContactCard objects requested. + // + // This is the empty array if no objects were found or if the ids argument passed in was also an empty array. + // + // The results MAY be in a different order to the ids in the request arguments. + // + // If an identical id is included more than once in the request, the server MUST only include it once in either + // the list or the notFound argument of the response. + List []jscontact.ContactCard `json:"list"` + + // This array contains the ids passed to the method for records that do not exist. + // + // The array is empty if all requested ids were found or if the ids argument passed in was either null or an empty array. + NotFound []any `json:"notFound"` +} + +type ContactCardUpdate map[string]any + +type ContactCardSetCommand struct { + // The id of the account to use. + AccountId string `json:"accountId"` + + // This is a state string as returned by the `ContactCard/get` method. + // + // If supplied, the string must match the current state; otherwise, the method will be aborted and a + // `stateMismatch` error returned. + // + // If null, any changes will be applied to the current state. + IfInState string `json:"ifInState,omitempty"` + + // A map of a creation id (a temporary id set by the client) to ContactCard objects, + // or null if no objects are to be created. + // + // The ContactCard object type definition may define default values for properties. + // + // Any such property may be omitted by the client. + // + // The client MUST omit any properties that may only be set by the server. + Create map[string]jscontact.ContactCard `json:"create,omitempty"` + + // A map of an id to a `Patch` object to apply to the current Email object with that id, + // or null if no objects are to be updated. + // + // A `PatchObject` is of type `String[*]` and represents an unordered set of patches. + // + // The keys are a path in JSON Pointer Format [@!RFC6901], with an implicit leading `/` (i.e., prefix each key + // with `/` before applying the JSON Pointer evaluation algorithm). 
+	//
+	// All paths MUST also conform to the following restrictions; if there is any violation, the update
+	// MUST be rejected with an `invalidPatch` error:
+	// !- The pointer MUST NOT reference inside an array (i.e., you MUST NOT insert/delete from an array; the array MUST be replaced in its entirety instead).
+	// !- All parts prior to the last (i.e., the value after the final slash) MUST already exist on the object being patched.
+	// !- There MUST NOT be two patches in the `PatchObject` where the pointer of one is the prefix of the pointer of the other, e.g., `"alerts/1/offset"` and `"alerts"`.
+	//
+	// The value associated with each pointer determines how to apply that patch:
+	// !- If null, set to the default value if specified for this property; otherwise, remove the property from the patched object. If the key is not present in the parent, this is a no-op.
+	// !- Anything else: The value to set for this property (this may be a replacement or addition to the object being patched).
+	//
+	// Any server-set properties MAY be included in the patch if their value is identical to the current server value
+	// (before applying the patches to the object). Otherwise, the update MUST be rejected with an `invalidProperties` `SetError`.
+	//
+	// This patch definition is designed such that an entire ContactCard object is also a valid `PatchObject`.
+	//
+	// The client may choose to optimise network usage by just sending the diff or may send the whole object; the server
+	// processes it the same either way.
+	Update map[string]ContactCardUpdate `json:"update,omitempty"`
+
+	// A list of ids for ContactCard objects to permanently delete, or null if no objects are to be destroyed.
+	Destroy []string `json:"destroy,omitempty"`
+}
+
+type ContactCardSetResponse struct {
+	// The id of the account used for the call.
+	AccountId string `json:"accountId"`
+
+	// The state string that would have been returned by ContactCard/get before making the
+	// requested changes, or null if the server doesn’t know what the previous state
+	// string was.
+	OldState State `json:"oldState,omitempty"`
+
+	// The state string that will now be returned by ContactCard/get.
+	NewState State `json:"newState"`
+
+	// A map of the creation id to an object containing any properties of the created ContactCard object
+	// that were not sent by the client.
+	//
+	// This includes all server-set properties (such as the id in most object types) and any properties
+	// that were omitted by the client and thus set to a default by the server.
+	//
+	// This argument is null if no ContactCard objects were successfully created.
+	Created map[string]*jscontact.ContactCard `json:"created,omitempty"`
+
+	// The keys in this map are the ids of all ContactCards that were successfully updated.
+	//
+	// The value for each id is a ContactCard object containing any property that changed in a way not
+	// explicitly requested by the PatchObject sent to the server, or null if none.
+	//
+	// This lets the client know of any changes to server-set or computed properties.
+	//
+	// This argument is null if no ContactCard objects were successfully updated.
+	Updated map[string]*jscontact.ContactCard `json:"updated,omitempty"`
+
+	// A list of ContactCard ids for records that were successfully destroyed, or null if none.
+	Destroyed []string `json:"destroyed,omitempty"`
+
+	// A map of the creation id to a SetError object for each record that failed to be created,
+	// or null if all successful.
+ NotCreated map[string]SetError `json:"notCreated,omitempty"` + + // A map of the ContactCard id to a SetError object for each record that failed to be updated, + // or null if all successful. + NotUpdated map[string]SetError `json:"notUpdated,omitempty"` + + // A map of the ContactCard id to a SetError object for each record that failed to be destroyed, + // or null if all successful. + NotDestroyed map[string]SetError `json:"notDestroyed,omitempty"` +} + +type CalendarEventParseCommand struct { + // The id of the account to use. + AccountId string `json:"accountId"` + + // The ids of the blobs to parse + BlobIds []string `json:"blobIds,omitempty"` + + // If supplied, only the properties listed in the array are returned for each CalendarEvent object. + // + // If omitted, defaults to all the properties. + Properties []string `json:"properties,omitempty"` +} + +type CalendarEventParseResponse struct { + // The id of the account used for the call. + AccountId string `json:"accountId"` + + // A map of blob ids to parsed CalendarEvent objects representations for each successfully + // parsed blob, or null if none. + Parsed map[string][]CalendarEvent `json:"parsed,omitempty"` + + // A list of blob ids given that could not be found, or null if none. + NotFound []string `json:"notFound,omitempty"` + + // A list of blob ids given that corresponded to blobs that could not be parsed as + // CalendarEvents, or null if none. + NotParsable []string `json:"notParsable,omitempty"` +} + +type CalendarGetCommand struct { + AccountId string `json:"accountId"` + Ids []string `json:"ids,omitempty"` +} + +type CalendarGetResponse struct { + AccountId string `json:"accountId"` + State State `json:"state,omitempty"` + List []Calendar `json:"list,omitempty"` + NotFound []string `json:"notFound,omitempty"` +} + +type CalendarEventComparator struct { + // The name of the property on the objects to compare. + Property string `json:"property,omitempty"` + + // If true, sort in ascending order. + // + // Optional; default value: true. + // + // If false, reverse the comparator’s results to sort in descending order. + IsAscending bool `json:"isAscending,omitempty"` + + // The identifier, as registered in the collation registry defined in [RFC4790], + // for the algorithm to use when comparing the order of strings. + // + // Optional; default is server dependent. + // + // The algorithms the server supports are advertised in the capabilities object returned + // with the Session object. + // + // [RFC4790]: https://www.rfc-editor.org/rfc/rfc4790.html + Collation string `json:"collation,omitempty"` + + // CalendarEvent-specific: If true, the server will expand any recurring event. + // + // If true, the filter MUST be just a FilterCondition (not a FilterOperator) and MUST include both + // a “before” and “after” property. This ensures the server is not asked to return an infinite number of results. + // default: false + ExpandRecurrences bool `json:"expandRecurrences,omitzero"` + + // CalendarEvent-specific: The time zone for before/after filter conditions. + // default: “Etc/UTC” + TimeZone string `json:"timeZone,omitempty"` +} + +type CalendarEventFilterElement interface { + _isACalendarEventFilterElement() // marker method + IsNotEmpty() bool +} + +type CalendarEventFilterCondition struct { + // A calendar id. + // An event must be in this calendar to match the condition. 
+ InCalendar string `json:"inCalendar,omitempty"` + + // The end of the event, or any recurrence of the event, in the time zone given as + // the timeZone argument, must be after this date to match the condition. + After LocalDate `json:"after,omitzero"` + + // The start of the event, or any recurrence of the event, in the time zone given + // as the timeZone argument, must be before this date to match the condition. + Before LocalDate `json:"before,omitzero"` + + // Looks for the text in the title, description, locations (matching name/description), + // participants (matching name/email) and any other textual properties of the event + // or any recurrence of the event. + Text string `json:"text,omitempty"` + + // Looks for the text in the title property of the event, or the overridden title + // property of a recurrence. + Title string `json:"title,omitempty"` + + // Looks for the text in the description property of the event, or the overridden + // description property of a recurrence. + Description string `json:"description,omitempty"` + + // Looks for the text in the locations property of the event (matching name/description + // of a location), or the overridden locations property of a recurrence. + Location string `json:"location,omitempty"` + + // Looks for the text in the name or email fields of a participant in the participants + // property of the event, or the overridden participants property of a recurrence, + // where the participant has a role of “owner”. + Owner string `json:"owner,omitempty"` + + // Looks for the text in the name or email fields of a participant in the participants + // property of the event, or the overridden participants property of a recurrence, + // where the participant has a role of “attendee”. + Attendee string `json:"attendee,omitempty"` + + // Must match. If owner/attendee condition, status must be of that participant. Otherwise any. + ParticipationStatus string `json:"participationStatus,omitempty"` + + // The uid of the event is exactly the given string. + Uid string `json:"uid,omitempty"` +} + +func (f CalendarEventFilterCondition) _isACalendarEventFilterElement() { +} + +func (f CalendarEventFilterCondition) IsNotEmpty() bool { + if f.InCalendar != "" { + return true + } + if f.After != "" { + return true + } + if f.Before != "" { + return true + } + if f.Text != "" { + return true + } + if f.Title != "" { + return true + } + if f.Description != "" { + return true + } + if f.Location != "" { + return true + } + if f.Owner != "" { + return true + } + if f.Attendee != "" { + return true + } + if f.ParticipationStatus != "" { + return true + } + if f.Uid != "" { + return true + } + return false +} + +var _ CalendarEventFilterElement = &CalendarEventFilterCondition{} + +type CalendarEventFilterOperator struct { + Operator FilterOperatorTerm `json:"operator"` + Conditions []CalendarEventFilterElement `json:"conditions,omitempty"` +} + +func (o CalendarEventFilterOperator) _isACalendarEventFilterElement() { +} + +func (o CalendarEventFilterOperator) IsNotEmpty() bool { + return len(o.Conditions) > 0 +} + +var _ CalendarEventFilterElement = &CalendarEventFilterOperator{} + +type CalendarEventQueryCommand struct { + AccountId string `json:"accountId"` + + Filter CalendarEventFilterElement `json:"filter,omitempty"` + + Sort []CalendarEventComparator `json:"sort,omitempty"` + + // The zero-based index of the first id in the full list of results to return. + // + // If a negative value is given, it is an offset from the end of the list. 
+ // Specifically, the negative value MUST be added to the total number of results given + // the filter, and if still negative, it’s clamped to 0. This is now the zero-based + // index of the first id to return. + // + // If the index is greater than or equal to the total number of objects in the results + // list, then the ids array in the response will be empty, but this is not an error. + Position uint `json:"position,omitempty"` + + // An Email id. + // + // If supplied, the position argument is ignored. + // The index of this id in the results will be used in combination with the anchorOffset + // argument to determine the index of the first result to return. + Anchor string `json:"anchor,omitempty"` + + // The index of the first result to return relative to the index of the anchor, + // if an anchor is given. + // + // Default: 0. + // + // This MAY be negative. + // + // For example, -1 means the Email immediately preceding the anchor is the first result in + // the list returned. + AnchorOffset int `json:"anchorOffset,omitzero"` + + // The maximum number of results to return. + // + // If null, no limit presumed. + // The server MAY choose to enforce a maximum limit argument. + // In this case, if a greater value is given (or if it is null), the limit is clamped + // to the maximum; the new limit is returned with the response so the client is aware. + // + // If a negative value is given, the call MUST be rejected with an invalidArguments error. + Limit uint `json:"limit,omitempty"` + + // Does the client wish to know the total number of results in the query? + // + // This may be slow and expensive for servers to calculate, particularly with complex filters, + // so clients should take care to only request the total when needed. + CalculateTotal bool `json:"calculateTotal,omitempty"` +} + +type CalendarEventQueryResponse struct { + // The id of the account used for the call. + AccountId string `json:"accountId"` + + // A string encoding the current state of the query on the server. + // + // This string MUST change if the results of the query (i.e., the matching ids and their sort order) have changed. + // The queryState string MAY change if something has changed on the server, which means the results may have changed + // but the server doesn’t know for sure. + // + // The queryState string only represents the ordered list of ids that match the particular query (including its sort/filter). + // There is no requirement for it to change if a property on an object matching the query changes but the query results are unaffected + // (indeed, it is more efficient if the queryState string does not change in this case). + // + // The queryState string only has meaning when compared to future responses to a query with the same type/sort/filter or when used with + // /queryChanges to fetch changes. + // + // Should a client receive back a response with a different queryState string to a previous call, it MUST either throw away the currently + // cached query and fetch it again (note, this does not require fetching the records again, just the list of ids) or call + // CalendarEvent/queryChanges to get the difference. + QueryState State `json:"queryState"` + + // This is true if the server supports calling CalendarEvent/queryChanges with these filter/sort parameters. + // + // Note, this does not guarantee that the CalendarEvent/queryChanges call will succeed, as it may only be possible for a limited time + // afterwards due to server internal implementation details. 
+ CanCalculateChanges bool `json:"canCalculateChanges"` + + // The zero-based index of the first result in the ids array within the complete list of query results. + Position uint `json:"position"` + + // The list of ids for each ContactCard in the query results, starting at the index given by the position argument of this + // response and continuing until it hits the end of the results or reaches the limit number of ids. + // + // If position is >= total, this MUST be the empty list. + Ids []string `json:"ids"` + + // The total number of CalendarEvents in the results (given the filter). + // + // Only if requested. + // + // This argument MUST be omitted if the calculateTotal request argument is not true. + Total uint `json:"total,omitempty,omitzero"` + + // The limit enforced by the server on the maximum number of results to return (if set by the server). + // + // This is only returned if the server set a limit or used a different limit than that given in the request. + Limit uint `json:"limit,omitempty,omitzero"` +} + +type CalendarEventGetCommand struct { + // The ids of the CalendarEvent objects to return. + // + // If null, then all records of the data type are returned, if this is supported for that + // data type and the number of records does not exceed the maxObjectsInGet limit. + Ids []string `json:"ids,omitempty"` + + // The id of the account to use. + AccountId string `json:"accountId"` + + // If supplied, only the properties listed in the array are returned for each CalendarEvent object. + // + // The id property of the object is always returned, even if not explicitly requested. + // + // If an invalid property is requested, the call MUST be rejected with an invalidArguments error. + Properties []string `json:"properties,omitempty"` +} + +type CalendarEventGetRefCommand struct { + // The ids of the CalendarEvent objects to return. + // + // If null, then all records of the data type are returned, if this is supported for that + // data type and the number of records does not exceed the maxObjectsInGet limit. + IdsRef *ResultReference `json:"#ids,omitempty"` + + // The id of the account to use. + AccountId string `json:"accountId"` + + // If supplied, only the properties listed in the array are returned for each CalendarEvent object. + // + // The id property of the object is always returned, even if not explicitly requested. + // + // If an invalid property is requested, the call MUST be rejected with an invalidArguments error. + Properties []string `json:"properties,omitempty"` +} + +type CalendarEventGetResponse struct { + // The id of the account used for the call. + AccountId string `json:"accountId"` + + // A (preferably short) string representing the state on the server for all the data of this type + // in the account (not just the objects returned in this call). + // + // If the data changes, this string MUST change. + // If the Email data is unchanged, servers SHOULD return the same state string on subsequent requests for this data type. + State State `json:"state"` + + // An array of the CalendarEvent objects requested. + // + // This is the empty array if no objects were found or if the ids argument passed in was also an empty array. + // + // The results MAY be in a different order to the ids in the request arguments. + // + // If an identical id is included more than once in the request, the server MUST only include it once in either + // the list or the notFound argument of the response. 
+ List []CalendarEvent `json:"list"` + + // This array contains the ids passed to the method for records that do not exist. + // + // The array is empty if all requested ids were found or if the ids argument passed in was either null or an empty array. + NotFound []any `json:"notFound"` +} + +type CalendarEventUpdate map[string]any + +type CalendarEventSetCommand struct { + // The id of the account to use. + AccountId string `json:"accountId"` + + // This is a state string as returned by the `CalendarEvent/get` method. + // + // If supplied, the string must match the current state; otherwise, the method will be aborted and a + // `stateMismatch` error returned. + // + // If null, any changes will be applied to the current state. + IfInState string `json:"ifInState,omitempty"` + + // A map of a creation id (a temporary id set by the client) to CalendarEvent objects, + // or null if no objects are to be created. + // + // The CalendarEvent object type definition may define default values for properties. + // + // Any such property may be omitted by the client. + // + // The client MUST omit any properties that may only be set by the server. + Create map[string]CalendarEvent `json:"create,omitempty"` + + // A map of an id to a `Patch` object to apply to the current Email object with that id, + // or null if no objects are to be updated. + // + // A `PatchObject` is of type `String[*]` and represents an unordered set of patches. + // + // The keys are a path in JSON Pointer Format [@!RFC6901], with an implicit leading `/` (i.e., prefix each key + // with `/` before applying the JSON Pointer evaluation algorithm). + // + // All paths MUST also conform to the following restrictions; if there is any violation, the update + // MUST be rejected with an `invalidPatch` error: + // !- The pointer MUST NOT reference inside an array (i.e., you MUST NOT insert/delete from an array; the array MUST be replaced in its entirety instead). + // !- All parts prior to the last (i.e., the value after the final slash) MUST already exist on the object being patched. + // !- There MUST NOT be two patches in the `PatchObject` where the pointer of one is the prefix of the pointer of the other, e.g., `"alerts/1/offset"` and `"alerts"`. + // + // The value associated with each pointer determines how to apply that patch: + // !- If null, set to the default value if specified for this property; otherwise, remove the property from the patched object. If the key is not present in the parent, this a no-op. + // !- Anything else: The value to set for this property (this may be a replacement or addition to the object being patched). + // + // Any server-set properties MAY be included in the patch if their value is identical to the current server value + // (before applying the patches to the object). Otherwise, the update MUST be rejected with an `invalidProperties` `SetError`. + // + // This patch definition is designed such that an entire Email object is also a valid `PatchObject`. + // + // The client may choose to optimise network usage by just sending the diff or may send the whole object; the server + // processes it the same either way. + Update map[string]CalendarEventUpdate `json:"update,omitempty"` + + // A list of ids for CalendarEvent objects to permanently delete, or null if no objects are to be destroyed. + Destroy []string `json:"destroy,omitempty"` +} + +type CalendarEventSetResponse struct { + // The id of the account used for the call. 
+	AccountId string `json:"accountId"`
+
+	// The state string that would have been returned by CalendarEvent/get before making the
+	// requested changes, or null if the server doesn’t know what the previous state
+	// string was.
+	OldState State `json:"oldState,omitempty"`
+
+	// The state string that will now be returned by CalendarEvent/get.
+	NewState State `json:"newState"`
+
+	// A map of the creation id to an object containing any properties of the created CalendarEvent object
+	// that were not sent by the client.
+	//
+	// This includes all server-set properties (such as the id in most object types) and any properties
+	// that were omitted by the client and thus set to a default by the server.
+	//
+	// This argument is null if no CalendarEvent objects were successfully created.
+	Created map[string]*CalendarEvent `json:"created,omitempty"`
+
+	// The keys in this map are the ids of all CalendarEvents that were successfully updated.
+	//
+	// The value for each id is a CalendarEvent object containing any property that changed in a way not
+	// explicitly requested by the PatchObject sent to the server, or null if none.
+	//
+	// This lets the client know of any changes to server-set or computed properties.
+	//
+	// This argument is null if no CalendarEvent objects were successfully updated.
+	Updated map[string]*CalendarEvent `json:"updated,omitempty"`
+
+	// A list of CalendarEvent ids for records that were successfully destroyed, or null if none.
+	Destroyed []string `json:"destroyed,omitempty"`
+
+	// A map of the creation id to a SetError object for each record that failed to be created,
+	// or null if all successful.
+	NotCreated map[string]SetError `json:"notCreated,omitempty"`
+
+	// A map of the CalendarEvent id to a SetError object for each record that failed to be updated,
+	// or null if all successful.
+	NotUpdated map[string]SetError `json:"notUpdated,omitempty"`
+
+	// A map of the CalendarEvent id to a SetError object for each record that failed to be destroyed,
+	// or null if all successful.
+ NotDestroyed map[string]SetError `json:"notDestroyed,omitempty"` +} + +type ErrorResponse struct { + Type string `json:"type"` + Description string `json:"description,omitempty"` +} + +const ( + ErrorCommand Command = "error" // only occurs in responses + CommandBlobGet Command = "Blob/get" + CommandBlobUpload Command = "Blob/upload" + CommandEmailGet Command = "Email/get" + CommandEmailQuery Command = "Email/query" + CommandEmailChanges Command = "Email/changes" + CommandEmailSet Command = "Email/set" + CommandEmailImport Command = "Email/import" + CommandEmailSubmissionGet Command = "EmailSubmission/get" + CommandEmailSubmissionSet Command = "EmailSubmission/set" + CommandThreadGet Command = "Thread/get" + CommandMailboxGet Command = "Mailbox/get" + CommandMailboxSet Command = "Mailbox/set" + CommandMailboxQuery Command = "Mailbox/query" + CommandMailboxChanges Command = "Mailbox/changes" + CommandIdentityGet Command = "Identity/get" + CommandIdentitySet Command = "Identity/set" + CommandVacationResponseGet Command = "VacationResponse/get" + CommandVacationResponseSet Command = "VacationResponse/set" + CommandSearchSnippetGet Command = "SearchSnippet/get" + CommandQuotaGet Command = "Quota/get" + CommandAddressBookGet Command = "AddressBook/get" + CommandContactCardQuery Command = "ContactCard/query" + CommandContactCardGet Command = "ContactCard/get" + CommandContactCardSet Command = "ContactCard/set" + CommandCalendarEventParse Command = "CalendarEvent/parse" + CommandCalendarGet Command = "Calendar/get" + CommandCalendarEventQuery Command = "CalendarEvent/query" + CommandCalendarEventGet Command = "CalendarEvent/get" + CommandCalendarEventSet Command = "CalendarEvent/set" +) + +var CommandResponseTypeMap = map[Command]func() any{ + ErrorCommand: func() any { return ErrorResponse{} }, + CommandBlobGet: func() any { return BlobGetResponse{} }, + CommandBlobUpload: func() any { return BlobUploadResponse{} }, + CommandMailboxQuery: func() any { return MailboxQueryResponse{} }, + CommandMailboxGet: func() any { return MailboxGetResponse{} }, + CommandMailboxSet: func() any { return MailboxSetResponse{} }, + CommandMailboxChanges: func() any { return MailboxChangesResponse{} }, + CommandEmailQuery: func() any { return EmailQueryResponse{} }, + CommandEmailChanges: func() any { return EmailChangesResponse{} }, + CommandEmailGet: func() any { return EmailGetResponse{} }, + CommandEmailSet: func() any { return EmailSetResponse{} }, + CommandEmailSubmissionGet: func() any { return EmailSubmissionGetResponse{} }, + CommandEmailSubmissionSet: func() any { return EmailSubmissionSetResponse{} }, + CommandThreadGet: func() any { return ThreadGetResponse{} }, + CommandIdentityGet: func() any { return IdentityGetResponse{} }, + CommandIdentitySet: func() any { return IdentitySetResponse{} }, + CommandVacationResponseGet: func() any { return VacationResponseGetResponse{} }, + CommandVacationResponseSet: func() any { return VacationResponseSetResponse{} }, + CommandSearchSnippetGet: func() any { return SearchSnippetGetResponse{} }, + CommandQuotaGet: func() any { return QuotaGetResponse{} }, + CommandAddressBookGet: func() any { return AddressBookGetResponse{} }, + CommandContactCardQuery: func() any { return ContactCardQueryResponse{} }, + CommandContactCardGet: func() any { return ContactCardGetResponse{} }, + CommandContactCardSet: func() any { return ContactCardSetResponse{} }, + CommandCalendarEventParse: func() any { return CalendarEventParseResponse{} }, + CommandCalendarGet: func() any { 
return CalendarGetResponse{} }, + CommandCalendarEventQuery: func() any { return CalendarEventQueryResponse{} }, + CommandCalendarEventGet: func() any { return CalendarEventGetResponse{} }, + CommandCalendarEventSet: func() any { return CalendarEventSetResponse{} }, +} diff --git a/pkg/jmap/jmap_session.go b/pkg/jmap/jmap_session.go new file mode 100644 index 0000000000..db20758d59 --- /dev/null +++ b/pkg/jmap/jmap_session.go @@ -0,0 +1,138 @@ +package jmap + +import ( + "errors" + "fmt" + "net/url" +) + +type SessionEventListener interface { + OnSessionOutdated(session *Session, newSessionState SessionState) +} + +// Cached user related information +// +// This information is typically retrieved once (or at least for a certain period of time) from the +// JMAP well-known endpoint of Stalwart and then kept in cache to avoid the performance cost of +// retrieving it over and over again. +// +// This is really only needed due to the Graph API limitations, since ideally, the account ID should +// be passed as a request parameter by the UI, in order to support a user having multiple accounts. +// +// Keeping track of the JMAP URL might be useful though, in case of Stalwart sharding strategies making +// use of that, by providing different URLs for JMAP on a per-user basis, and that is not something +// we would want to query before every single JMAP request. On the other hand, that then also creates +// a risk of going out-of-sync, e.g. if a node is down and the user is reassigned to a different node. +// There might be webhooks to subscribe to in Stalwart to be notified of such situations, in which case +// the Session needs to be removed from the cache. +// +// The Username is only here for convenience, it could just as well be passed as a separate parameter +// instead of being part of the Session, since the username is always part of the request (typically in +// the authentication token payload.) 
+type Session struct {
+	// The name of the user to use to authenticate against Stalwart
+	Username string
+
+	// The base URL to use for JMAP operations towards Stalwart
+	JmapUrl url.URL
+	// An identifier of the JmapUrl to use in metrics and tracing
+	JmapEndpoint string
+
+	// The upload URL template
+	UploadUrlTemplate string
+	// An identifier of the UploadUrlTemplate to use in metrics and tracing
+	UploadEndpoint string
+
+	// The download URL template
+	DownloadUrlTemplate string
+	// An identifier of the DownloadUrlTemplate to use in metrics and tracing
+	DownloadEndpoint string
+
+	WebsocketUrl          *url.URL
+	SupportsWebsocketPush bool
+	WebsocketEndpoint     string
+
+	SessionResponse
+}
+
+var (
+	invalidSessionResponseErrorMissingUsername     = SimpleError{code: JmapErrorInvalidSessionResponse, err: errors.New("JMAP session response does not provide a username")}
+	invalidSessionResponseErrorMissingApiUrl       = SimpleError{code: JmapErrorInvalidSessionResponse, err: errors.New("JMAP session response does not provide an API URL")}
+	invalidSessionResponseErrorInvalidApiUrl       = SimpleError{code: JmapErrorInvalidSessionResponse, err: errors.New("JMAP session response provides an invalid API URL")}
+	invalidSessionResponseErrorMissingUploadUrl    = SimpleError{code: JmapErrorInvalidSessionResponse, err: errors.New("JMAP session response does not provide an upload URL")}
+	invalidSessionResponseErrorMissingDownloadUrl  = SimpleError{code: JmapErrorInvalidSessionResponse, err: errors.New("JMAP session response does not provide a download URL")}
+	invalidSessionResponseErrorInvalidWebsocketUrl = SimpleError{code: JmapErrorInvalidSessionResponse, err: errors.New("JMAP session response provides an invalid Websocket URL")}
+)
+
+// Create a new Session from a SessionResponse.
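+//
+// It fails with one of the invalidSessionResponseError* errors above if the response
+// lacks a username, an API URL, or the upload/download URL templates, or if the API
+// or Websocket URLs cannot be parsed.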
+func newSession(sessionResponse SessionResponse) (Session, Error) { + username := sessionResponse.Username + if username == "" { + return Session{}, invalidSessionResponseErrorMissingUsername + } + apiStr := sessionResponse.ApiUrl + if apiStr == "" { + return Session{}, invalidSessionResponseErrorMissingApiUrl + } + apiUrl, err := url.Parse(apiStr) + if err != nil { + return Session{}, invalidSessionResponseErrorInvalidApiUrl + } + apiEndpoint := endpointOf(apiUrl) + + uploadUrl := sessionResponse.UploadUrl + if uploadUrl == "" { + return Session{}, invalidSessionResponseErrorMissingUploadUrl + } + uploadEndpoint := toEndpoint(uploadUrl) + + downloadUrl := sessionResponse.DownloadUrl + if downloadUrl == "" { + return Session{}, invalidSessionResponseErrorMissingDownloadUrl + } + downloadEndpoint := toEndpoint(downloadUrl) + + var websocketUrl *url.URL = nil + websocketEndpoint := "" + supportsWebsocketPush := false + websocketUrlStr := sessionResponse.Capabilities.Websocket.Url + if websocketUrlStr != "" { + websocketUrl, err = url.Parse(websocketUrlStr) + if err != nil { + return Session{}, invalidSessionResponseErrorInvalidWebsocketUrl + } + supportsWebsocketPush = sessionResponse.Capabilities.Websocket.SupportsPush + websocketEndpoint = endpointOf(websocketUrl) + } + + return Session{ + Username: username, + JmapUrl: *apiUrl, + JmapEndpoint: apiEndpoint, + UploadUrlTemplate: uploadUrl, + UploadEndpoint: uploadEndpoint, + DownloadUrlTemplate: downloadUrl, + DownloadEndpoint: downloadEndpoint, + WebsocketUrl: websocketUrl, + SupportsWebsocketPush: supportsWebsocketPush, + WebsocketEndpoint: websocketEndpoint, + SessionResponse: sessionResponse, + }, nil +} + +func endpointOf(u *url.URL) string { + if u != nil { + return fmt.Sprintf("%s://%s", u.Scheme, u.Host) + } else { + return "" + } +} + +func toEndpoint(str string) string { + u, err := url.Parse(str) + if err == nil { + return endpointOf(u) + } else { + return str + } +} diff --git a/pkg/jmap/jmap_tools.go b/pkg/jmap/jmap_tools.go new file mode 100644 index 0000000000..6dd2f9c50b --- /dev/null +++ b/pkg/jmap/jmap_tools.go @@ -0,0 +1,336 @@ +package jmap + +import ( + "context" + "encoding/json" + "errors" + "fmt" + "reflect" + "slices" + "strings" + "sync" + "time" + + "github.com/mitchellh/mapstructure" + "github.com/opencloud-eu/opencloud/pkg/jscalendar" + "github.com/opencloud-eu/opencloud/pkg/log" +) + +type eventListeners[T any] struct { + listeners []T + m sync.Mutex +} + +func (e *eventListeners[T]) add(listener T) { + e.m.Lock() + defer e.m.Unlock() + e.listeners = append(e.listeners, listener) +} + +func (e *eventListeners[T]) signal(signal func(T)) { + e.m.Lock() + defer e.m.Unlock() + for _, listener := range e.listeners { + signal(listener) + } +} + +func newEventListeners[T any]() *eventListeners[T] { + return &eventListeners[T]{ + listeners: []T{}, + } +} + +// Create an identifier to use as a method call ID, from the specified accountId and additional +// tag, to make something unique within that API request. +func mcid(accountId string, tag string) string { + // https://jmap.io/spec-core.html#the-invocation-data-type + // May be any string of data: + // An arbitrary string from the client to be echoed back with the responses emitted by that method + // call (a method may return 1 or more responses, as it may make implicit calls to other methods; + // all responses initiated by this method call get the same method call id in the response). 
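+	// For example, mcid("account-a", "0") yields "account-a:0"; the same string comes
+	// back as the tag on the matching method response(s), which is how the
+	// response-matching helpers below pair requests with responses.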
+ return accountId + ":" + tag +} + +func command[T any](api ApiClient, + logger *log.Logger, + ctx context.Context, + session *Session, + sessionOutdatedHandler func(session *Session, newState SessionState), + request Request, + acceptLanguage string, + mapper func(body *Response) (T, State, Error)) (T, SessionState, State, Language, Error) { + + responseBody, language, jmapErr := api.Command(ctx, logger, session, request, acceptLanguage) + if jmapErr != nil { + var zero T + return zero, "", "", language, jmapErr + } + + var response Response + err := json.Unmarshal(responseBody, &response) + if err != nil { + logger.Error().Err(err).Msgf("failed to deserialize body JSON payload into a %T", response) + var zero T + return zero, "", "", language, SimpleError{code: JmapErrorDecodingResponseBody, err: err} + } + + if response.SessionState != session.State { + if sessionOutdatedHandler != nil { + sessionOutdatedHandler(session, response.SessionState) + } + } + + // search for an "error" response + // https://jmap.io/spec-core.html#method-level-errors + for _, mr := range response.MethodResponses { + if mr.Command == ErrorCommand { + if errorParameters, ok := mr.Parameters.(ErrorResponse); ok { + code := JmapErrorServerFail + switch errorParameters.Type { + case MethodLevelErrorServerUnavailable: + code = JmapErrorServerUnavailable + case MethodLevelErrorServerFail, MethodLevelErrorServerPartialFail: + code = JmapErrorServerFail + case MethodLevelErrorUnknownMethod: + code = JmapErrorUnknownMethod + case MethodLevelErrorInvalidArguments: + code = JmapErrorInvalidArguments + case MethodLevelErrorInvalidResultReference: + code = JmapErrorInvalidResultReference + case MethodLevelErrorForbidden: + // there's a quirk here: when referencing an account that exists but that this + // user has no access to, Stalwart returns the 'forbidden' error, but this might + // leak the existence of an account to an attacker -- instead, we deem it safer to + // return a "account does not exist" error instead + if strings.HasPrefix(errorParameters.Description, "You do not have access to account") { + code = JmapErrorAccountNotFound + } else { + code = JmapErrorForbidden + } + case MethodLevelErrorAccountNotFound: + code = JmapErrorAccountNotFound + case MethodLevelErrorAccountNotSupportedByMethod: + code = JmapErrorAccountNotSupportedByMethod + case MethodLevelErrorAccountReadOnly: + code = JmapErrorAccountReadOnly + } + msg := fmt.Sprintf("found method level error in response '%v', type: '%v', description: '%v'", mr.Tag, errorParameters.Type, errorParameters.Description) + err = errors.New(msg) + logger.Warn().Int("code", code).Str("type", errorParameters.Type).Msg(msg) + var zero T + return zero, response.SessionState, "", language, SimpleError{code: code, err: err} + } else { + code := JmapErrorUnspecifiedType + msg := fmt.Sprintf("found method level error in response '%v'", mr.Tag) + err := errors.New(msg) + logger.Warn().Int("code", code).Msg(msg) + var zero T + return zero, response.SessionState, "", language, SimpleError{code: code, err: err} + } + } + } + + result, state, jerr := mapper(&response) + sessionState := response.SessionState + return result, sessionState, state, language, jerr +} + +func mapstructStringToTimeHook() mapstructure.DecodeHookFunc { + // mapstruct isn't able to properly map RFC3339 date strings into Time + // objects, which is why we require this custom hook, + // see https://github.com/mitchellh/mapstructure/issues/41 + wanted := reflect.TypeOf(time.Time{}) + return func(from 
reflect.Type, to reflect.Type, data any) (any, error) { + if to != wanted { + return data, nil + } + switch from.Kind() { + case reflect.String: + return time.Parse(time.RFC3339, data.(string)) + case reflect.Float64: + return time.Unix(0, int64(data.(float64))*int64(time.Millisecond)), nil + case reflect.Int64: + return time.Unix(0, data.(int64)*int64(time.Millisecond)), nil + default: + return data, nil + } + } +} + +func decodeMap(input map[string]any, target any) error { + // https://github.com/mitchellh/mapstructure/issues/41 + decoder, err := mapstructure.NewDecoder(&mapstructure.DecoderConfig{ + Metadata: nil, + DecodeHook: mapstructure.ComposeDecodeHookFunc( + mapstructStringToTimeHook(), + jscalendar.MapstructTriggerHook(), + ), + Result: &target, + ErrorUnused: false, + ErrorUnset: false, + IgnoreUntaggedFields: false, + Squash: true, + }) + if err != nil { + return err + } + return decoder.Decode(input) +} + +func decodeParameters(input any, target any) error { + m, ok := input.(map[string]any) + if !ok { + return fmt.Errorf("decodeParameters: parameters is not a map but a %T", input) + } + return decodeMap(m, target) +} + +func retrieveResponseMatch(data *Response, command Command, tag string) (Invocation, bool) { + for _, inv := range data.MethodResponses { + if command == inv.Command && tag == inv.Tag { + return inv, true + } + } + return Invocation{}, false +} + +func retrieveResponseMatchParameters[T any](logger *log.Logger, data *Response, command Command, tag string, target *T) Error { + match, ok := retrieveResponseMatch(data, command, tag) + if !ok { + err := fmt.Errorf("failed to find JMAP response invocation match for command '%v' and tag '%v'", command, tag) + logger.Error().Msg(err.Error()) + return simpleError(err, JmapErrorInvalidJmapResponsePayload) + } + params := match.Parameters + typedParams, ok := params.(T) + if !ok { + err := fmt.Errorf("JMAP response invocation matches command '%v' and tag '%v' but the type %T does not match the expected %T", command, tag, params, *target) + logger.Error().Msg(err.Error()) + return simpleError(err, JmapErrorInvalidJmapResponsePayload) + } + *target = typedParams + return nil +} + +func (i *Invocation) MarshalJSON() ([]byte, error) { + // JMAP requests have a slightly unusual structure since they are not a JSON object + // but, instead, a three-element array composed of + // 0: the command (e.g. "Email/query") + // 1: the actual payload of the request (structure depends on the command) + // 2: a tag that can be used to identify the matching response payload + // That implementation aspect thus requires us to use a custom marshalling hook. + arr := []any{string(i.Command), i.Parameters, i.Tag} + return json.Marshal(arr) +} + +func (i *Invocation) UnmarshalJSON(bs []byte) error { + // JMAP responses have a slightly unusual structure since they are not a JSON object + // but, instead, a three-element array composed of + // 0: the command (e.g. "Thread/get") this is a response to + // 1: the actual payload of the response (structure depends on the command) + // 2: the tag (same as in the request invocation) + // That implementation aspect thus requires us to use a custom unmarshalling hook. 
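+	// For example, the payload exercised in jmap_tools_test.go decodes from:
+	//   ["error", {"type": "forbidden", "description": "You do not have access to account a"}, "a:0"]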
+ arr := []any{} + err := json.Unmarshal(bs, &arr) + if err != nil { + return err + } + if len(arr) != 3 { + // JMAP response must really always be an array of three elements + return fmt.Errorf("Invocation array length ought to be 3 but is %d", len(arr)) + } + // The first element in the array is the command: + i.Command = Command(arr[0].(string)) + // The third element in the array is the tag: + i.Tag = arr[2].(string) + + // Due to the dynamic nature of request and response types in JMAP, we + // switch to using mapstruct here to deserialize the payload in the "parameters" + // element of JMAP invocation response arrays, as their expected struct type + // is directly inferred from the command (e.g. "Mailbox/get") + payload := arr[1] + + paramsFactory, ok := CommandResponseTypeMap[i.Command] + if !ok { + return fmt.Errorf("unsupported JMAP operation cannot be unmarshalled: %v", i.Command) + } + params := paramsFactory() + err = decodeParameters(payload, ¶ms) + if err != nil { + return err + } + i.Parameters = params + return nil +} + +func squashState(all map[string]State) State { + return squashStateFunc(all, func(s State) State { return s }) +} + +func squashStateFunc[V any](all map[string]V, mapper func(V) State) State { + n := len(all) + if n == 0 { + return State("") + } + if n == 1 { + for _, v := range all { + return mapper(v) + } + } + + parts := make([]string, n) + sortedKeys := make([]string, n) + i := 0 + for k := range all { + sortedKeys[i] = k + i++ + } + slices.Sort(sortedKeys) + for i, k := range sortedKeys { + if v, ok := all[k]; ok { + parts[i] = k + ":" + string(mapper(v)) + } else { + parts[i] = k + ":" + } + } + return State(strings.Join(parts, ",")) +} + +func squashStateMaps(first map[string]State, second map[string]State) State { + return squashStateFunc(mapPairs(first, second), func(p pair[State, State]) State { + if p.left != nil { + if p.right != nil { + return *p.left + ";" + *p.right + } else { + return *p.left + ";" + } + } else if p.right != nil { + return ";" + *p.right + } else { + return ";" + } + }) +} + +type pair[L any, R any] struct { + left *L + right *R +} + +func mapPairs[K comparable, L, R any](left map[K]L, right map[K]R) map[K]pair[L, R] { + result := map[K]pair[L, R]{} + for k, l := range left { + if r, ok := right[k]; ok { + result[k] = pair[L, R]{left: &l, right: &r} + } else { + result[k] = pair[L, R]{left: &l, right: nil} + } + } + for k, r := range right { + if _, ok := left[k]; !ok { + result[k] = pair[L, R]{left: nil, right: &r} + } + } + return result +} diff --git a/pkg/jmap/jmap_tools_test.go b/pkg/jmap/jmap_tools_test.go new file mode 100644 index 0000000000..dcdbf2ffa4 --- /dev/null +++ b/pkg/jmap/jmap_tools_test.go @@ -0,0 +1,24 @@ +package jmap + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestUnmarshallingError(t *testing.T) { + require := require.New(t) + + responseBody := `{"methodResponses":[["error",{"type":"forbidden","description":"You do not have access to account a"},"a:0"]],"sessionState":"3e25b2a0"}` + var response Response + err := json.Unmarshal([]byte(responseBody), &response) + require.NoError(err) + require.Len(response.MethodResponses, 1) + require.Equal(ErrorCommand, response.MethodResponses[0].Command) + require.Equal("a:0", response.MethodResponses[0].Tag) + require.IsType(ErrorResponse{}, response.MethodResponses[0].Parameters) + er, _ := response.MethodResponses[0].Parameters.(ErrorResponse) + require.Equal("forbidden", er.Type) + require.Equal("You do not have 
access to account a", er.Description) +} diff --git a/pkg/jscalendar/jscalendar_model.go b/pkg/jscalendar/jscalendar_model.go new file mode 100644 index 0000000000..9bc86711ce --- /dev/null +++ b/pkg/jscalendar/jscalendar_model.go @@ -0,0 +1,2277 @@ +package jscalendar + +import ( + "encoding/json" + "fmt" + "reflect" + "slices" + "time" + + "github.com/mitchellh/mapstructure" +) + +// This is a date-time string with no time zone/offset information. +// +// It is otherwise in the same format as `UTCDateTime`, including fractional seconds. +// +// For example, `2006-01-02T15:04:05` and `2006-01-02T15:04:05.003` are both valid. +/* +type LocalDateTime struct { + time.Time +} +*/ +type LocalDateTime string +type UTCDateTime string + +type TypeOfRelation string +type TypeOfLink string +type TypeOfEvent string +type TypeOfTask string +type TypeOfGroup string +type TypeOfLocation string +type TypeOfVirtualLocation string +type TypeOfRecurrenceRule string +type TypeOfNDay string +type TypeOfParticipant string +type TypeOfAlert string +type TypeOfOffsetTrigger string +type TypeOfAbsoluteTrigger string +type TypeOfTimeZone string +type TypeOfTimeZoneRule string + +type Duration string // TODO +type SignedDuration string // TODO + +type Relationship string +type Display string +type Rel string +type LocationTypeOption string +type VirtualLocationFeature string +type Frequency string +type Skip string +type DayOfWeek string +type FreeBusyStatus string +type Privacy string +type ReplyMethod string +type SendToMethod string +type ParticipantKind string +type Role string +type ParticipationStatus string +type ScheduleAgent string +type Progress string +type RelativeTo string +type AlertAction string +type Status string + +const ( + EventMediaType = "application/jscalendar+json;type=event" + TaskMediaType = "application/jscalendar+json;type=task" + GroupMediaType = "application/jscalendar+json;type=group" + + DefaultDescriptionContentType = "text/plain" + + RelationType = TypeOfRelation("Relation") + LinkType = TypeOfLink("Link") + EventType = TypeOfEvent("Event") + TaskType = TypeOfTask("Task") + GroupType = TypeOfGroup("Group") + LocationType = TypeOfLocation("Location") + VirtualLocationType = TypeOfVirtualLocation("VirtualLocation") + RecurrenceRuleType = TypeOfRecurrenceRule("RecurrenceRule") + NDayType = TypeOfNDay("NDay") + ParticipantType = TypeOfParticipant("Participant") + AlertType = TypeOfAlert("Alert") + OffsetTriggerType = TypeOfOffsetTrigger("OffsetTrigger") + AbsoluteTriggerType = TypeOfAbsoluteTrigger("AbsoluteTrigger") + TimeZoneType = TypeOfTimeZone("TimeZone") + TimeZoneRuleType = TypeOfTimeZoneRule("TimeZoneRule") + + RelationshipFirst = Relationship("first") + RelationshipNext = Relationship("next") + RelationshipChild = Relationship("child") + RelationshipParent = Relationship("parent") + + // Only for Task, JMAP extension: this task depends on the referenced task in some manner. + // + // For example, a task may be blocked waiting on the other, referenced, task. + RelationshipDependsOn = Relationship("depends-on") + + // Only for Task, JMAP extension: the referenced task was cloned from this task. + RelationshipClone = Relationship("clone") + + // Only for Task, JMAP extension: the referenced task is a duplicate of this task. + RelationshipDuplicate = Relationship("duplicate") + + // Only for Task, JMAP extension: the referenced task was the cause for this task. 
+ RelationshipCause = Relationship("cause") + + DisplayBadge = Display("badge") + DisplayGraphic = Display("graphic") + DisplayFullsize = Display("fullsize") + DisplayThumbnail = Display("thumbnail") + + // curl https://www.iana.org/assignments/link-relations/link-relations.xml | xq -x '//record/value'|sort + RelAbout = Rel("about") + RelAcl = Rel("acl") + RelAlternate = Rel("alternate") + RelAmphtml = Rel("amphtml") + RelApiCatalog = Rel("api-catalog") + RelAppendix = Rel("appendix") + RelAppleTouchIcon = Rel("apple-touch-icon") + RelAppleTouchStartupImage = Rel("apple-touch-startup-image") + RelArchives = Rel("archives") + RelAuthor = Rel("author") + RelBlockedBy = Rel("blocked-by") + RelBookmark = Rel("bookmark") + RelC2paManifest = Rel("c2pa-manifest") + RelCanonical = Rel("canonical") + RelChapter = Rel("chapter") + RelCiteAs = Rel("cite-as") + RelCollection = Rel("collection") + RelCompressionDictionary = Rel("compression-dictionary") + RelContents = Rel("contents") + RelConvertedfrom = Rel("convertedfrom") + RelCopyright = Rel("copyright") + RelCreateForm = Rel("create-form") + RelCurrent = Rel("current") + RelDeprecation = Rel("deprecation") + RelDescribedby = Rel("describedby") + RelDescribes = Rel("describes") + RelDisclosure = Rel("disclosure") + RelDnsPrefetch = Rel("dns-prefetch") + RelDuplicate = Rel("duplicate") + RelEdit = Rel("edit") + RelEditForm = Rel("edit-form") + RelEditMedia = Rel("edit-media") + RelEnclosure = Rel("enclosure") + RelExternal = Rel("external") + RelFirst = Rel("first") + RelGeofeed = Rel("geofeed") + RelGlossary = Rel("glossary") + RelHelp = Rel("help") + RelHosts = Rel("hosts") + RelHub = Rel("hub") + RelIceServer = Rel("ice-server") + RelIcon = Rel("icon") + RelIndex = Rel("index") + RelIntervalafter = Rel("intervalafter") + RelIntervalbefore = Rel("intervalbefore") + RelIntervalcontains = Rel("intervalcontains") + RelIntervaldisjoint = Rel("intervaldisjoint") + RelIntervalduring = Rel("intervalduring") + RelIntervalequals = Rel("intervalequals") + RelIntervalfinishedby = Rel("intervalfinishedby") + RelIntervalfinishes = Rel("intervalfinishes") + RelIntervalin = Rel("intervalin") + RelIntervalmeets = Rel("intervalmeets") + RelIntervalmetby = Rel("intervalmetby") + RelIntervaloverlappedby = Rel("intervaloverlappedby") + RelIntervaloverlaps = Rel("intervaloverlaps") + RelIntervalstartedby = Rel("intervalstartedby") + RelIntervalstarts = Rel("intervalstarts") + RelItem = Rel("item") + RelLast = Rel("last") + RelLatestVersion = Rel("latest-version") + RelLicense = Rel("license") + RelLinkset = Rel("linkset") + RelLrdd = Rel("lrdd") + RelManifest = Rel("manifest") + RelMaskIcon = Rel("mask-icon") + RelMe = Rel("me") + RelMediaFeed = Rel("media-feed") + RelMemento = Rel("memento") + RelMicropub = Rel("micropub") + RelModulepreload = Rel("modulepreload") + RelMonitor = Rel("monitor") + RelMonitorGroup = Rel("monitor-group") + RelNext = Rel("next") + RelNextArchive = Rel("next-archive") + RelNofollow = Rel("nofollow") + RelNoopener = Rel("noopener") + RelNoreferrer = Rel("noreferrer") + RelOpener = Rel("opener") + RelOpenid2LocalId = Rel("openid2.local_id") + RelOpenid2Provider = Rel("openid2.provider") + RelOriginal = Rel("original") + RelP3pv1 = Rel("p3pv1") + RelPayment = Rel("payment") + RelPingback = Rel("pingback") + RelPreconnect = Rel("preconnect") + RelPredecessorVersion = Rel("predecessor-version") + RelPrefetch = Rel("prefetch") + RelPreload = Rel("preload") + RelPrerender = Rel("prerender") + RelPrev = Rel("prev") + RelPrevArchive = 
Rel("prev-archive") + RelPreview = Rel("preview") + RelPrevious = Rel("previous") + RelPrivacyPolicy = Rel("privacy-policy") + RelProfile = Rel("profile") + RelPublication = Rel("publication") + RelRdapActive = Rel("rdap-active") + RelRdapBottom = Rel("rdap-bottom") + RelRdapDown = Rel("rdap-down") + RelRdapTop = Rel("rdap-top") + RelRdapUp = Rel("rdap-up") + RelRelated = Rel("related") + RelReplies = Rel("replies") + RelRestconf = Rel("restconf") + RelRuleinput = Rel("ruleinput") + RelSearch = Rel("search") + RelSection = Rel("section") + RelSelf = Rel("self") + RelService = Rel("service") + RelServiceDesc = Rel("service-desc") + RelServiceDoc = Rel("service-doc") + RelServiceMeta = Rel("service-meta") + RelSipTrunkingCapability = Rel("sip-trunking-capability") + RelSponsored = Rel("sponsored") + RelStart = Rel("start") + RelStatus = Rel("status") + RelStylesheet = Rel("stylesheet") + RelSubsection = Rel("subsection") + RelSuccessorVersion = Rel("successor-version") + RelSunset = Rel("sunset") + RelTag = Rel("tag") + RelTermsOfService = Rel("terms-of-service") + RelTimegate = Rel("timegate") + RelTimemap = Rel("timemap") + RelType = Rel("type") + RelUgc = Rel("ugc") + RelUp = Rel("up") + RelVersionHistory = Rel("version-history") + RelVia = Rel("via") + RelWebmention = Rel("webmention") + RelWorkingCopy = Rel("working-copy") + RelWorkingCopyOf = Rel("working-copy-of") + + LocationTypeOptionAircraft = LocationTypeOption("aircraft") + LocationTypeOptionAirport = LocationTypeOption("airport") + LocationTypeOptionArena = LocationTypeOption("arena") + LocationTypeOptionAutomobile = LocationTypeOption("automobile") + LocationTypeOptionBank = LocationTypeOption("bank") + LocationTypeOptionBar = LocationTypeOption("bar") + LocationTypeOptionBicycle = LocationTypeOption("bicycle") + LocationTypeOptionBus = LocationTypeOption("bus") + LocationTypeOptionBusStation = LocationTypeOption("bus-station") + LocationTypeOptionCafe = LocationTypeOption("cafe") + LocationTypeOptionCampground = LocationTypeOption("campground") + LocationTypeOptionCareFacility = LocationTypeOption("care-facility") + LocationTypeOptionClassroom = LocationTypeOption("classroom") + LocationTypeOptionClub = LocationTypeOption("club") + LocationTypeOptionConstruction = LocationTypeOption("construction") + LocationTypeOptionConventionCenter = LocationTypeOption("convention-center") + LocationTypeOptionDetachedUnit = LocationTypeOption("detached-unit") + LocationTypeOptionFireStation = LocationTypeOption("fire-station") + LocationTypeOptionGovernment = LocationTypeOption("government") + LocationTypeOptionHospital = LocationTypeOption("hospital") + LocationTypeOptionHotel = LocationTypeOption("hotel") + LocationTypeOptionIndustrial = LocationTypeOption("industrial") + LocationTypeOptionLandmarkAddress = LocationTypeOption("landmark-address") + LocationTypeOptionLibrary = LocationTypeOption("library") + LocationTypeOptionMotorcycle = LocationTypeOption("motorcycle") + LocationTypeOptionMunicipalGarage = LocationTypeOption("municipal-garage") + LocationTypeOptionMuseum = LocationTypeOption("museum") + LocationTypeOptionOffice = LocationTypeOption("office") + LocationTypeOptionOther = LocationTypeOption("other") + LocationTypeOptionOutdoors = LocationTypeOption("outdoors") + LocationTypeOptionParking = LocationTypeOption("parking") + LocationTypeOptionPhoneBox = LocationTypeOption("phone-box") + LocationTypeOptionPlaceOfWorship = LocationTypeOption("place-of-worship") + LocationTypeOptionPostOffice = LocationTypeOption("post-office") + 
LocationTypeOptionPrison = LocationTypeOption("prison") + LocationTypeOptionPublic = LocationTypeOption("public") + LocationTypeOptionPublicTransport = LocationTypeOption("public-transport") + LocationTypeOptionResidence = LocationTypeOption("residence") + LocationTypeOptionRestaurant = LocationTypeOption("restaurant") + LocationTypeOptionSchool = LocationTypeOption("school") + LocationTypeOptionShoppingArea = LocationTypeOption("shopping-area") + LocationTypeOptionStadium = LocationTypeOption("stadium") + LocationTypeOptionStore = LocationTypeOption("store") + LocationTypeOptionStreet = LocationTypeOption("street") + LocationTypeOptionTheater = LocationTypeOption("theater") + LocationTypeOptionTollBooth = LocationTypeOption("toll-booth") + LocationTypeOptionTownHall = LocationTypeOption("town-hall") + LocationTypeOptionTrain = LocationTypeOption("train") + LocationTypeOptionTrainStation = LocationTypeOption("train-station") + LocationTypeOptionTruck = LocationTypeOption("truck") + LocationTypeOptionUnderway = LocationTypeOption("underway") + LocationTypeOptionUnknown = LocationTypeOption("unknown") + LocationTypeOptionUtilitybox = LocationTypeOption("utilitybox") + LocationTypeOptionWarehouse = LocationTypeOption("warehouse") + LocationTypeOptionWasteTransferFacility = LocationTypeOption("waste-transfer-facility") + LocationTypeOptionWater = LocationTypeOption("water") + LocationTypeOptionWatercraft = LocationTypeOption("watercraft") + LocationTypeOptionWaterFacility = LocationTypeOption("water-facility") + LocationTypeOptionYouthCamp = LocationTypeOption("youth-camp") + + VirtualLocationFeatureAudio = VirtualLocationFeature("audio") + VirtualLocationFeatureChat = VirtualLocationFeature("chat") + VirtualLocationFeatureFeed = VirtualLocationFeature("feed") + VirtualLocationFeatureModerator = VirtualLocationFeature("moderator") + VirtualLocationFeaturePhone = VirtualLocationFeature("phone") + VirtualLocationFeatureScreen = VirtualLocationFeature("screen") + VirtualLocationFeatureVideo = VirtualLocationFeature("video") + + FrequencyYearly = Frequency("yearly") + FrequencyMonthly = Frequency("monthly") + FrequencyWeekly = Frequency("weekly") + FrequencyDaily = Frequency("daily") + FrequencyHourly = Frequency("hourly") + FrequencyMinutely = Frequency("minutely") + FrequencySecondly = Frequency("secondly") + + SkipOmit = Skip("omit") + SkipBackward = Skip("backward") + SkipForward = Skip("forward") + + DayOfWeekMonday = DayOfWeek("mo") + DayOfWeekTuesday = DayOfWeek("tu") + DayOfWeekWednesday = DayOfWeek("we") + DayOfWeekThursday = DayOfWeek("th") + DayOfWeekFriday = DayOfWeek("fr") + DayOfWeekSaturday = DayOfWeek("sa") + DayOfWeekSunday = DayOfWeek("su") + + RscaleIso8601 = "iso8601" + + FreeBusyStatusFree = FreeBusyStatus("free") + FreeBusyStatusBusy = FreeBusyStatus("busy") + + PrivacyPublic = Privacy("public") + PrivacyPrivate = Privacy("private") + PrivacySecret = Privacy("secret") + + ReplyMethodImip = ReplyMethod("imip") + ReplyMethodWeb = ReplyMethod("web") + ReplyMethodOther = ReplyMethod("other") + + SendToMethodImip = SendToMethod("imip") + SendToMethodOther = SendToMethod("other") + + ParticipantKindIndividual = ParticipantKind("individual") + ParticipantKindGroup = ParticipantKind("group") + ParticipantKindLocation = ParticipantKind("location") + ParticipantKindResource = ParticipantKind("resource") + + RoleOwner = Role("owner") + RoleOptional = Role("optional") + RoleInformational = Role("informational") + RoleChair = Role("chair") + RoleRequired = Role("required") + + // JMAP 
Task extension: the participant is expected to work on the task. + RoleAssignee = Role("assignee") + + ParticipationStatusNeedsAction = ParticipationStatus("needs-action") + ParticipationStatusAccepted = ParticipationStatus("accepted") + ParticipationStatusDeclined = ParticipationStatus("declined") + ParticipationStatusTentative = ParticipationStatus("tentative") + ParticipationStatusDelegated = ParticipationStatus("delegated") + + ScheduleAgentServer = ScheduleAgent("server") + ScheduleAgentClient = ScheduleAgent("client") + ScheduleAgentNone = ScheduleAgent("none") + + DefaultScheduleAgent = ScheduleAgentServer + + ProgressNeedsAction = Progress("needs-action") + ProgressInProcess = Progress("in-process") + ProgressCompleted = Progress("completed") + ProgressFailed = Progress("failed") + ProgressCancelled = Progress("cancelled") + + RelativeToStart = RelativeTo("start") + RelativeToEnd = RelativeTo("end") + + AlertActionDisplay = AlertAction("display") + AlertActionEmail = AlertAction("email") + + DefaultAlertAction = AlertActionDisplay + + StatusConfirmed = Status("confirmed") + StatusCancelled = Status("cancelled") + StatusTentative = Status("tentative") +) + +var ( + Relationships = []Relationship{ + RelationshipFirst, + RelationshipNext, + RelationshipChild, + RelationshipParent, + RelationshipDependsOn, + RelationshipClone, + RelationshipDuplicate, + RelationshipCause, + } + + Displays = []Display{ + DisplayBadge, + DisplayGraphic, + DisplayFullsize, + DisplayThumbnail, + } + + Rels = []Rel{ + RelAbout, + RelAcl, + RelAlternate, + RelAmphtml, + RelApiCatalog, + RelAppendix, + RelAppleTouchIcon, + RelAppleTouchStartupImage, + RelArchives, + RelAuthor, + RelBlockedBy, + RelBookmark, + RelC2paManifest, + RelCanonical, + RelChapter, + RelCiteAs, + RelCollection, + RelCompressionDictionary, + RelContents, + RelConvertedfrom, + RelCopyright, + RelCreateForm, + RelCurrent, + RelDeprecation, + RelDescribedby, + RelDescribes, + RelDisclosure, + RelDnsPrefetch, + RelDuplicate, + RelEdit, + RelEditForm, + RelEditMedia, + RelEnclosure, + RelExternal, + RelFirst, + RelGeofeed, + RelGlossary, + RelHelp, + RelHosts, + RelHub, + RelIceServer, + RelIcon, + RelIndex, + RelIntervalafter, + RelIntervalbefore, + RelIntervalcontains, + RelIntervaldisjoint, + RelIntervalduring, + RelIntervalequals, + RelIntervalfinishedby, + RelIntervalfinishes, + RelIntervalin, + RelIntervalmeets, + RelIntervalmetby, + RelIntervaloverlappedby, + RelIntervaloverlaps, + RelIntervalstartedby, + RelIntervalstarts, + RelItem, + RelLast, + RelLatestVersion, + RelLicense, + RelLinkset, + RelLrdd, + RelManifest, + RelMaskIcon, + RelMe, + RelMediaFeed, + RelMemento, + RelMicropub, + RelModulepreload, + RelMonitor, + RelMonitorGroup, + RelNext, + RelNextArchive, + RelNofollow, + RelNoopener, + RelNoreferrer, + RelOpener, + RelOpenid2LocalId, + RelOpenid2Provider, + RelOriginal, + RelP3pv1, + RelPayment, + RelPingback, + RelPreconnect, + RelPredecessorVersion, + RelPrefetch, + RelPreload, + RelPrerender, + RelPrev, + RelPrevArchive, + RelPreview, + RelPrevious, + RelPrivacyPolicy, + RelProfile, + RelPublication, + RelRdapActive, + RelRdapBottom, + RelRdapDown, + RelRdapTop, + RelRdapUp, + RelRelated, + RelReplies, + RelRestconf, + RelRuleinput, + RelSearch, + RelSection, + RelSelf, + RelService, + RelServiceDesc, + RelServiceDoc, + RelServiceMeta, + RelSipTrunkingCapability, + RelSponsored, + RelStart, + RelStatus, + RelStylesheet, + RelSubsection, + RelSuccessorVersion, + RelSunset, + RelTag, + RelTermsOfService, + RelTimegate, + 
RelTimemap, + RelType, + RelUgc, + RelUp, + RelVersionHistory, + RelVia, + RelWebmention, + RelWorkingCopy, + RelWorkingCopyOf, + } + + LocationTypeOptions = []LocationTypeOption{ + LocationTypeOptionAircraft, + LocationTypeOptionAirport, + LocationTypeOptionArena, + LocationTypeOptionAutomobile, + LocationTypeOptionBank, + LocationTypeOptionBar, + LocationTypeOptionBicycle, + LocationTypeOptionBus, + LocationTypeOptionBusStation, + LocationTypeOptionCafe, + LocationTypeOptionCampground, + LocationTypeOptionCareFacility, + LocationTypeOptionClassroom, + LocationTypeOptionClub, + LocationTypeOptionConstruction, + LocationTypeOptionConventionCenter, + LocationTypeOptionDetachedUnit, + LocationTypeOptionFireStation, + LocationTypeOptionGovernment, + LocationTypeOptionHospital, + LocationTypeOptionHotel, + LocationTypeOptionIndustrial, + LocationTypeOptionLandmarkAddress, + LocationTypeOptionLibrary, + LocationTypeOptionMotorcycle, + LocationTypeOptionMunicipalGarage, + LocationTypeOptionMuseum, + LocationTypeOptionOffice, + LocationTypeOptionOther, + LocationTypeOptionOutdoors, + LocationTypeOptionParking, + LocationTypeOptionPhoneBox, + LocationTypeOptionPlaceOfWorship, + LocationTypeOptionPostOffice, + LocationTypeOptionPrison, + LocationTypeOptionPublic, + LocationTypeOptionPublicTransport, + LocationTypeOptionResidence, + LocationTypeOptionRestaurant, + LocationTypeOptionSchool, + LocationTypeOptionShoppingArea, + LocationTypeOptionStadium, + LocationTypeOptionStore, + LocationTypeOptionStreet, + LocationTypeOptionTheater, + LocationTypeOptionTollBooth, + LocationTypeOptionTownHall, + LocationTypeOptionTrain, + LocationTypeOptionTrainStation, + LocationTypeOptionTruck, + LocationTypeOptionUnderway, + LocationTypeOptionUnknown, + LocationTypeOptionUtilitybox, + LocationTypeOptionWarehouse, + LocationTypeOptionWasteTransferFacility, + LocationTypeOptionWater, + LocationTypeOptionWatercraft, + LocationTypeOptionWaterFacility, + LocationTypeOptionYouthCamp, + } + + Frequencies = []Frequency{ + FrequencyYearly, + FrequencyMonthly, + FrequencyWeekly, + FrequencyDaily, + FrequencyHourly, + FrequencyMinutely, + FrequencySecondly, + } + + Skips = []Skip{ + SkipOmit, + SkipBackward, + SkipForward, + } + + RecurrentOverridesIgnoredPrefixes = []string{ + "@type", + "excludedRecurrenceRules", + "method", + "privacy", + "prodId", + "recurrenceId", + "recurrenceIdTimeZone", + "recurrenceOverrides", + "recurrenceRules", + "relatedTo", + "replyTo", + "sentBy", + "uid", + } + + LocalizationRequiredSuffixes = []string{ + "title", + "description", + "name", + } + + FreeBusyStatuses = []FreeBusyStatus{ + FreeBusyStatusFree, + FreeBusyStatusBusy, + } + + Privacies = []Privacy{ + PrivacyPublic, + PrivacyPrivate, + PrivacySecret, + } + + ReplyMethods = []ReplyMethod{ + ReplyMethodImip, + ReplyMethodWeb, + ReplyMethodOther, + } + + SendToMethods = []SendToMethod{ + SendToMethodImip, + SendToMethodOther, + } + + ParticipantKinds = []ParticipantKind{ + ParticipantKindIndividual, + ParticipantKindGroup, + ParticipantKindLocation, + ParticipantKindResource, + } + + Roles = []Role{ + RoleOwner, + RoleOptional, + RoleInformational, + RoleChair, + RoleRequired, + RoleAssignee, + } + + ParticipationStatuses = []ParticipationStatus{ + ParticipationStatusNeedsAction, + ParticipationStatusAccepted, + ParticipationStatusDeclined, + ParticipationStatusTentative, + ParticipationStatusDelegated, + } + + ScheduleAgents = []ScheduleAgent{ + ScheduleAgentServer, + ScheduleAgentClient, + ScheduleAgentNone, + } + + Progresses = 
[]Progress{ + ProgressNeedsAction, + ProgressInProcess, + ProgressCompleted, + ProgressFailed, + ProgressCancelled, + } + + RelativeTos = []RelativeTo{ + RelativeToStart, + RelativeToEnd, + } + + AlertActions = []AlertAction{ + AlertActionDisplay, + AlertActionEmail, + } + + Statuses = []Status{ + StatusConfirmed, + StatusCancelled, + StatusTentative, + } +) + +/* +const RFC3339Local = "2006-01-02T15:04:05" + +func (t LocalDateTime) MarshalJSON() ([]byte, error) { + return []byte("\"" + t.UTC().Format(RFC3339Local) + "\""), nil +} + +func (t *LocalDateTime) UnmarshalJSON(b []byte) error { + str := string(b) + if strings.HasPrefix(str, "\"") && !strings.HasSuffix(str, "Z\"") { + str = str[0:len(str)-1] + "Z\"" + } + var tt time.Time + err := tt.UnmarshalJSON([]byte(str)) + if err != nil { + return err + } + t.Time = tt.UTC() + return nil +} +*/ + +// A `PatchObject` is of type `String[*]` and represents an unordered set of patches on a JSON object. +// +// Each key is a path represented in a subset of the JSON Pointer format [RFC6901]. +// +// The paths have an implicit leading `/`, so each key is prefixed with `/` before applying the +// JSON Pointer evaluation algorithm. +// +// A patch within a `PatchObject` is only valid if all of the following conditions apply: +// !1. The pointer MUST NOT reference inside an array (i.e., you MUST NOT insert/delete +// from an array; the array MUST be replaced in its entirety instead). +// !2. All parts prior to the last (i.e., the value after the final slash) MUST already +// exist on the object being patched. +// !3. There MUST NOT be two patches in the `PatchObject` where the pointer of one is +// the prefix of the pointer of the other, e.g., `alerts/1/offset` and `alerts`. +// !4. The value for the patch MUST be valid for the property being set (of the correct +// type and obeying any other applicable restrictions), or, if null, the property +// MUST be optional. +// +// The value associated with each pointer determines how to apply that patch: +// !- If null, remove the property from the patched object. If the key is not present in the parent, +// this is a no-op. +// !- If non-null, set the value given as the value for this property (this may be a replacement +// or addition to the object being patched). +// +// A `PatchObject` does not define its own `@type` property. +// An `@type` property in a patch MUST be handled as any other patched property value. Implementations +// MUST reject a `PatchObject` in its entirety if any of its patches are invalid. +// Implementations MUST NOT apply partial patches. +// +// The `PatchObject` format is used to significantly reduce file size and duplicated content when +// specifying variations to a common object, such as with recurring events or when translating the +// data into multiple languages. +// +// It can also better preserve semantic intent if only the properties that should differ between +// the two objects are patched. For example, if one person is not going to a particular instance +// of a regularly scheduled event, in iCalendar, you would have to duplicate the entire event in +// the override. In JSCalendar, this is a small patch to show the difference. +// +// As only this property is patched, if, for example, the location of the event is later changed, the +// occurrence will automatically still inherit this change. +type PatchObject map[string]any
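// A minimal illustrative sketch (not part of the types above) of how such a patch could be
// applied to a generic decoded JSON object, assuming "fmt" and "strings" are imported.
// The helper name applyPatch is hypothetical; JSON Pointer escaping (~0/~1), array handling,
// and the all-or-nothing rejection rule (a real implementation would validate every patch
// before applying any) are deliberately left out.
func applyPatch(target map[string]any, patch PatchObject) error {
	for ptr, value := range patch {
		parts := strings.Split(ptr, "/") // keys carry an implicit leading "/"
		parent := target
		for _, p := range parts[:len(parts)-1] {
			next, ok := parent[p].(map[string]any)
			if !ok {
				// every part before the last MUST already exist (rule 2 above)
				return fmt.Errorf("invalid patch %q: missing parent %q", ptr, p)
			}
			parent = next
		}
		last := parts[len(parts)-1]
		if value == nil {
			delete(parent, last) // null removes the property; no-op if absent
		} else {
			parent[last] = value // non-null sets or replaces the property
		}
	}
	return nil
}

+// A Relation object defines the relation to other objects, using a possibly empty set of relation types.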
+// +// The object that defines this relation is the linking object, while the other object is the linked +// object. +type Relation struct { + // This specifies the type of this object. + // + // This MUST be `Relation`. + Type TypeOfRelation `json:"@type,omitempty"` + + // This describes how the linked object is related to the linking object. + // + // The relation is defined as a set of relation types. + // + // If empty, the relationship between the two objects is unspecified. + // + // Keys in the set MUST be one of the following values, specified in the + // property definition where the `Relation` object is used: + // !- `first`: The linked object is the first in a series the linking object is part of. + // !- `next`: The linked object is next in a series the linking object is part of. + // !- `child`: The linked object is a subpart of the linking object. + // !- `parent`: The linking object is a subpart of the linked object. + // + // The value for each key in the map MUST be true. + Relation map[Relationship]bool `json:"relation,omitempty"` +} + +type Link struct { + // This specifies the type of this object. + // + // This MUST be `Link`. + Type TypeOfLink `json:"@type,omitempty"` + + // This is a URI from which the resource may be fetched. + // + // This MAY be a `data:` URL [RFC2397], but it is recommended that the file be hosted on a + // server to avoid embedding arbitrarily large data in JSCalendar object instances. + Href string `json:"href"` + + // This is the media type [RFC6838] of the resource, if known. + ContentType string `json:"contentType,omitempty"` + + // This is the size, in octets, of the resource when fully decoded + // (i.e., the number of octets in the file the user would download), if known. + // + // Note that this is an informational estimate, and implementations must be prepared to handle + // the actual size being quite different when the resource is fetched. + Size uint `json:"size,omitzero"` + + // This identifies the relation of the linked resource to the object. + // + // If set, the value MUST be a relation type from the IANA "Link Relations" registry + // [LINKRELS], as established in [RFC8288]. + Rel Rel `json:"rel,omitempty"` + + // This describes the intended purpose of a link to an image. + // + // If set, the `rel` property MUST be set to icon. + // + // The value MUST be one of the following values: + // !- `badge`: an image meant to be displayed alongside the title of the object + // !- `graphic`: a full image replacement for the object itself + // !- `fullsize`: an image that is used to enhance the object + // !- `thumbnail`: a smaller variant of fullsize to be used when space for the image is constrained + Display Display `json:"display,omitempty"` + + // This is a human-readable, plain-text description of the resource. + Title string `json:"title,omitempty"` +} + +type Location struct { + // This specifies the type of this object. + // + // This MUST be `Location`. + Type TypeOfLocation `json:"@type,omitempty"` + + // This is the human-readable name of the location. + Name string `json:"name,omitempty"` + + // This is a set of one or more location types that describe this location. + // + // All types MUST be from the "Location Types Registry" [LOCATIONTYPES], as defined in [RFC4589]. + // + // The set is represented as a map, with the keys being the location types. + // + // The value for each key in the map MUST be `true`. 
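// Illustrative sketch: a Link describing a downloadable attachment, using only fields and
// Rel constants defined above; the URL, size, and title are invented.
var agendaAttachment = Link{
	Href:        "https://files.example.com/agenda.pdf",
	ContentType: "application/pdf",
	Size:        12345,
	Rel:         RelEnclosure,
	Title:       "Meeting agenda",
}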
+ LocationTypes map[LocationTypeOption]bool `json:"locationTypes,omitempty"` + + // This is a geo: URI [RFC5870] for the location. + Coordinates string `json:"coordinates,omitempty"` + + // This is a map of link ids to `Link` objects, representing external resources associated with this + // location, for example, a vCard or image. + // + // If there are no links, this MUST be omitted (rather than specified as an empty set). + Links map[string]Link `json:"links,omitempty"` +} + +type VirtualLocation struct { + // This specifies the type of this object. This MUST be `VirtualLocation`. + Type TypeOfVirtualLocation `json:"@type,omitempty"` + + // This is the human-readable name of the virtual location. + Name string `json:"name,omitempty"` + + // Mandatory: this is a URI [RFC3986] that represents how to connect to this virtual location. + // + // This may be a telephone number (represented using the `tel:` scheme, e.g., `tel:+1-555-555-5555`) + // for a teleconference, a web address for online chat, or any custom URI. + Uri string `json:"uri"` + + // A set of features supported by this virtual location. + // + // The set is represented as a map, with the keys being the feature. + // + // The value for each key in the map MUST be true. + // + // The feature MUST be one of the following values; any value the client or server + // doesn't understand should be treated the same as if this feature is omitted: + // !- `audio`: Audio conferencing + // !- `chat`: Chat or instant messaging + // !- `feed`: Blog or atom feed + // !- `moderator`: Provides moderator-specific features + // !- `phone`: Phone conferencing + // !- `screen`: Screen sharing + // !- `video`: Video conferencing + Features map[VirtualLocationFeature]bool `json:"features,omitempty"` +} + +type NDay struct { + // This specifies the type of this object. This MUST be `NDay`. + Type TypeOfNDay `json:"@type,omitempty"` + + // This is a day of the week on which to repeat; the allowed values are the same as for the + // `firstDayOfWeek` `recurrenceRule` property. + // + // This is the day of the week of the `BYDAY` part in iCalendar, converted to lowercase. + Day DayOfWeek `json:"day"` + + // If present, rather than representing every occurrence of the weekday defined in the `day` + // property, it represents only a specific instance within the recurrence period. + // + // The value can be positive or negative but MUST NOT be zero. + // + // A negative integer means the nth-last occurrence within that period (i.e., -1 is the last + // occurrence, -2 the one before that, etc.). + // + // This is the ordinal part of the `BYDAY` value in iCalendar (e.g., `1` or `-3`). + NthOfPeriod int `json:"nthOfPeriod,omitzero"` +} + +// A RecurrenceRule object is a JSON object mapping of a `RECUR` value type in iCalendar +// [RFC5545] [RFC7529] and has the same semantics. +// +// [RFC5545]: https://www.rfc-editor.org/rfc/rfc5545.html +// [RFC7529]: https://www.rfc-editor.org/rfc/rfc7529.html +type RecurrenceRule struct { + // This specifies the type of this object. This MUST be ` RecurrenceRule`. + Type TypeOfRecurrenceRule `json:"@type,omitempty"` + + // This is the time span covered by each iteration of this recurrence rule. + // + // This MUST be one of the following values: + // !- `yearly` + // !- `monthly` + // !- `weekly` + // !- `daily` + // !- `hourly` + // !- `minutely` + // !- `secondly` + // + // This is the `FREQ` part from iCalendar, converted to lowercase. 
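// Illustrative sketch: a dial-in VirtualLocation advertising phone and audio features,
// using only fields and constants defined above; the values are invented and the optional
// @type field is left at its zero value.
var dialIn = VirtualLocation{
	Name: "Weekly sync dial-in",
	Uri:  "tel:+1-555-555-5555",
	Features: map[VirtualLocationFeature]bool{
		VirtualLocationFeaturePhone: true,
		VirtualLocationFeatureAudio: true,
	},
}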
+ Frequency Frequency `json:"frequency,omitempty"` + + // This is the interval of iteration periods at which the recurrence repeats. + // + // If included, it MUST be an integer >= `1`. + // + // This is the `INTERVAL` part from iCalendar. + // + // Default: 1 + Interval uint `json:"interval,omitzero"` + + // This is the calendar system in which this recurrence rule operates, in lowercase. + // + // This MUST be either a CLDR-registered calendar system name [CLDR] or a vendor-specific + // value. + // + // This is the `RSCALE` part from iCalendar RSCALE [RFC7529], converted to lowercase. + // + // Default: gregorian + // + // [CLDR]: https://github.com/unicode-org/cldr/blob/latest/common/bcp47/calendar.xml + // [RFC7529]: https://www.rfc-editor.org/rfc/rfc7529.html + Rscale string `json:"rscale,omitempty"` + + // This is the behavior to use when the expansion of the recurrence produces invalid dates. + // + // This property only has an effect if the frequency is `yearly` or `monthly`. + // + // It MUST be one of the following values: + // !- `omit` + // !- `backward` + // !- `forward` + // + // This is the `SKIP` part from iCalendar `RSCALE` [RFC7529], converted to lowercase. + // + // Default: omit + Skip Skip `json:"skip,omitempty"` + + // This is the day on which the week is considered to start, represented as a lowercase, abbreviated, + // and two-letter English day of the week. + // + // If included, it MUST be one of the following values: + // !- `mo` + // !- `tu` + // !- `we` + // !- `th` + // !- `fr` + // !- `sa` + // !- `su` + // + // This is the `WKST` part from iCalendar. + // + // Default: mo + FirstDayOfWeek DayOfWeek `json:"firstDayOfWeek,omitempty"` + + // These are days of the week on which to repeat. + ByDay []NDay `json:"byDay,omitempty"` + + // These are the days of the month on which to repeat. + // + // Valid values are between 1 and the maximum number of days any month may have in the calendar given by + // the `rscale` property and the negative values of these numbers. + // + // For example, in the Gregorian calendar, valid values are `1` to `31` and `-31` to `-1`. + // + // Negative values offset from the end of the month. + // + // The array MUST have at least one entry if included. + // + // This is the `BYMONTHDAY` part in iCalendar. + ByMonthDay []int `json:"byMonthDay,omitempty"` + + // These are the months in which to repeat. + // + // Each entry is a string representation of a number, starting from `"1"` for the first month in the + // calendar (e.g., `"1"` means January with the Gregorian calendar), with an optional `"L"` suffix + // (see [RFC7529]) for leap months (this MUST be uppercase, e.g., `"3L"`). + // + // The array MUST have at least one entry if included. + // + // This is the `BYMONTH` part from iCalendar. + // + // [RFC7529]: https://www.rfc-editor.org/rfc/rfc7529.html + ByMonth []string `json:"byMonth,omitempty"` + + // These are the days of the year on which to repeat. + // + // Valid values are between `1` and the maximum number of days any year may have in the calendar given + // by the `rscale` property and the negative values of these numbers. + // + // For example, in the Gregorian calendar, valid values are `1` to `366` and `-366` to `-1`. + // + // Negative values offset from the end of the year. + // + // The array MUST have at least one entry if included. + // + // This is the `BYYEARDAY` part from iCalendar. + ByYearDay []int `json:"byYearDay,omitempty"` + + // These are the weeks of the year in which to repeat. 
+ // + // Valid values are between `1` and the maximum number of weeks any year may have in the calendar + // given by the `rscale` property and the negative values of these numbers. + // + // For example, in the Gregorian calendar, valid values are `1` to `53` and `-53` to `-1`. + // + // The array MUST have at least one entry if included. + // + // This is the `BYWEEKNO` part from iCalendar. + ByWeekNo []int `json:"byWeekNo,omitempty"` + + // These are the hours of the day in which to repeat. + // + // Valid values are `0` to `23`. + // + // The array MUST have at least one entry if included. + // + // This is the `BYHOUR` part from iCalendar. + ByHour []uint `json:"byHour,omitempty"` + + // These are the minutes of the hour in which to repeat. + // + // Valid values are `0` to `59`. + // + // The array MUST have at least one entry if included. + // + // This is the `BYMINUTE` part from iCalendar. + ByMinute []uint `json:"byMinute,omitempty"` + + // These are the seconds of the minute in which to repeat. + // + // Valid values are `0` to `60`. + // + // The array MUST have at least one entry if included. + // + // This is the `BYSECOND` part from iCalendar. + BySecond []uint `json:"bySecond,omitempty"` + + // These are the occurrences within the recurrence interval to include in the final results. + // + // Negative values offset from the end of the list of occurrences. + // + // The array MUST have at least one entry if included. + // + // This is the `BYSETPOS` part from iCalendar. + BySetPosition []int `json:"bySetPosition,omitempty"` + + // These are the number of occurrences at which to range-bound the recurrence. + // + // This MUST NOT be included if an until property is specified. + // + // This is the `COUNT` part from iCalendar. + Count uint `json:"count,omitzero"` + + // These are the date-time at which to finish recurring. + // + // The last occurrence is on or before this date-time. + // + // This MUST NOT be included if a `count` property is specified. + // + // Note that if not specified otherwise for a specific JSCalendar object, this date is to be + // interpreted in the time zone specified in the JSCalendar object's `timeZone` property. + // + // This is the UNTIL part from iCalendar. + Until *LocalDateTime `json:"until,omitempty"` +} + +type Participant struct { + // This specifies the type of this object. This MUST be ` Participant`. + Type TypeOfParticipant `json:"@type,omitempty"` + + // This is the display name of the participant (e.g., `"Joe Bloggs"``). + Name string `json:"name,omitempty"` + + // This is the email address to use to contact the participant or, for example, + // match with an address book entry. + // + // If set, the value MUST be a valid addr-spec value as defined in Section 3.4.1 of [RFC5322]. + Email string `json:"email,omitempty"` + + // This is a plain-text description of this participant. + // + // For example, this may include more information about their role in the event or how best to contact them. + Description string `json:"description,omitempty"` + + // This describes the media type of the contents of the description property. + // + // If this property is set, then the description property MUST be set. + DescriptionContentType string `json:"descriptionContentType,omitempty"` + + // This is a URI as defined by [RFC3986] or any other IANA-registered form for a URI. 
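// Illustrative sketch: "every second week on Monday and Wednesday, ten times in total",
// built from the recurrence types above; the optional @type field is omitted and the
// variable name is invented.
var biweeklyRule = RecurrenceRule{
	Frequency: FrequencyWeekly,
	Interval:  2,
	ByDay: []NDay{
		{Day: DayOfWeekMonday},
		{Day: DayOfWeekWednesday},
	},
	Count: 10,
}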
+ // + // It is the same as the CAL-ADDRESS value of an iCalendar ATTENDEE property [RFC5545] (Section 3.8.4.1) + // or ORGANIZER property [RFC5545] (Section 3.8.4.3) — it globally identifies a particular participant, + // even across different calendaring objects. + CalendarAddress string `json:"calendarAddress,omitempty"` + + // This is what kind of entity this participant is, if known. + // + // If this property is set, then the calendarAddress property MUST be set. + // + // This MUST be one of the following values, another value registered in the IANA "JSCalendar Enum Values" registry, + // or a vendor-specific value (see Section 3.3). + // + // Any value the client or server doesn't understand should be treated the same as if this property is omitted. + // !- `individual`: a single person + // !- `group`: a collection of people invited as a whole + // !- `location`: a physical location that needs to be scheduled, e.g., a conference room + // !- `resource`: a non-human resource other than a location, such as a projector + Kind ParticipantKind `json:"kind,omitempty"` + + // This is a set of roles that this participant fulfills. + // + // If this property is set, then the calendarAddress property MUST be set. + // + // At least one role MUST be specified for the participant. + // + // The keys in the set MUST be one of the following values, another value registered in the IANA "JSCalendar Enum Values" + // registry, or a vendor-specific value (see Section 3.3): + // !- `owner`: The participant is an owner of the object. This signifies they have permission to make changes to it + // that affect the other participants. Nonowner participants may only change properties that affect only themselves + // (for example, setting their own alerts or changing their RSVP status). + // !- `attendee`: The participant is expected to be present at the event. + // !- `optional`: The participant's involvement with the event is optional. This is expected to be primarily combined + // with the `"attendee"` role. + // !- `informational`: The participant is copied for informational reasons and is not expected to attend. + // !- `chair`: The participant is in charge of the event/task when it occurs. + // !- `contact`: The participant is someone that may be contacted for information about the event. + // + // The value for each key in the map MUST be true. It is expected that no more than one of the roles + // `"attendee"` and `"informational"` be present; if more than one are given, `"attendee"` takes precedence + // over `"informational"`. + // + // Roles that are unknown to the implementation MUST be preserved. + Roles map[Role]bool `json:"roles,omitempty"` + + // This is the location at which this participant is expected to be attending. + // + // If the value does not correspond to any location id in the `locations` property of the JSCalendar object, + // this MUST be treated the same as if the participant's `locationId` were omitted. + LocationId string `json:"locationId,omitempty"` + + // This is the language tag, as defined in [RFC5646], that best describes the participant's preferred language, if known. + Language string `json:"language,omitempty"` + + // This is the participation status, if any, of this participant. + // + // The value MUST be one of the following values, another value registered in the IANA "JSCalendar Enum Values" registry, + // or a vendor-specific value (see Section 3.3): + // !- `needs-action`: No status has yet been set by the participant. 
+ // !- `accepted`: The invited participant will participate. + // !- `declined`: The invited participant will not participate. + // !- `tentative`: The invited participant may participate. + // !- `delegated`: The invited participant has delegated their attendance to another participant, as specified in the `delegatedTo` property. + ParticipationStatus ParticipationStatus `json:"participationStatus,omitempty"` + + // This is a note from the participant to explain their participation status. + ParticipationComment string `json:"participationComment,omitempty"` + + // If true, the organizer is expecting the participant to notify them of their participation status. + // + // If this property is set, then the calendarAddress property MUST be set. + ExpectReply bool `json:"expectReply,omitzero"` + + // This is who is responsible for sending scheduling messages with this calendar object to the participant. + // + // The value MUST be one of the following values, another value registered in the IANA "JSCalendar Enum Values" + // registry, or a vendor-specific value (see Section 3.3): + // !- `server`: The calendar server will send the scheduling messages. + // !- `client`: The calendar client will send the scheduling messages. + // !- `none`: No scheduling messages are to be sent to this participant. + // + // Default: server + ScheduleAgent ScheduleAgent `json:"scheduleAgent,omitempty"` + + // A client may set the property on a participant to true to request that the server send a scheduling + // message to the participant when it would not normally do so (e.g., if no significant change is made + // the object or the scheduleAgent is set to client). + // + // The property MUST NOT be stored in the JSCalendar object on the server or appear in a scheduling message. + ScheduleForceSend bool `json:"scheduleForceSend,omitzero"` + + // This is the sequence number of the last response from the participant. + // + // If defined, this MUST be a nonnegative integer. + // + // This can be used to determine whether the participant has sent a new response following significant + // changes to the calendar object and to determine if future responses are responding to a current or older view of the data. + ScheduleSequence uint `json:"scheduleSequence,omitzero"` + + // This is a list of status codes, returned from the processing of the most recent scheduling message sent to this participant. + // + // The status codes MUST be valid statcode values as defined in the ABNF in [Section 3.8.8.3 of RFC5545]. + // + // Servers MUST only add or change this property when they send a scheduling message to the participant. + // + // Clients SHOULD NOT change or remove this property if it was provided by the server. + // + // Clients MAY add, change, or remove the property for participants where the client is handling the scheduling. + // + // This property MUST NOT be included in scheduling messages. + // + // [Section 3.8.8.3 of RFC5545]: https://www.rfc-editor.org/rfc/rfc5545#section-3.8.8.3 + ScheduleStatus []string `json:"scheduleStatus,omitempty"` + + // This is the timestamp for the most recent response from this participant. + // + // This is the updated property of the last response when using iTIP. It can be compared to the updated property in + // future responses to detect and discard older responses delivered out of order. + ScheduleUpdated time.Time `json:"scheduleUpdated,omitzero"` + + // This is the email address in the `"From"` header of the email that last updated this participant via iMIP. 
+ // + // This SHOULD only be set if the email address is different to that in the mailto URI of this participant's `imip` + // method in the `sendTo` property (i.e., the response was received from a different address to that which the + // invitation was sent to). If set, the value MUST be a valid addr-spec value as defined in Section 3.4.1 of [RFC5322]. + SentBy string `json:"sentBy,omitempty"` + + // This is the id of the participant who added this participant to the event/task, if known. + InvitedBy string `json:"invitedBy,omitempty"` + + // This is set of participant ids that this participant has delegated their participation to. + // + // Each key in the set MUST be the id of a participant. + // + // The value for each key in the map MUST be true. + // + // If there are no delegates, this MUST be omitted (rather than specified as an empty set). + DelegatedTo map[string]bool `json:"delegatedTo,omitempty"` + + // This is a set of participant ids that this participant is acting as a delegate for. + // + // Each key in the set MUST be the id of a participant. + // + // The value for each key in the map MUST be true. + // + // If there are no delegators, this MUST be omitted (rather than specified as an empty set). + DelegatedFrom map[string]bool `json:"delegatedFrom,omitempty"` + + // This is a set of group participants that were invited to this calendar object, which caused this participant to + // be invited due to their membership in the group(s). + // + // Each key in the set MUST be the id of a participant. + // + // The value for each key in the map MUST be true. + // + // If there are no groups, this MUST be omitted (rather than specified as an empty set). + MemberOf map[string]bool `json:"memberOf,omitempty"` + + // This is a map of link ids to `Link` objects, representing external resources associated with this participant, + // for example, a vCard or image. + // + // Only allowed for participants of a Task. + // + // If there are no links, this MUST be omitted (rather than specified as an empty set). + Links map[string]Link `json:"links,omitempty"` + + // This represents the progress of the participant for this task. + // + // It MUST NOT be set if the `participationStatus` of this participant is any value other than `accepted`. + // + // Only allowed for participants of a Task. + // + // See Section 5.2.5 for allowed values and semantics. + Progress Progress `json:"progress,omitempty"` + + // This specifies the date-time the progress property was last set on this participant. + // + // Only allowed for participants of a Task. + // + // See Section 5.2.6 for allowed values and semantics. + ProgressUpdated time.Time `json:"progressUpdated,omitzero"` + + // This represents the percent completion of the participant for this task. + // + // Only allowed for participants of a Task. + // + // The property value MUST be a positive integer between 0 and 100. + PercentComplete uint `json:"percentComplete,omitzero"` + + // This is a URI as defined by [@!RFC3986] or any other IANA-registered form for a URI. + // + // It is the same as the `CAL-ADDRESS` value of an `ATTENDEE` or `ORGANIZER` in iCalendar ([@!RFC5545]); + // it globally identifies a particular participant, even across different events. + // + // This is a JMAP addition to JSCalendar. + ScheduleId string `json:"scheduleId,omitempty"` +} + +type Trigger interface { + trigger() +} + +type OffsetTrigger struct { + // This specifies the type of this object. This MUST be `OffsetTrigger`. 
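// Illustrative sketch: a participant acting as chair who has accepted the invitation,
// using only fields and enum values defined above; the name and address are invented.
var chair = Participant{
	Name:  "Joe Bloggs",
	Email: "joe.bloggs@example.com",
	Kind:  ParticipantKindIndividual,
	Roles: map[Role]bool{
		RoleChair: true,
	},
	ParticipationStatus: ParticipationStatusAccepted,
}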
+ Type TypeOfOffsetTrigger `json:"@type,omitempty"` + + // This defines the offset at which to trigger the alert relative to the time property defined in + // the `relativeTo` property of the alert. + // + // Negative durations signify alerts before the time property; + // positive durations signify alerts after the time property. + Offset SignedDuration `json:"offset"` + + // This specifies the time property that the alert offset is relative to. + // + // The value MUST be one of the following: + // !- `start`: triggers the alert relative to the start of the calendar object + // !- `end`: triggers the alert relative to the end/due time of the calendar object + RelativeTo RelativeTo `json:"relativeTo,omitempty"` +} + +var _ Trigger = OffsetTrigger{} + +func (o OffsetTrigger) trigger() {} + +type AbsoluteTrigger struct { + // This specifies the type of this object. This MUST be `AbsoluteTrigger`. + Type TypeOfAbsoluteTrigger `json:"@type,omitempty"` + + // This defines a specific UTC date-time when the alert is triggered. + When time.Time `json:"when"` +} + +var _ Trigger = AbsoluteTrigger{} + +func (o AbsoluteTrigger) trigger() {} + +// An `UnknownTrigger` object is an object that contains an `@type` property whose value is not recognized +// (i.e., not `OffsetTrigger` or `AbsoluteTrigger`) plus zero or more other properties. +// +// This is for compatibility with client extensions and future specifications. +// +// Implementations SHOULD NOT trigger for trigger types they do not understand but MUST preserve them. +type UnknownTrigger map[string]any + +var _ Trigger = UnknownTrigger{} + +func (o UnknownTrigger) trigger() {} + +// MapstructTriggerHook returns a mapstructure decode hook that converts a map into the concrete +// Trigger type it describes (OffsetTrigger, AbsoluteTrigger, or UnknownTrigger as a fallback). +func MapstructTriggerHook() mapstructure.DecodeHookFunc { + fn := func(Trigger) {} + wanted := reflect.TypeOf(fn).In(0) + return func(from reflect.Type, to reflect.Type, data any) (any, error) { + if to != wanted { + return data, nil + } + m, ok := data.(map[string]any) + if !ok { + // only a JSON object can describe a trigger; leave other values untouched + return data, nil + } + if t, ok := m["@type"]; ok { + switch t { + case string(OffsetTriggerType): + return mapOffsetTrigger(m) + case string(AbsoluteTriggerType): + return mapAbsoluteTrigger(m) + default: + return UnknownTrigger(m), nil + } + } else { + if _, ok := m["offset"]; ok { + return mapOffsetTrigger(m) + } + if _, ok := m["when"]; ok { + return mapAbsoluteTrigger(m) + } else { + return UnknownTrigger(m), nil + } + } + } +} + +func mapOffsetTrigger(m map[string]any) (OffsetTrigger, error) { + trigger := OffsetTrigger{ + Type: OffsetTriggerType, + } + if value, ok := m["offset"]; ok { + if str, ok := value.(string); ok { + trigger.Offset = SignedDuration(str) + } + } + if value, ok := m["relativeTo"]; ok { + if str, ok := value.(string); ok { + t := RelativeTo(str) + if slices.Contains(RelativeTos, t) { + trigger.RelativeTo = t + } else { + return trigger, fmt.Errorf("unsupported Trigger.relativeTo value: '%v'", value) + } + } + } + return trigger, nil +} + +func mapAbsoluteTrigger(m map[string]any) (AbsoluteTrigger, error) { + trigger := AbsoluteTrigger{ + Type: AbsoluteTriggerType, + } + if value, ok := m["when"]; ok { + if str, ok := value.(string); ok { + w, err := time.Parse(time.RFC3339, str) + if err != nil { + return trigger, err + } + trigger.When = w + } + } + return trigger, nil +} + +type Alert struct { + // This specifies the type of this object. This MUST be `Alert`. + Type TypeOfAlert `json:"@type,omitempty"` + + // This defines when to trigger the alert. + // + // New types may be defined in future documents.
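// Illustrative sketch: one plausible way MapstructTriggerHook could be wired into a
// mapstructure decoder so that an interface-typed Trigger field (here on Alert) ends up
// holding the matching concrete type. The function name decodeAlertMap is hypothetical.
func decodeAlertMap(raw map[string]any) (Alert, error) {
	var alert Alert
	dec, err := mapstructure.NewDecoder(&mapstructure.DecoderConfig{
		DecodeHook: MapstructTriggerHook(),
		Result:     &alert,
	})
	if err != nil {
		return Alert{}, err
	}
	if err := dec.Decode(raw); err != nil {
		return Alert{}, err
	}
	return alert, nil
}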
+ Trigger Trigger `json:"trigger"` + + // This records when an alert was last acknowledged. + // + // This is set when the user has dismissed the alert; other clients that sync this property + // SHOULD automatically dismiss or suppress duplicate alerts (alerts with the same alert id + // that triggered on or before this date-time). + // + // For a recurring calendar object, setting the acknowledged property MUST NOT add a new override + // to the recurrenceOverrides property. + // + // If the alert is not already overridden, the acknowledged property MUST be set on the alert + // in the base event/task. + // + // Certain kinds of alert action may not provide feedback as to when the user sees them, for example, + // email-based alerts. + // + // For those kinds of alerts, this property MUST be set immediately when the alert is triggered + // and the action is successfully carried out. + Acknowledged time.Time `json:"acknowledged,omitzero"` + + // This relates this alert to other alerts in the same JSCalendar object. + // + // If the user wishes to snooze an alert, the application MUST create an alert to trigger after snoozing. + // This new snooze alert MUST set a parent relation to the identifier of the original alert. + RelatedTo map[string]Relation `json:"relatedTo,omitempty"` + + // This describes how to alert the user. + // + // The value MUST be at most one of the following values, a value registered in the IANA "JSCalendar Enum Values" + // registry, or a vendor-specific value (see Section 3.3): + // !- `display`: The alert should be displayed as appropriate for the current device and user context. + // !- `email`: The alert should trigger an email sent out to the user, notifying them of the alert. This action is + // typically only appropriate for server implementations. + // + // Default: display + Action AlertAction `json:"action,omitempty"` +} + +func (a *Alert) UnmarshalJSON(b []byte) error { + // use a simplified Trigger structure to only deserialize some of + // its fields, only in order to detect which Trigger type it actually + // is (Offset, Absolute, or Unknown) + var peekAlert struct { + Trigger struct { + Type string `json:"@type"` + Offset string `json:"offset"` + When string `json:"when"` + } `json:"trigger"` + } + if err := json.Unmarshal(b, &peekAlert); err != nil { + return err + } + switch peekAlert.Trigger.Type { + case string(OffsetTriggerType): + a.Trigger = new(OffsetTrigger) + case string(AbsoluteTriggerType): + a.Trigger = new(AbsoluteTrigger) + default: + // it could still be a Trigger without its optional @type field + if peekAlert.Trigger.Offset != "" { // if "offset" is set, it's an OffsetTrigger + a.Trigger = new(OffsetTrigger) + } else if peekAlert.Trigger.When != "" { // if "when" is set, it's an AbsoluteTrigger + a.Trigger = new(AbsoluteTrigger) + } else { // it's neither -> UnknownTrigger + a.Trigger = new(UnknownTrigger) + } + } + + type tmpAlert Alert // alias Alert to avoid infinite recursion into this func + return json.Unmarshal(b, (*tmpAlert)(a)) +} + +// A `TimeZoneRule` object maps a `STANDARD` or `DAYLIGHT` sub-component from iCalendar, +// with the restriction that, at most, one recurrence rule is allowed per rule. +type TimeZoneRule struct { + // This specifies the type of this object. This MUST be `TimeZoneRule`. + Type TypeOfTimeZoneRule `json:"@type,omitempty"` + + // This is the `DTSTART` property from iCalendar. 
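// Illustrative sketch: because of Alert.UnmarshalJSON above, a JSON alert whose trigger
// carries an "offset" member is decoded into an *OffsetTrigger even when the optional
// "@type" member is missing. The payload is invented and assumes SignedDuration accepts
// ISO 8601 durations such as "-PT15M".
func exampleDecodeAlert() (Alert, error) {
	raw := []byte(`{"trigger": {"offset": "-PT15M", "relativeTo": "start"}, "action": "display"}`)
	var alert Alert
	err := json.Unmarshal(raw, &alert)
	return alert, err
}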
+ Start LocalDateTime `json:"start"` + + // This is the `TZOFFSETFROM` property from iCalendar: it specifies the offset that is in use prior to this time zone observance. + // + // This property specifies the offset that is in use prior to this time observance. + // + // It is used to calculate the absolute time at which the transition to a given observance takes place. + // + // The property value is a signed numeric indicating the number of hours and possibly minutes from UTC. + // + // Positive numbers represent time zones east of the prime meridian, or ahead of UTC. + // + // Negative numbers represent time zones west of the prime meridian, or behind UTC. + // + // Mandatory. + // + // example: -0500 + OffsetFrom string `json:"offsetFrom"` + + // This is the `TZOFFSETTO` property from iCalendar: it specifies the offset that is in use in this time zone observance. + // + // This property specifies the offset that is in use in this time zone observance. + // + // It is used to calculate the absolute time for the new observance. + // + // The property value is a signed numeric indicating the number of hours and possibly minutes from UTC. + // + // Positive numbers represent time zones east of the prime meridian, or ahead of UTC. + // + // Negative numbers represent time zones west of the prime meridian, or behind UTC. + // + // Mandatory. + // + // example: +1245 + OffsetTo string `json:"offsetTo"` + + // This maps the `RRULE` properties from iCalendar. + // + // During recurrence rule evaluation, the `until` property value MUST be interpreted + // as a local time in the UTC time zone. + RecurrenceRules []RecurrenceRule `json:"recurrenceRules,omitempty"` + + // This maps the `RDATE` properties from iCalendar. + // + // The set is represented as an object, with the keys being the recurrence dates. + // + // The patch object MUST be the empty JSON object (`{}`). + RecurrenceOverrides map[LocalDateTime]PatchObject `json:"recurrenceOverrides,omitempty"` + + // This maps the `TZNAME` properties from iCalendar to a JSON set. + // + // The set is represented as an object, with the keys being the names, excluding any + // `tznparam` component from iCalendar. + // + // The value for each key in the map MUST be true. + Names map[string]bool `json:"names,omitempty"` + + // This maps the `COMMENT` properties from iCalendar. + // + // The order MUST be preserved during conversion. + Comments []string `json:"comments,omitempty"` +} + +type TimeZone struct { + // This specifies the type of this object. This MUST be `TimeZone`. + Type TypeOfTimeZone `json:"@type,omitempty"` + + // This is the `TZID` property from iCalendar. + // + // Note that this implies that the value MUST be a valid `paramtext` value as specified in Section 3.1 of [RFC5545]. + TzId string `json:"tzId"` + + // This is the `LAST-MODIFIED` property from iCalendar. + Updated time.Time `json:"updated,omitzero"` + + // This is the `TZURL` property from iCalendar. + Url string `json:"url,omitempty"` + + // This is the `TZUNTIL` property from iCalendar, specified in [RFC7808]. + ValidUntil time.Time `json:"validUntil,omitzero"` + + // This maps the `TZID-ALIAS-OF` properties from iCalendar, specified in [RFC7808], to a JSON set of aliases. + // + // The set is represented as an object, with the keys being the aliases. + // + // The value for each key in the map MUST be `true`. + Aliases map[string]bool `json:"aliases,omitempty"` + + // These are the `STANDARD` sub-components from iCalendar. + // + // The order MUST be preserved during conversion.
+ Standard []TimeZoneRule `json:"standard,omitempty"` + + // These are the `DAYLIGHT` sub-components from iCalendar. + // + // The order MUST be preserved during conversion. + Daylight []TimeZoneRule `json:"daylight,omitempty"` +} + +type CommonObject struct { + // This is a globally unique identifier used to associate objects representing the same event, + // task, group, or other object across different systems, calendars, and views. + // + // For recurring events and tasks, the UID is associated with the base object and therefore + // is the same for all occurrences; the combination of the UID with a recurrenceId identifies + // a particular instance. + // + // The generator of the identifier MUST guarantee that the identifier is unique. + // + // [RFC4122] describes a range of established algorithms to generate universally unique identifiers + // (UUIDs). UUID version 4, described in Section 4.4 of [RFC4122], is RECOMMENDED. + // + // For compatibility with UIDs [RFC5545], implementations MUST be able to receive and persist + // values of at least 255 octets for this property, but they MUST NOT truncate values in the + // middle of a UTF-8 multi-octet sequence. + Uid string `json:"uid"` + + // This is the identifier for the product that last updated the JSCalendar object. + // + // This should be set whenever the data in the object is modified (i.e., whenever the updated property is set). + // + // The vendor of the implementation MUST ensure that this is a globally unique identifier, using + // some technique such as a Formal Public Identifier (FPI) value, as defined in [ISO.9070.1991]. + // + // This property SHOULD NOT be used to alter the interpretation of a JSCalendar object beyond the semantics + // specified in this document. + // + // For example, it is not to be used to further the understanding of nonstandard properties, a practice + // that is known to cause long-term interoperability problems. + ProdId string `json:"prodId,omitempty"` + + // This is the date and time this object was initially created. + Created UTCDateTime `json:"created,omitzero"` + + // This is the date and time the data in this object was last modified (or its creation date/time + // if not modified since). + Updated UTCDateTime `json:"updated,omitzero"` + + // This is a short summary of the object. + Title string `json:"title,omitempty"` + + // This is a longer-form text description of the object. + // + // The content is formatted according to the `descriptionContentType` property. + Description string `json:"description,omitempty"` + + // This describes the media type [RFC6838] of the contents of the description property. + // + // Media types MUST be subtypes of type text and SHOULD be text/plain or text/html [MEDIATYPES]. + // + // They MAY include parameters, and the charset parameter value MUST be utf-8, if specified. + // + // Descriptions of type text/html MAY contain cid URLs [RFC2392] to reference links in the calendar + // object by use of the cid property of the Link object. + // + // Default: text/plain + DescriptionContentType string `json:"descriptionContentType,omitempty"` + + // This is a map of link ids to `Link` objects, representing external resources associated with the object. + // + // Links with a `rel` of `enclosure` MUST be considered by the client to be attachments for download. + // + // Links with a `rel` of `describedby` MUST be considered by the client to be alternative representations of the + // `description`.
+ // + // Links with a `rel` of `icon` MUST be considered by the client to be images that it may use when presenting + // the calendar data to a user. The `display` property may be set to indicate the purpose of this image. + Links map[string]Link `json:"links,omitempty"` + + // This is the language tag, as defined in [RFC5646], that best describes the locale used for the text in + // the calendar object, if known. + // + // [RFC5646]: https://www.rfc-editor.org/rfc/rfc5646.html + Locale string `json:"locale,omitempty"` + + // This is a set of keywords or tags that relate to the object. + // + // The set is represented as a map, with the keys being the keywords. + // + // The value for each key in the map MUST be `true`. + Keywords map[string]bool `json:"keywords,omitempty"` + + // This is a set of categories that relate to the calendar object. + // + // The set is represented as a map, with the keys being the categories specified as URIs. + // + // The value for each key in the map MUST be `true`. + // + // In contrast to keywords, categories are typically structured. + // + // For example, a vendor owning the domain `example.com` might define the categories + // `http://example.com/categories/sports/american-football` and `http://example.com/categories/music/r-b`. + Categories map[string]bool `json:"categories,omitempty"` + + // This is a color clients MAY use when displaying this calendar object. + // + // The value is a color name taken from the set of names defined in [Section 4.3 of CSS Color Module Level 3] + // or an RGB value in hexadecimal notation, as defined in [Section 4.2.1 of CSS Color Module Level 3]. + // + // [Section 4.3 of CSS Color Module Level 3]: https://www.w3.org/TR/css-color-3/#svg-color + // [Section 4.2.1 of CSS Color Module Level 3]: https://www.w3.org/TR/css-color-3/#rgb-color + Color string `json:"color,omitempty"` +} + +// TODO +// +// ### Recurrence Properties +// +// Some events and tasks occur at regular or irregular intervals. Rather than having to copy the data for every occurrence, +// there can be a base event with rules to generate recurrences and/or overrides that add extra dates or exceptions to the rules. +// +// The recurrence set is the complete set of instances for an object. It is generated by considering the following properties in +// order, all of which are optional: +// !- The `recurrenceRules` property generates a set of extra date-times on which the object occurs. +// !- The `excludedRecurrenceRules` property generates a set of date-times that are to be removed from the previously generated +// set of date-times on which the object occurs. +// !- The `recurrenceOverrides` property defines date-times that are added or excluded to form the final set. (This property +// may also contain changes to the object to apply to particular instances.) +type Object struct { + CommonObject + + // This relates the object to other JSCalendar objects. + // + // This is represented as a map of the UIDs of the related objects to information about the relation. + // + // If an object is split to make a "this and future" change to a recurrence, the original object MUST + // be truncated to end at the previous occurrence before this split, and a new object is created to + // represent all the occurrences after the split. + // + // A next relation MUST be set on the original object's relatedTo property for the UID of the new object. + // + // A first relation for the UID of the first object in the series MUST be set on the new object. 
+ // Clients can then follow these UIDs to get the complete set of objects if the user wishes to modify + // them all at once. + RelatedTo map[string]Relation `json:"relatedTo,omitempty"` + + // Initially zero, this MUST be incremented by one every time a change is made to the object, except + // if the change only modifies the `participants` property. + // + // This is used as part of the iCalendar Transport-independent Interoperability Protocol (iTIP) [RFC5546] + // to know which version of the object a scheduling message relates to. + Sequence uint `json:"sequence,omitzero"` + + /* + // CalendarEvent objects MUST NOT have a “method” property as this is only used when representing iTIP + // [@!RFC5546] scheduling messages, not events in a data store. + Method Method `json:"method,omitempty"` + */ + + // This indicates that the time is not important to display to the user when rendering this calendar object. + // + // An example of this is an event that conceptually occurs all day or across multiple days, such as + // `"New Year's Day"` or `"Italy Vacation"`. + // + // While the time component is important for free-busy calculations and checking for scheduling clashes, + // calendars may choose to omit displaying it and/or display the object separately to other objects to + // enhance the user's view of their schedule. + // + // Such events are also commonly known as "all-day" events. + // + // Default: false + ShowWithoutTime bool `json:"showWithoutTime,omitzero"` + + // This is a map of location ids to `Location` objects, representing locations associated with the object. + Locations map[string]Location `json:"locations,omitempty"` + + // This indicates which of the multiple entries in the locations property can be considered the main location + // for the event or task. + // + // A client implementation MAY choose to display this location more prominently. + // + // The main location is undefined if this property is not set. + // + // If this property is set, then its value MUST match a key in the locations property and the name property + // of that main Location object MUST be set. + MainLocationId string `json:"mainLocationId,omitempty"` + + // This is a map of virtual location ids to VirtualLocation objects, representing virtual locations, such as + // video conferences or chat rooms, associated with the object. + VirtualLocations map[string]VirtualLocation `json:"virtualLocations,omitempty"` + + // If present, this JSCalendar object represents one occurrence of a recurring JSCalendar object. + // + // If present, the `recurrenceRules` and `recurrenceOverrides` properties MUST NOT be present. + // + // The value is a date-time either produced by the `recurrenceRules` of the base event or + // added as a key to the `recurrenceOverrides` property of the base event. + RecurrenceId *LocalDateTime `json:"recurrenceId,omitempty"` + + // Identifies the time zone of the main JSCalendar object, of which this JSCalendar object is a recurrence instance. + // + // This property MUST be set if the `recurrenceId` property is set. + // + // It MUST NOT be set if the `recurrenceId` property is not set. + RecurrenceIdTimeZone string `json:"recurrenceIdTimeZone,omitempty"` + + // This defines a set of recurrence rules (repeating patterns) for recurring calendar objects. + // + // TODO select the right documentation for each copy of the Object class: + // + // An Event recurs by applying the recurrence rules to the start date-time. 
+ // + // A Task recurs by applying the recurrence rules to the start date-time, if defined; otherwise, it recurs by + // the due date-time, if defined. If the task defines neither a start nor due date-time, it MUST NOT + // define a `recurrenceRules` property. + // + // If multiple recurrence rules are given, each rule is to be applied, and then the union of the results are used, + // ignoring any duplicates. + // + // TODO UPDATE RFC + RecurrenceRule *RecurrenceRule `json:"recurrenceRule,omitempty"` + + // This defines a set of recurrence rules (repeating patterns) for date-times on which the object will not occur. + // + // The rules are interpreted the same as for the `recurrenceRules` property, with the exception that the initial + // date-time to which the rule is applied (the `"start"` date-time for events or the `"start"` or `"due"` + // date-time for tasks) is only considered part of the expansion if it matches the rule. + // + // The resulting set of date-times is then removed from those generated by the `recurrenceRules` property. + ExcludedRecurrenceRules []RecurrenceRule `json:"excludedRecurrenceRules,omitempty"` + + // Maps recurrence ids (the date-time produced by the recurrence rule) to the overridden properties of the + // recurrence instance. + // + // If the recurrence id does not match a date-time from the recurrence rule (or no rule is specified), it + // is to be treated as an additional occurrence (like an `RDATE` from iCalendar). + // + // The patch object may often be empty in this case. + // + // If the patch object defines the `excluded` property of an occurrence to be `true`, this occurrence is + // omitted from the final set of recurrences for the calendar object (like an `EXDATE` from iCalendar). + // + // Such a patch object MUST NOT patch any other property. + // + // By default, an occurrence inherits all properties from the main object except the start (or due) + // date-time, which is shifted to match the recurrence id `LocalDateTime`. + // + // However, individual properties of the occurrence can be modified by a patch or multiple patches. + // + // It is valid to patch the `start` property value, and this patch takes precedence over the value + // generated from the recurrence id. + // + // Both the recurrence id as well as the patched start date-time may occur before the original JSCalendar + // object's start or due date. + // + // A pointer in the `PatchObject` MUST be ignored if it starts with one of the following prefixes: + // !- `@type` + // !- `excludedRecurrenceRules` + // !- `method` + // !- `privacy` + // !- `prodId` + // !- `recurrenceId` + // !- `recurrenceIdTimeZone` + // !- `recurrenceOverrides` + // !- `recurrenceRules` + // !- `relatedTo` + // !- `replyTo` + // !- `sentBy` + // !- `timeZones` + // !- `uid` + RecurrenceOverrides map[LocalDateTime]PatchObject `json:"recurrenceOverrides,omitempty"` + + // This defines if this object is an overridden, excluded instance of a recurring JSCalendar object. + // + // If this property value is `true`, this calendar object instance MUST be removed from the occurrence expansion. + // + // The absence of this property, or the presence of its default value as `false`, indicates that this + // instance MUST be included in the occurrence expansion. + Excluded bool `json:"excluded,omitzero"` + + // This specifies a priority for the calendar object. + // + // This may be used as part of scheduling systems to help resolve conflicts for a time period. 
+ // + // The priority is specified as an integer in the range `0` to `9`. + // + // A value of `0` specifies an undefined priority, for which the treatment will vary by situation. + // + // A value of `1` is the highest priority. + // + // A value of `2` is the second highest priority. + // + // Subsequent numbers specify a decreasing ordinal priority. + // + // A value of `9` is the lowest priority. + // + // Other integer values are reserved for future use. + Priority int `json:"priority,omitzero"` + + // This specifies how this calendar object should be treated when calculating free-busy state. + // + // This MUST be one of the following values, another value registered in the IANA + // "JSCalendar Enum Values" registry, or a vendor-specific value (see Section 3.3): + // !- `free` + // !- `busy` (default) + FreeBusyStatus FreeBusyStatus `json:"freeBusyStatus,omitempty"` + + // Privacy level. + // + // Calendar objects are normally collected together and may be shared with other users. + // The `privacy` property allows the object owner to indicate that it should not be shared or should + // only have the time information shared but the details withheld. + // + // Enforcement of the restrictions indicated by this property is up to the API via which this object is accessed. + // + // This property MUST NOT affect the information sent to scheduled participants; it is only + // interpreted by protocols that share the calendar objects belonging to one user with other users. + // + // The value MUST be one of the following values, another value registered in the IANA "JSCalendar Enum Values" + // registry, or a vendor-specific value (see Section 3.3). + // + // Any value the client or server doesn't understand should be preserved but treated as equivalent to private. + // + // !- `public`: The full details of the object are visible to those whom the object's calendar is shared with. + // !- `private`: The details of the object are hidden; only the basic time and metadata are shared. + // !- `secret`: The object is hidden completely (as though it did not exist) when the calendar this object is in is shared. + // + // When the `privacy` property is set to `private`, the following properties MAY be shared; any other + // properties MUST NOT be shared: + // !- `@type` + // !- `created` + // !- `due` + // !- `duration` + // !- `estimatedDuration` + // !- `freeBusyStatus` + // !- `privacy` + // !- `recurrenceOverrides` (Only patches that apply to another permissible property are allowed to be shared.) + // !- `sequence` + // !- `showWithoutTime` + // !- `start` + // !- `timeZone` + // !- `timeZones` + // !- `uid` + // !- `updated` + Privacy Privacy `json:"privacy,omitempty"` + + // This represents methods by which participants may submit their response to the organizer of the calendar object. + // + // The keys in the property value are the available methods and MUST only contain ASCII alphanumeric characters + // (`A-Za-z0-9`). The value is a URI for the method specified in the key. + // + // Future methods may be defined in future specifications and registered with IANA; a calendar client MUST + // ignore any method it does not understand but MUST preserve the method key and URI. + // + // This property MUST be omitted if no method is defined (rather than being specified as an empty object). + // + // The following methods are defined: + // !- `imip`: The organizer accepts an iCalendar Message-Based Interoperability Protocol (iMIP) + // [RFC6047] response at this email address. 
The value MUST be a `mailto:` URI.
+	//	!- `web`: Opening this URI in a web browser will provide the user with a page where they can
+	//	submit a reply to the organizer. The value MUST be a URL using the `https:` scheme.
+	//	!- `other`: The organizer is identified by this URI, but the method for submitting the response
+	//	is undefined.
+	ReplyTo map[ReplyMethod]string `json:"replyTo,omitempty"`
+
+	// This is the email address in the `"From"` header of the email in which this calendar object was received.
+	//
+	// This is only relevant if the calendar object is received via iMIP or as an attachment to a message.
+	//
+	// If set, the value MUST be a valid addr-spec value as defined in Section 3.4.1 of [RFC5322].
+	SentBy string `json:"sentBy,omitempty"`
+
+	// This is a map of participant ids to participants, describing their participation in the calendar object.
+	//
+	// If this property is set and any participant has a `sendTo` property, then the `replyTo` property of this
+	// calendar object MUST define at least one reply method.
+	Participants map[string]Participant `json:"participants,omitempty"`
+
+	// A request status as returned from processing the most recent scheduling request for this JSCalendar object.
+	//
+	// The allowed values are defined by the ABNF definitions of `statcode`, `statdesc` and `extdata` in
+	// Section 3.8.8.3 of [RFC5545] and the following ABNF [RFC5234]:
+	//
+	// ```text
+	// reqstatus = statcode ";" statdesc [";" extdata]
+	// ```
+	//
+	// Servers MUST only add or change this property when they perform a scheduling action.
+	//
+	// Clients SHOULD NOT change or remove this property if it was provided by the server.
+	//
+	// Clients MAY add, change, or remove the property when the client is handling the scheduling.
+	//
+	// This property MUST only be included in scheduling messages according to the rules defined for the
+	// `REQUEST-STATUS` iCalendar property in [RFC5546].
+	RequestStatus string `json:"requestStatus,omitempty"`
+
+	// If `true`, use the user's default alerts and ignore the value of the alerts property.
+	//
+	// Fetching user defaults is dependent on the API from which this JSCalendar object is being fetched and
+	// is not defined in this specification.
+	//
+	// If an implementation cannot determine the user's default alerts, or none are set, it MUST process
+	// the alerts property as if `useDefaultAlerts` is set to false.
+	//
+	// Default: false
+	UseDefaultAlerts bool `json:"useDefaultAlerts,omitzero"`
+
+	// This is a map of alert ids to Alert objects, representing alerts/reminders to display or send
+	// to the user for this calendar object.
+	Alerts map[string]Alert `json:"alerts,omitempty"`
+
+	// A map where each key is a language tag [RFC5646], and the corresponding value is a set of patches
+	// to apply to the calendar object in order to localize it into that locale.
+	//
+	// See the description of PatchObject (Section 1.4.9) for the structure of the PatchObject.
+	//
+	// The patches are applied to the top-level calendar object. In addition, the locale property of the patched
+	// object is set to the language tag.
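+	//
+	// As a rough, non-normative sketch (values invented), a German localization of the title could be
+	// expressed as:
+	//
+	//	"localizations": {
+	//		"de": { "title": "Neujahrsfeier" }
+	//	}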
+ // + // All pointers for patches MUST end with one of the following suffixes; any patch that does not follow + // this MUST be ignored unless otherwise specified in a future RFC: + // !- `title` + // !- `description` + // !- `name` + // + // A patch MUST NOT have the prefix `recurrenceOverrides`; any localization of the override MUST be a + // patch to the `localizations` property inside the override instead. + // + // For example, a patch to `locations/abcd1234/title` is permissible, but a patch to `uid` or + // `recurrenceOverrides/2020-01-05T14:00:00/title` is not. + // + // Note that this specification does not define how to maintain validity of localized content. + // + // For example, a client application changing a JSCalendar object's `title` property might also + // need to update any localizations of this property. Client implementations SHOULD provide the means + // to manage localizations, but how to achieve this is specific to the application's workflow and requirements. + Localizations map[string]PatchObject `json:"localizations,omitempty"` + + // This identifies the time zone the object is scheduled in or is null for floating time. + // + // This is either a name from the IANA Time Zone Database [TZDB] or the `TimeZoneId` of a custom time zone + // from the `timeZones property`. + // + // If omitted, this MUST be presumed to be `null` (i.e., floating time). + TimeZone string `json:"timeZone,omitempty"` + + // If true, any user may add themselves to the event as a participant with the + // `attendee` role. + // + // This property MUST NOT be altered in the `recurrenceOverrides`; it may only be set on the base object. + // + // This indicates the event will accept "party crasher" RSVPs via iTIP, subject to any + // other domain-specific restrictions, and users may add themselves to the event via JMAP as + // long as they have the mayRSVP permission for the calendar. + // + // This is a JMAP addition to JSCalendar. + // + // default: false + MayInviteSelf bool `json:"mayInviteSelf,omitzero"` + + // If true, any current participant with the `attendee` role may add new participants with the + // `attendee` role to the event. + // + // This property MUST NOT be altered in the `recurrenceOverrides`; it may only be set on the base object. + // + // The `mayRSVP` permission for the calendar is also required in conjunction with this event property + // for users to be allowed to make this change via JMAP. + // + // This is a JMAP addition to JSCalendar. + // + // default: false + MayInviteOthers bool `json:"mayInviteOthers,omitzero"` + + // If true, only the owners of the event may see the full set of participants. + // + // Other sharees of the event may only see the owners and themselves. + // + // This property MUST NOT be altered in the `recurrenceOverrides`; it may only be set on the base object. + // + // This is a JMAP addition to JSCalendar. + // + // default: false + HideAttendees bool `json:"hideAttendees,omitzero"` +} + +type Event struct { + Type TypeOfEvent `json:"@type,omitempty"` + + Object + + // This is the date/time the event starts in the event's time zone (as specified in the timeZone property, see Section 4.7.1). + Start LocalDateTime `json:"start"` + + // This is the zero or positive duration of the event in the event's start time zone. + // + // The end time of an event can be found by adding the duration to the event's start time. + // + // An Event MAY involve start and end locations that are in different time zones + // (e.g., a transcontinental flight). 
This can be expressed using the `relativeTo` and `timeZone` properties of
+	// the `Event`'s Location objects (see Section 4.2.5).
+	Duration Duration `json:"duration,omitempty"`
+
+	// This identifies the time zone in which this event ends, for cases where the start and end time zones of the event differ
+	// (e.g., a transcontinental flight).
+	//
+	// If this property is not set, then the event starts and ends in the same time zone.
+	//
+	// This property MUST NOT be set if the timeZone property value is null or not set.
+	EndTimeZone string `json:"endTimeZone,omitempty"`
+
+	// This is the scheduling status (Section 4.4) of an Event.
+	//
+	// If set, it MUST be one of the following values, another value registered in the IANA
+	// "JSCalendar Enum Values" registry, or a vendor-specific value (see Section 3.3):
+	// !- `confirmed`: indicates the event is definitely happening
+	// !- `cancelled`: indicates the event has been cancelled
+	// !- `tentative`: indicates the event may happen
+	Status Status `json:"status,omitempty"`
+}
+
+type Task struct {
+	Type TypeOfTask `json:"@type,omitempty"`
+
+	Object
+
+	// This is the date/time the task is due in the task's time zone.
+	Due LocalDateTime `json:"due,omitzero"`
+
+	// This is the date/time the task should start in the task's time zone.
+	Start LocalDateTime `json:"start,omitzero"`
+
+	// This specifies the estimated positive duration of time the task takes to complete.
+	EstimatedDuration Duration `json:"estimatedDuration,omitempty"`
+
+	// This represents the percent completion of the task overall.
+	//
+	// The property value MUST be a positive integer between `0` and `100`.
+	PercentComplete uint `json:"percentComplete,omitzero"`
+
+	// This defines the progress of this task.
+	//
+	// If omitted, the default progress (Section 4.4) of a Task is defined as follows (in order of evaluation):
+	// !- `completed`: if the progress property value of all participants is completed
+	// !- `failed`: if at least one progress property value of a participant is failed
+	// !- `in-process`: if at least one progress property value of a participant is in-process
+	// !- `needs-action`: if none of the other criteria match
+	//
+	// If set, it MUST be one of the following values, another value registered in the IANA "JSCalendar Enum Values"
+	// registry, or a vendor-specific value (see Section 3.3):
+	// !- `needs-action`: indicates the task needs action
+	// !- `in-process`: indicates the task is in process
+	// !- `completed`: indicates the task is completed
+	// !- `failed`: indicates the task failed
+	// !- `cancelled`: indicates the task was cancelled
+	Progress Progress `json:"progress,omitempty"`
+
+	// This specifies the date/time the progress property of either the task overall (Section 5.2.5) or
+	// a specific participant (Section 4.4.6) was last updated.
+	//
+	// If the task is recurring and has future instances, a client may want to keep track of the last progress
+	// update timestamp of a specific task recurrence but leave other instances unchanged.
+	//
+	// One way to achieve this is by overriding the `progressUpdated` property in the task `recurrenceOverrides` property.
+	//
+	// However, this could produce a long list of timestamps for regularly recurring tasks.
+	//
+	// An alternative approach is to split the `Task` into a current, single instance of `Task` with this instance
+	// progress update time and a future recurring instance.
+	//
+	// See also Section 4.1.3 on splitting.
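+	//
+	// A rough, non-normative sketch (dates invented) of overriding the progress of one occurrence via
+	// the `recurrenceOverrides` property:
+	//
+	//	"recurrenceOverrides": {
+	//		"2026-01-05T09:00:00": { "percentComplete": 50, "progressUpdated": "2026-01-05T12:00:00Z" }
+	//	}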
+ ProgressUpdated time.Time `json:"progressUpdated,omitzero"` +} + +type GroupEntry interface { + groupEntry() +} + +func (e Event) groupEntry() {} + +var _ GroupEntry = Event{} + +func (t Task) groupEntry() {} + +var _ GroupEntry = Task{} + +type Group struct { + Type TypeOfGroup `json:"@type,omitempty"` + + CommonObject + + // This is a collection of group members. + // + // Implementations MUST ignore entries of unknown type. + Entries []GroupEntry `json:"entries"` + + // This is the source from which updated versions of this group may be retrieved. + // + // The value MUST be a URI. + Source string `json:"source,omitempty"` +} + +func (g *Group) UnmarshalJSON(b []byte) error { + var typ struct { + Entries []struct { + Type string `json:"@type"` + } `json:"entries"` + } + if err := json.Unmarshal(b, &typ); err != nil { + return err + } + entries := make([]GroupEntry, len(typ.Entries)) + for i, entry := range typ.Entries { + switch entry.Type { + case string(EventType): + entries[i] = new(Event) + case string(TaskType): + entries[i] = new(Task) + default: + return fmt.Errorf("unsupported '%T.type' @type: \"%v\"", entry, entry.Type) + } + } + + type tmp Group + return json.Unmarshal(b, (*tmp)(g)) +} + +// mlr --csv --headerless-csv-output cut -f Token ./location-type-registry-1.csv |sort|perl -ne 'chomp; print "LocationTypeOption".ucfirst($_)." = LocationTypeOption(\"".$_."\")\n"' diff --git a/pkg/jscalendar/jscalendar_model_test.go b/pkg/jscalendar/jscalendar_model_test.go new file mode 100644 index 0000000000..e54c3e5a44 --- /dev/null +++ b/pkg/jscalendar/jscalendar_model_test.go @@ -0,0 +1,716 @@ +package jscalendar + +import ( + "encoding/json" + "testing" + "time" + + "github.com/stretchr/testify/require" +) + +func jsoneq[X any](t *testing.T, expected string, object X) { + data, err := json.MarshalIndent(object, "", "") + require.NoError(t, err) + require.JSONEq(t, expected, string(data)) + + var rec X + err = json.Unmarshal(data, &rec) + require.NoError(t, err) + require.Equal(t, object, rec) +} + +/* +func TestLocalDateTime(t *testing.T) { + ts, err := time.Parse(time.RFC3339, "2025-09-25T18:26:14+02:00") + require.NoError(t, err) + + ldt := &LocalDateTime{ts} + + str, err := json.MarshalIndent(ldt, "", "") + require.NoError(t, err) + + require.Equal(t, "\"2025-09-25T16:26:14\"", string(str)) +} + +func TestLocalDateTimeUnmarshalling(t *testing.T) { + ts, err := time.Parse(RFC3339Local, "2025-09-25T18:26:14") + require.NoError(t, err) + u := ts.UTC() + + var result LocalDateTime + err = json.Unmarshal([]byte("\"2025-09-25T18:26:14Z\""), &result) + require.NoError(t, err) + + require.Equal(t, result, LocalDateTime{u}) +} +*/ + +func TestRelation(t *testing.T) { + jsoneq(t, `{ + "@type": "Relation", + "relation": { + "first": true, + "parent": true + } + }`, Relation{ + Type: RelationType, + Relation: map[Relationship]bool{ + RelationshipFirst: true, + RelationshipParent: true, + }, + }) +} + +func TestLink(t *testing.T) { + jsoneq(t, `{ + "@type": "Link", + "href": "https://opencloud.eu.example.com/f72ae875-40be-48a4-84ff-aea9aed3e085.png", + "contentType": "image/png", + "size": 128912, + "rel": "icon", + "display": "thumbnail", + "title": "the logo" + }`, Link{ + Type: LinkType, + Href: "https://opencloud.eu.example.com/f72ae875-40be-48a4-84ff-aea9aed3e085.png", + ContentType: "image/png", + Size: 128912, + Rel: RelIcon, + Display: DisplayThumbnail, + Title: "the logo", + }) +} + +func TestLocation(t *testing.T) { + jsoneq(t, `{ + "@type": "Location", + "name": "The Eiffel 
Tower", + "locationTypes": { + "landmark-address": true, + "industrial": true + }, + "coordinates": "geo:48.8559324,2.2932441", + "links": { + "l1": { + "@type": "Link", + "href": "https://upload.wikimedia.org/wikipedia/commons/f/fd/Eiffel_blue.PNG", + "contentType": "image/png", + "size": 12345, + "rel": "icon", + "display": "A blue Eiffel tower", + "title": "Blue Eiffel Tower" + } + } + }`, Location{ + Type: LocationType, + Name: "The Eiffel Tower", + LocationTypes: map[LocationTypeOption]bool{ + LocationTypeOptionLandmarkAddress: true, + LocationTypeOptionIndustrial: true, + }, + Coordinates: "geo:48.8559324,2.2932441", + Links: map[string]Link{ + "l1": { + Type: LinkType, + Href: "https://upload.wikimedia.org/wikipedia/commons/f/fd/Eiffel_blue.PNG", + ContentType: "image/png", + Size: 12345, + Rel: RelIcon, + Display: "A blue Eiffel tower", + Title: "Blue Eiffel Tower", + }, + }, + }) +} + +func TestVirtualLocation(t *testing.T) { + jsoneq(t, `{ + "@type": "VirtualLocation", + "name": "OpenTalk", + "uri": "https://opentalk.eu", + "features": { + "video": true, + "screen": true, + "audio": true + } + }`, VirtualLocation{ + Type: VirtualLocationType, + Name: "OpenTalk", + Uri: "https://opentalk.eu", + Features: map[VirtualLocationFeature]bool{ + VirtualLocationFeatureVideo: true, + VirtualLocationFeatureScreen: true, + VirtualLocationFeatureAudio: true, + }, + }) +} + +func TestNDay(t *testing.T) { + jsoneq(t, `{ + "@type": "NDay", + "day": "fr", + "nthOfPeriod": -1 + }`, NDay{ + Type: NDayType, + Day: DayOfWeekFriday, + NthOfPeriod: -1, + }) +} + +func TestRecurrenceRule(t *testing.T) { + ts, err := time.Parse(time.RFC3339, "2025-09-25T18:26:14+02:00") + require.NoError(t, err) + ts = ts.UTC() + l := LocalDateTime("2025-09-25T16:26:14") + + jsoneq(t, `{ + "@type": "RecurrenceRule", + "frequency": "daily", + "interval": 1, + "rscale": "iso8601", + "skip": "forward", + "firstDayOfWeek": "mo", + "byDay": [ + {"@type": "NDay", "day": "mo", "nthOfPeriod": -1}, + {"@type": "NDay", "day": "tu"}, + {"day": "we"} + ], + "byMonthDay": [1, 10, 31], + "byMonth": ["1", "31L"], + "byYearDay": [-1, 366], + "byWeekNo": [-53, 53], + "byHour": [0, 23], + "byMinute": [0, 59], + "bySecond": [0, 39], + "bySetPosition": [-3, 3], + "count": 2, + "until": "2025-09-25T16:26:14" + }`, RecurrenceRule{ + Type: RecurrenceRuleType, + Frequency: FrequencyDaily, + Interval: 1, + Rscale: RscaleIso8601, + Skip: SkipForward, + FirstDayOfWeek: DayOfWeekMonday, + ByDay: []NDay{ + { + Type: NDayType, + Day: DayOfWeekMonday, + NthOfPeriod: -1, + }, + { + Type: NDayType, + Day: DayOfWeekTuesday, + }, + { + Day: DayOfWeekWednesday, + NthOfPeriod: 0, + }, + }, + ByMonthDay: []int{1, 10, 31}, + ByMonth: []string{"1", "31L"}, + ByYearDay: []int{-1, 366}, + ByWeekNo: []int{-53, 53}, + ByHour: []uint{0, 23}, + ByMinute: []uint{0, 59}, + BySecond: []uint{0, 39}, + BySetPosition: []int{-3, 3}, + Count: 2, + Until: &l, + }) +} + +func TestParticipant(t *testing.T) { + ts, err := time.Parse(time.RFC3339, "2025-09-25T18:26:14+02:00") + require.NoError(t, err) + ts = ts.UTC() + + ts2, err := time.Parse(time.RFC3339, "2025-09-29T14:32:19+02:00") + require.NoError(t, err) + ts2 = ts2.UTC() + + jsoneq(t, `{ + "@type": "Participant", + "name": "Camina Drummer", + "email": "camina@opa.org", + "description": "Camina Drummer is a Belter serving as the current President of the Transport Union.", + "calendarAddress": "cdrummer@itip.opa.org", + "kind": "individual", + "roles": { + "owner": true, + "chair": true + }, + "locationId": 
"98faaa01-b6db-4ddb-9574-e28ab83104e6", + "language": "en-JM", + "participationStatus": "accepted", + "participationComment": "always there", + "expectReply": true, + "scheduleAgent": "server", + "scheduleForceSend": true, + "scheduleSequence": 3, + "scheduleStatus": [ + "3.1", + "2.0" + ], + "scheduleUpdated": "2025-09-25T16:26:14Z", + "sentBy": "adawes@opa.org", + "invitedBy": "346be402-c340-4f3f-ac51-e4aa9955af4f", + "delegatedTo": { + "93230b90-70c6-4027-b2c1-3629877bfea5": true, + "f5fae398-cfa3-4873-bbc7-0ca9d51de5b0": true + }, + "delegatedFrom": { + "a9c1c1a1-fecf-4214-a803-1ee209e2dbec": true + }, + "memberOf": { + "0f41473b-0edd-494d-b346-8d039009a2a5": true + }, + "links":{ + "l1": { + "@type": "Link", + "href": "https://opa.org/opa.png", + "contentType": "image/png", + "size": 182912, + "rel": "icon", + "display": "Logo", + "title": "OPA" + } + }, + "progress": "in-process", + "progressUpdated": "2025-09-29T12:32:19Z", + "percentComplete": 42 + }`, Participant{ + Type: ParticipantType, + Name: "Camina Drummer", + Email: "camina@opa.org", + Description: "Camina Drummer is a Belter serving as the current President of the Transport Union.", + CalendarAddress: "cdrummer@itip.opa.org", + Kind: ParticipantKindIndividual, + Roles: map[Role]bool{ + RoleOwner: true, + RoleChair: true, + }, + LocationId: "98faaa01-b6db-4ddb-9574-e28ab83104e6", + Language: "en-JM", + ParticipationStatus: ParticipationStatusAccepted, + ParticipationComment: "always there", + ExpectReply: true, + ScheduleAgent: ScheduleAgentServer, + ScheduleForceSend: true, + ScheduleSequence: 3, + ScheduleStatus: []string{ + "3.1", + "2.0", + }, + ScheduleUpdated: ts, + SentBy: "adawes@opa.org", + InvitedBy: "346be402-c340-4f3f-ac51-e4aa9955af4f", + DelegatedTo: map[string]bool{ + "93230b90-70c6-4027-b2c1-3629877bfea5": true, + "f5fae398-cfa3-4873-bbc7-0ca9d51de5b0": true, + }, + DelegatedFrom: map[string]bool{ + "a9c1c1a1-fecf-4214-a803-1ee209e2dbec": true, + }, + MemberOf: map[string]bool{ + "0f41473b-0edd-494d-b346-8d039009a2a5": true, + }, + Links: map[string]Link{ + "l1": { + Type: LinkType, + Href: "https://opa.org/opa.png", + ContentType: "image/png", + Size: 182912, + Rel: RelIcon, + Display: "Logo", + Title: "OPA", + }, + }, + Progress: ProgressInProcess, + ProgressUpdated: ts2, + PercentComplete: 42, + }) +} + +func TestAlertWithAbsoluteTrigger(t *testing.T) { + ts, err := time.Parse(time.RFC3339, "2025-09-25T18:26:14+02:00") + require.NoError(t, err) + ts = ts.UTC() + + jsoneq(t, `{ + "@type": "Alert", + "trigger": { + "@type": "AbsoluteTrigger", + "when": "2025-09-25T16:26:14Z" + }, + "acknowledged": "2025-09-25T16:26:14Z", + "relatedTo": { + "a2e729eb-7d9c-4ea7-8514-93d2590ef0a2": { + "@type": "Relation", + "relation": { + "first": true + } + } + }, + "action": "email" + }`, Alert{ + Type: AlertType, + Trigger: &AbsoluteTrigger{ + Type: AbsoluteTriggerType, + When: ts, + }, + Acknowledged: ts, + RelatedTo: map[string]Relation{ + "a2e729eb-7d9c-4ea7-8514-93d2590ef0a2": { + Type: RelationType, + Relation: map[Relationship]bool{ + RelationshipFirst: true, + }, + }, + }, + Action: AlertActionEmail, + }) +} + +func TestAlertWithOffsetTrigger(t *testing.T) { + ts, err := time.Parse(time.RFC3339, "2025-09-25T18:26:14+02:00") + require.NoError(t, err) + ts = ts.UTC() + + jsoneq(t, `{ + "@type": "Alert", + "trigger": { + "@type": "OffsetTrigger", + "offset": "-PT5M", + "relativeTo": "end" + }, + "acknowledged": "2025-09-25T16:26:14Z", + "relatedTo": { + "a2e729eb-7d9c-4ea7-8514-93d2590ef0a2": { + "@type": "Relation", 
+ "relation": { + "first": true + } + } + }, + "action": "email" + }`, Alert{ + Type: AlertType, + Trigger: &OffsetTrigger{ + Type: OffsetTriggerType, + Offset: "-PT5M", + RelativeTo: RelativeToEnd, + }, + Acknowledged: ts, + RelatedTo: map[string]Relation{ + "a2e729eb-7d9c-4ea7-8514-93d2590ef0a2": { + Type: RelationType, + Relation: map[Relationship]bool{ + RelationshipFirst: true, + }, + }, + }, + Action: AlertActionEmail, + }) +} + +func TestAlertWithUnknownTrigger(t *testing.T) { + ts, err := time.Parse(time.RFC3339, "2025-09-25T18:26:14+02:00") + require.NoError(t, err) + ts = ts.UTC() + + jsoneq(t, `{ + "@type": "Alert", + "trigger": { + "@type": "XYZTRIGGER", + "abc": 123, + "xyz": "zzz" + }, + "acknowledged": "2025-09-25T16:26:14Z", + "relatedTo": { + "a2e729eb-7d9c-4ea7-8514-93d2590ef0a2": { + "@type": "Relation", + "relation": { + "first": true + } + } + }, + "action": "email" + }`, Alert{ + Type: AlertType, + Trigger: &UnknownTrigger{ + "@type": "XYZTRIGGER", + "abc": 123.0, + "xyz": "zzz", + }, + Acknowledged: ts, + RelatedTo: map[string]Relation{ + "a2e729eb-7d9c-4ea7-8514-93d2590ef0a2": { + Type: RelationType, + Relation: map[Relationship]bool{ + RelationshipFirst: true, + }, + }, + }, + Action: AlertActionEmail, + }) +} + +func TestTimeZoneRule(t *testing.T) { + l1 := LocalDateTime("2025-09-25T16:26:14") + + jsoneq(t, `{ + "@type": "TimeZoneRule", + "start": "2025-09-25T16:26:14", + "offsetFrom": "-0200", + "offsetTo": "+0200", + "recurrenceRules": [ + { + "@type": "RecurrenceRule", + "frequency": "weekly", + "interval": 2, + "rscale": "iso8601", + "skip": "omit", + "firstDayOfWeek": "mo", + "byDay": [ + { + "@type": "NDay", + "day": "fr" + } + ], + "byHour": [14], + "byMinute": [0], + "count": 4 + } + ], + "recurrenceOverrides": { + "2025-09-25T16:26:14": {} + }, + "names": { + "CEST": true + }, + "comments": ["this is a comment"] + }`, TimeZoneRule{ + Type: TimeZoneRuleType, + Start: l1, + OffsetFrom: "-0200", + OffsetTo: "+0200", + RecurrenceRules: []RecurrenceRule{ + { + Type: RecurrenceRuleType, + Frequency: FrequencyWeekly, + Interval: 2, + Rscale: RscaleIso8601, + Skip: SkipOmit, + FirstDayOfWeek: DayOfWeekMonday, + ByDay: []NDay{ + { + Type: NDayType, + Day: DayOfWeekFriday, + }, + }, + ByHour: []uint{ + 14, + }, + ByMinute: []uint{ + 0, + }, + Count: 4, + }, + }, + RecurrenceOverrides: map[LocalDateTime]PatchObject{ + l1: {}, + }, + Names: map[string]bool{ + "CEST": true, + }, + Comments: []string{ + "this is a comment", + }, + }) +} + +func TestTimeZone(t *testing.T) { + ts, err := time.Parse(time.RFC3339, "2025-09-25T16:26:14+02:00") + require.NoError(t, err) + ts = ts.UTC() + l := LocalDateTime("2025-09-25T16:26:14") + + jsoneq(t, `{ + "@type": "TimeZone", + "tzId": "cest", + "updated": "2025-09-25T14:26:14Z", + "url": "https://timezones.net/cest", + "validUntil": "2025-09-25T14:26:14Z", + "aliases": { + "cet": true + }, + "standard": [{ + "@type": "TimeZoneRule", + "start": "2025-09-25T16:26:14", + "offsetFrom": "-0200", + "offsetTo": "+1245" + }], + "daylight": [{ + "@type": "TimeZoneRule", + "start": "2025-09-25T16:26:14", + "offsetFrom": "-0200", + "offsetTo": "+1245" + }] + }`, TimeZone{ + Type: TimeZoneType, + TzId: "cest", + Updated: ts, + Url: "https://timezones.net/cest", + ValidUntil: ts, + Aliases: map[string]bool{ + "cet": true, + }, + Standard: []TimeZoneRule{ + { + Type: TimeZoneRuleType, + Start: l, + OffsetFrom: "-0200", + OffsetTo: "+1245", + }, + }, + Daylight: []TimeZoneRule{ + { + Type: TimeZoneRuleType, + Start: l, + OffsetFrom: "-0200", + 
OffsetTo: "+1245", + }, + }, + }) +} + +func TestEvent(t *testing.T) { + local1 := "2025-09-25T16:26:14" + ts1, err := time.Parse(time.RFC3339, local1+"+02:00") + require.NoError(t, err) + ts1 = ts1.UTC() + + local2 := "2025-09-29T13:53:01" + ts2, err := time.Parse(time.RFC3339, local2+"+02:00") + require.NoError(t, err) + ts2 = ts2.UTC() + + l := LocalDateTime("2025-09-25T16:26:14") + + jsoneq(t, `{ + "@type": "Event", + "start": "2025-09-25T16:26:14", + "duration": "PT10M", + "status": "confirmed", + "uid": "b422cfec-f7b4-4e04-8ec6-b794007f63f1", + "prodId": "OpenCloud 1.0", + "created": "2025-09-25T16:26:14", + "updated": "2025-09-29T13:53:01", + "title": "End of year party", + "description": "It's the party at the end of the year.", + "descriptionContentType": "text/plain", + "links": { + "l1": { + "@type": "Link", + "href": "https://opencloud.eu/eoy-party/2025", + "contentType": "text/html", + "rel": "about" + } + }, + "locale": "en-GB", + "keywords": { + "k1": true + }, + "categories": { + "cat": true + }, + "color": "oil", + "relatedTo": { + "a": { + "@type": "Relation", + "relation": { + "next": true + } + } + }, + "sequence": 3, + "showWithoutTime": true, + "locations": { + "loc1": { + "@type": "Location", + "name": "Steel Cactus Mexican Grill", + "locationTypes": { + "bar": true + }, + "coordinates": "geo:16.7685657,-4.8629852", + "links": { + "l1": { + "@type": "Link", + "href": "https://mars.gov/bars/steelcactus", + "rel": "about" + } + } + } + } + }`, Event{ + Type: EventType, + Start: l, + Duration: "PT10M", + Status: "confirmed", + Object: Object{ + CommonObject: CommonObject{ + Uid: "b422cfec-f7b4-4e04-8ec6-b794007f63f1", + ProdId: "OpenCloud 1.0", + Created: UTCDateTime(local1), + Updated: UTCDateTime(local2), + Title: "End of year party", + Description: "It's the party at the end of the year.", + DescriptionContentType: "text/plain", + Links: map[string]Link{ + "l1": { + Type: LinkType, + Href: "https://opencloud.eu/eoy-party/2025", + ContentType: "text/html", + Rel: RelAbout, + }, + }, + Locale: "en-GB", + Keywords: map[string]bool{ + "k1": true, + }, + Categories: map[string]bool{ + "cat": true, + }, + Color: "oil", + }, + RelatedTo: map[string]Relation{ + "a": { + Type: RelationType, + Relation: map[Relationship]bool{ + RelationshipNext: true, + }, + }, + }, + Sequence: 3, + ShowWithoutTime: true, + Locations: map[string]Location{ + "loc1": { + Type: LocationType, + Name: "Steel Cactus Mexican Grill", + LocationTypes: map[LocationTypeOption]bool{ + LocationTypeOptionBar: true, + }, + Coordinates: "geo:16.7685657,-4.8629852", + Links: map[string]Link{ + "l1": { + Type: LinkType, + Href: "https://mars.gov/bars/steelcactus", + Rel: RelAbout, + }, + }, + }, + }, + }, + }) +} diff --git a/pkg/jscontact/jscontact_model.go b/pkg/jscontact/jscontact_model.go new file mode 100644 index 0000000000..36cfbd8739 --- /dev/null +++ b/pkg/jscontact/jscontact_model.go @@ -0,0 +1,2450 @@ +// Implementation of the RFC 9553 JSContact data model, with JMAP Contacts additions. +// +// https://www.rfc-editor.org/rfc/rfc9553 +// +// https://jmap.io/spec-contacts.html +package jscontact + +import ( + "encoding/json" + "fmt" + "time" +) + +// The kind of the name component. 
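+//
+// As a non-normative illustration (example values only), "Mr. John Q. Public, Esq." could be
+// decomposed into components whose kinds are taken from the list below:
+//
+//	"name": {
+//		"components": [
+//			{ "kind": "title", "value": "Mr." },
+//			{ "kind": "given", "value": "John" },
+//			{ "kind": "given2", "value": "Q." },
+//			{ "kind": "surname", "value": "Public" },
+//			{ "kind": "credential", "value": "Esq." }
+//		]
+//	}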
+// +// !- `title`: an honorific title or prefix, e.g., `Mr.`, `Ms.`, or `Dr.` +// !- `given`: a given name, also known as "first name" or "personal name" +// !- `given2`: a name that appears between the given and surname such as a middle name or patronymic name +// !- `surname`: a surname, also known as "last name" or "family name" +// !- `surname2`: a secondary surname (used in some cultures), also known as "maternal surname" +// !- `credential`: a credential, also known as "accreditation qualifier" or "honorific suffix", e.g., `B.A.`, `Esq.` +// !- `generation`: a generation marker or qualifier, e.g., `Jr.` or `III` +// !- `separator`: a formatting separator between two ordered name non-separator components; the value property of the component includes the verbatim separator, for example, a hyphen character or even an empty string. This value has higher precedence than the defaultSeparator property of the Name. Implementations MUST NOT insert two consecutive separator components; instead, they SHOULD insert a single separator component with the combined value; this component kind MUST NOT be set if the `Name` `isOrdered` property value is `false` +type NameComponentKind string + +// The kind of the address component. +// +// The enumerated values are: +// ! `room`: the room, suite number, or identifier +// ! `apartment`: the extension designation such as the apartment number, unit, or box number +// ! `floor`: the floor or level the address is located on +// ! `building`: the building, tower, or condominium the address is located in +// ! `number`: the street number, e.g., `"123"`; this value is not restricted to numeric values and can include any value such +// as number ranges (`"112-10"`), grid style (`"39.2 RD"`), alphanumerics (`"N6W23001"`), or fractionals (`"123 1/2"`) +// ! `name`: the street name +// ! `block`: the block name or number +// ! `subdistrict`: the subdistrict, ward, or other subunit of a district +// ! `district`: the district name +// ! `locality`: the municipality, city, town, village, post town, or other locality +// ! `region`: the administrative area such as province, state, prefecture, county, or canton +// ! `postcode`: the postal code, post code, ZIP code, or other short code associated with the address by the relevant country's postal system +// ! `country`: the country name +// ! `direction`: the cardinal direction or quadrant, e.g., "north" +// ! `landmark`: the publicly known prominent feature that can substitute the street name and number, e.g., "White House" or "Taj Mahal" +// ! `postOfficeBox`: the post office box number or identifier +// ! `separator`: a formatting separator between two ordered address non-separator components; the value property of the component includes the +// verbatim separator, for example, a hyphen character or even an empty string; this value has higher precedence than the `defaultSeparator` property +// of the `Address`; implementations MUST NOT insert two consecutive separator components; instead, they SHOULD insert a single separator component +// with the combined value; this component kind MUST NOT be set if the `Address` `isOrdered` property value is `false`. +type AddressComponentKind string + +// The relationship of the related Card to the Card, defined as a set of relation types. +// +// The keys in the set define the relation type; the values for each key in the set MUST be "true". +// +// The relationship between the two objects is undefined if the set is empty. 
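+//
+// As a non-normative illustration (UID invented), a `relatedTo` entry marking another card as both
+// a colleague and a friend could look like:
+//
+//	"relatedTo": {
+//		"urn:uuid:f81d4fae-7dec-11d0-a765-00a0c91e6bf6": {
+//			"relation": { "colleague": true, "friend": true }
+//		}
+//	}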
+//
+// The initial list of enumerated relation types matches the IANA-registered TYPE parameter
+// values of the vCard RELATED property ([Section 6.6.6 of RFC6350]):
+// !- `acquaintance`
+// !- `agent`
+// !- `child`
+// !- `co-resident`
+// !- `co-worker`
+// !- `colleague`
+// !- `contact`
+// !- `crush`
+// !- `date`
+// !- `emergency`
+// !- `friend`
+// !- `kin`
+// !- `me`
+// !- `met`
+// !- `muse`
+// !- `neighbor`
+// !- `parent`
+// !- `sibling`
+// !- `spouse`
+// !- `sweetheart`
+//
+// [Section 6.6.6 of RFC6350]: https://www.rfc-editor.org/rfc/rfc6350.html#section-6.6.6
+type Relationship string
+
+// The enumerated common context values are:
+// !- `private`: the contact information that may be used in a private context.
+// !- `work`: the contact information that may be used in a professional context.
+type MediaContext string
+
+// The enumerated common context values are:
+// !- `private`: the contact information that may be used in a private context.
+// !- `work`: the contact information that may be used in a professional context.
+type NicknameContext string
+
+// The contexts in which to use this address.
+//
+// The boolean value MUST be `true`.
+//
+// In addition to the common contexts, allowed key values are:
+// ! `billing`: an address to be used for billing
+// ! `delivery`: an address to be used for delivering physical items
+type AddressContext string
+
+// The enumerated common context values are:
+// !- `private`: the contact information that may be used in a private context.
+// !- `work`: the contact information that may be used in a professional context.
+type DirectoryContext string
+
+// The enumerated common context values are:
+// !- `private`: the contact information that may be used in a private context.
+// !- `work`: the contact information that may be used in a professional context.
+type EmailAddressContext string
+
+// The enumerated common context values are:
+// !- `private`: the contact information that may be used in a private context.
+// !- `work`: the contact information that may be used in a professional context.
+type OnlineServiceContext string
+
+// The enumerated common context values are:
+// !- `private`: the contact information that may be used in a private context.
+// !- `work`: the contact information that may be used in a professional context.
+type OrganizationContext string
+
+// The enumerated common context values are:
+// !- `private`: the contact information that may be used in a private context.
+// !- `work`: the contact information that may be used in a professional context.
+type PronounsContext string
+
+// The enumerated common context values are:
+// !- `private`: the contact information that may be used in a private context.
+// !- `work`: the contact information that may be used in a professional context.
+type PhoneContext string
+
+// The set of contact features that the phone number may be used for.
+//
+// The set is represented as an object, with each key being a method type.
+//
+// The boolean value MUST be `true`.
+//
+// The enumerated values are:
+// !- `mobile`: this number is for a mobile phone
+// !- `voice`: this number supports calling by voice
+// !- `text`: this number supports text messages (SMS)
+// !- `video`: this number supports video conferencing
+// !- `main-number`: this number is a main phone number such as the number of the front desk at a company, as opposed to a direct-dial number of an individual employee
+// !- `textphone`: this number is for a device for people with hearing or speech difficulties
+// !- `fax`: this number supports sending faxes
+// !- `pager`: this number is for a pager or beeper
+type PhoneFeature string
+
+// The organizational or situational kind of the title.
+//
+// Some organizations and individuals distinguish between titles as organizational
+// positions and roles as more temporary assignments such as in project management.
+//
+// The enumerated values are:
+// !- `title`
+// !- `role`
+type TitleKind string
+
+// The grammatical gender to use in salutations and other grammatical constructs.
+//
+// For example, the German language distinguishes by grammatical gender in salutations such as
+// `Sehr geehrte` (feminine) and `Sehr geehrter` (masculine).
+//
+// The enumerated values are:
+// !- `animate`
+// !- `common`
+// !- `feminine`
+// !- `inanimate`
+// !- `masculine`
+// !- `neuter`
+//
+// Note that the grammatical gender does not allow inferring the gender identities or assigned
+// sex of the contact.
+type GrammaticalGenderType string
+
+// The kind of anniversary.
+//
+// The enumerated values are:
+// ! `birth`: a birthday anniversary
+// ! `death`: a deathday anniversary
+// ! `wedding`: a wedding day anniversary
+type AnniversaryKind string
+
+// The enumerated common context values are:
+// !- `private`: the contact information that may be used in a private context.
+// !- `work`: the contact information that may be used in a professional context.
+type LanguagePrefContext string
+
+// The enumerated common context values are:
+// !- `private`: the contact information that may be used in a private context.
+// !- `work`: the contact information that may be used in a professional context.
+type SchedulingAddressContext string
+
+// The kind of personal information.
+//
+// The enumerated values are:
+// ! `expertise`: a field of expertise or a credential
+// ! `hobby`: a hobby
+// ! `interest`: an interest
+type PersonalInfoKind string
+
+// The level of expertise or engagement in hobby or interest.
+//
+// The enumerated values are:
+// ! `high`
+// ! `medium`
+// ! `low`
+type PersonalInfoLevel string
+
+// The kind of the entity the Card represents (default: `individual`).
+//
+// Values are:
+// !- `individual`: a single person
+// !- `group`: a group of people or entities
+// !- `org`: an organization
+// !- `location`: a named location
+// !- `device`: a device such as an appliance, a computer, or a network element
+// !- `application`: a software application
+//
+// example: individual
+type ContactCardKind string
+
+// The kind of the `Directory` resource.
+//
+// The allowed values are defined in the property definition that makes use of the Resource type.
+//
+// Some property definitions may change this property from being optional to mandatory.
+//
+// A contact card with a `kind` property equal to `group` represents a group of contacts.
+//
+// Clients often present these separately from other contact cards.
+// +// The `members` property, as defined in [RFC 9553, Section 2.1.6], contains a set of UIDs for other +// contacts that are the members of this group. +// +// Clients should consider the group to contain any `ContactCard` with a matching UID, from +// any account they have access to with support for the `urn:ietf:params:jmap:contacts` capability. +// +// UIDs that cannot be found SHOULD be ignored but preserved. +// +// For example, suppose a user adds contacts from a shared address book to their private group, then +// temporarily loses access to this address book. The UIDs cannot be resolved so the contacts will +// disappear from the group. However, if they are given permission to access the data again the UIDs +// will be found and the contacts will reappear. +// +// [RFC 9553, Section 2.1.8]: https://www.rfc-editor.org/rfc/rfc9553#members +type DirectoryKind string + +// The kind of the `Calendar` resource. +// +// The allowed values are defined in the property definition that makes use of the Resource type. +// +// Some property definitions may change this property from being optional to mandatory. +// +// A contact card with a `kind` property equal to `group` represents a group of contacts. +// +// Clients often present these separately from other contact cards. +// +// The `members` property, as defined in [RFC 9553, Section 2.1.6], contains a set of UIDs for other +// contacts that are the members of this group. +// +// Clients should consider the group to contain any `ContactCard` with a matching UID, from +// any account they have access to with support for the `urn:ietf:params:jmap:contacts` capability. +// +// UIDs that cannot be found SHOULD be ignored but preserved. +// +// For example, suppose a user adds contacts from a shared address book to their private group, then +// temporarily loses access to this address book. The UIDs cannot be resolved so the contacts will +// disappear from the group. However, if they are given permission to access the data again the UIDs +// will be found and the contacts will reappear. +// +// [RFC 9553, Section 2.1.8]: https://www.rfc-editor.org/rfc/rfc9553#members +type CalendarKind string + +// The kind of the `Link` resource. +// +// The allowed values are defined in the property definition that makes use of the Resource type. +// +// Some property definitions may change this property from being optional to mandatory. +// +// A contact card with a `kind` property equal to `group` represents a group of contacts. +// +// Clients often present these separately from other contact cards. +// +// The `members` property, as defined in [RFC 9553, Section 2.1.6], contains a set of UIDs for other +// contacts that are the members of this group. +// +// Clients should consider the group to contain any `ContactCard` with a matching UID, from +// any account they have access to with support for the `urn:ietf:params:jmap:contacts` capability. +// +// UIDs that cannot be found SHOULD be ignored but preserved. +// +// For example, suppose a user adds contacts from a shared address book to their private group, then +// temporarily loses access to this address book. The UIDs cannot be resolved so the contacts will +// disappear from the group. However, if they are given permission to access the data again the UIDs +// will be found and the contacts will reappear. +// +// [RFC 9553, Section 2.1.8]: https://www.rfc-editor.org/rfc/rfc9553#members +type LinkKind string + +// The kind of the `Media` resource. 
+// +// The allowed values are defined in the property definition that makes use of the Resource type. +// +// Some property definitions may change this property from being optional to mandatory. +// +// A contact card with a `kind` property equal to `group` represents a group of contacts. +// +// Clients often present these separately from other contact cards. +// +// The `members` property, as defined in [RFC 9553, Section 2.1.6], contains a set of UIDs for other +// contacts that are the members of this group. +// +// Clients should consider the group to contain any `ContactCard` with a matching UID, from +// any account they have access to with support for the `urn:ietf:params:jmap:contacts` capability. +// +// UIDs that cannot be found SHOULD be ignored but preserved. +// +// For example, suppose a user adds contacts from a shared address book to their private group, then +// temporarily loses access to this address book. The UIDs cannot be resolved so the contacts will +// disappear from the group. However, if they are given permission to access the data again the UIDs +// will be found and the contacts will reappear. +// +// [RFC 9553, Section 2.1.8]: https://www.rfc-editor.org/rfc/rfc9553#members +type MediaKind string + +// The contexts in which to use this resource. +// +// The contexts in which to use the contact information. +// +// For example, someone might have distinct phone numbers for `work` and `private` contexts and may set the +// desired context on the respective phone number in the `phones` property. +// +// This section defines common contexts. +// +// Additional contexts may be defined in the properties or data types that make use of this property. +// +// The enumerated common context values are: +// !- `private`: the contact information that may be used in a private context. +// !- `work`: the contact information that may be used in a professional context. +type CalendarContext string + +// The contexts in which to use this resource. +// +// The contexts in which to use the contact information. +// +// For example, someone might have distinct phone numbers for `work` and `private` contexts and may set the +// desired context on the respective phone number in the `phones` property. +// +// This section defines common contexts. +// +// Additional contexts may be defined in the properties or data types that make use of this property. +// +// The enumerated common context values are: +// !- `private`: the contact information that may be used in a private context. +// !- `work`: the contact information that may be used in a professional context. +type CryptoKeyContext string + +// The contexts in which to use this resource. +// +// The contexts in which to use the contact information. +// +// For example, someone might have distinct phone numbers for `work` and `private` contexts and may set the +// desired context on the respective phone number in the `phones` property. +// +// This section defines common contexts. +// +// Additional contexts may be defined in the properties or data types that make use of this property. +// +// The enumerated common context values are: +// !- `private`: the contact information that may be used in a private context. +// !- `work`: the contact information that may be used in a professional context. +type LinkContext string + +// The JSContact version of this Card. +// +// The value MUST be one of the IANA-registered JSContact Version values for the version property. 
+// +// example: 1.0 +type JSContactVersion string + +type TypeOfAddress string +type TypeOfAddressComponent string +type TypeOfAnniversary string +type TypeOfAuthor string +type TypeOfContactCard string +type TypeOfCalendar string +type TypeOfCryptoKey string +type TypeOfDirectory string +type TypeOfEmailAddress string +type TypeOfLanguagePref string +type TypeOfLink string +type TypeOfMedia string +type TypeOfName string +type TypeOfNameComponent string +type TypeOfNickname string +type TypeOfNote string +type TypeOfOnlineService string +type TypeOfOrganization string +type TypeOfOrgUnit string +type TypeOfPartialDate string +type TypeOfPersonalInfo string +type TypeOfPhone string +type TypeOfPronouns string +type TypeOfRelation string +type TypeOfSchedulingAddress string +type TypeOfSpeakToAs string +type TypeOfTimestamp string +type TypeOfTitle string + +const ( + JSContactVersion_1_0 = JSContactVersion("1.0") + + // Types. + AddressType = TypeOfAddress("Address") + AddressComponentType = TypeOfAddressComponent("AddressComponent") + AnniversaryType = TypeOfAnniversary("Anniversary") + AuthorType = TypeOfAuthor("Author") + ContactCardType = TypeOfContactCard("Card") + CalendarType = TypeOfCalendar("Calendar") + CryptoKeyType = TypeOfCryptoKey("CryptoKey") + DirectoryType = TypeOfDirectory("Directory") + EmailAddressType = TypeOfEmailAddress("EmailAddress") + LanguagePrefType = TypeOfLanguagePref("LanguagePref") + LinkType = TypeOfLink("Link") + MediaType = TypeOfMedia("Media") + NameType = TypeOfName("Name") + NameComponentType = TypeOfNameComponent("NameComponent") + NicknameType = TypeOfNickname("Nickname") + NoteType = TypeOfNote("Note") + OnlineServiceType = TypeOfOnlineService("OnlineService") + OrganizationType = TypeOfOrganization("Organization") + OrgUnitType = TypeOfOrgUnit("OrgUnit") + PartialDateType = TypeOfPartialDate("PartialDate") + PersonalInfoType = TypeOfPersonalInfo("PersonalInfo") + PhoneType = TypeOfPhone("Phone") + PronounsType = TypeOfPronouns("Pronouns") + RelationType = TypeOfRelation("Relation") + SchedulingAddressType = TypeOfSchedulingAddress("SchedulingAddress") + SpeakToAsType = TypeOfSpeakToAs("SpeakToAs") + TimestampType = TypeOfTimestamp("Timestamp") + TitleType = TypeOfTitle("Title") + + // Kinds. 
+ + AddressComponentKindRoom = AddressComponentKind("room") + AddressComponentKindApartment = AddressComponentKind("apartment") + AddressComponentKindFloor = AddressComponentKind("floor") + AddressComponentKindBuilding = AddressComponentKind("building") + AddressComponentKindNumber = AddressComponentKind("number") + AddressComponentKindName = AddressComponentKind("name") + AddressComponentKindBlock = AddressComponentKind("block") + AddressComponentKindSubdistrict = AddressComponentKind("subdistrict") + AddressComponentKindDistrict = AddressComponentKind("district") + AddressComponentKindLocality = AddressComponentKind("locality") + AddressComponentKindRegion = AddressComponentKind("region") + AddressComponentKindPostcode = AddressComponentKind("postcode") + AddressComponentKindCountry = AddressComponentKind("country") + AddressComponentKindDirection = AddressComponentKind("direction") + AddressComponentKindLandmark = AddressComponentKind("landmark") + AddressComponentKindPostOfficeBox = AddressComponentKind("postOfficeBox") + AddressComponentKindSeparator = AddressComponentKind("separator") + + AnniversaryKindBirth = AnniversaryKind("birth") + AnniversaryKindDeath = AnniversaryKind("death") + AnniversaryKindWedding = AnniversaryKind("wedding") + + CalendarKindCalendar = CalendarKind("calendar") + CalendarKindFreeBusy = CalendarKind("freeBusy") + + ContactCardKindIndividual = ContactCardKind("individual") + ContactCardKindGroup = ContactCardKind("group") + ContactCardKindOrg = ContactCardKind("org") + ContactCardKindLocation = ContactCardKind("location") + ContactCardKindDevice = ContactCardKind("device") + ContactCardKindApplication = ContactCardKind("application") + + DirectoryKindDirectory = DirectoryKind("directory") + DirectoryKindEntry = DirectoryKind("entry") + + LinkKindContact = LinkKind("contact") + + MediaKindPhoto = MediaKind("photo") + MediaKindSound = MediaKind("sound") + MediaKindLogo = MediaKind("logo") + + NameComponentKindTitle = NameComponentKind("title") + NameComponentKindGiven = NameComponentKind("given") + NameComponentKindGiven2 = NameComponentKind("given2") + NameComponentKindSurname = NameComponentKind("surname") + NameComponentKindSurname2 = NameComponentKind("surname2") + NameComponentKindCredential = NameComponentKind("credential") + NameComponentKindGeneration = NameComponentKind("generation") + NameComponentKindSeparator = NameComponentKind("separator") + + PersonalInfoKindExpertise = PersonalInfoKind("expertise") + PersonalInfoKindHobby = PersonalInfoKind("hobby") + PersonalInfoKindInterest = PersonalInfoKind("interest") + + TitleKindTitle = TitleKind("title") + TitleKindRole = TitleKind("role") + + // Contexts. 
+ + AddressContextBilling = AddressContext("billing") + AddressContextDelivery = AddressContext("delivery") + AddressContextPrivate = AddressContext("private") + AddressContextWork = AddressContext("work") + + CalendarContextPrivate = CalendarContext("private") + CalendarContextWork = CalendarContext("work") + + CryptoKeyContextPrivate = CryptoKeyContext("private") + CryptoKeyContextWork = CryptoKeyContext("work") + + DirectoryContextPrivate = DirectoryContext("private") + DirectoryContextWork = DirectoryContext("work") + + EmailAddressContextPrivate = EmailAddressContext("private") + EmailAddressContextWork = EmailAddressContext("work") + + LanguagePrefContextPrivate = LanguagePrefContext("private") + LanguagePrefContextWork = LanguagePrefContext("work") + + LinkContextPrivate = LinkContext("private") + LinkContextWork = LinkContext("work") + + MediaContextPrivate = MediaContext("private") + MediaContextWork = MediaContext("work") + + NicknameContextPrivate = NicknameContext("private") + NicknameContextWork = NicknameContext("work") + + OnlineServiceContextPrivate = OnlineServiceContext("private") + OnlineServiceContextWork = OnlineServiceContext("work") + + OrganizationContextPrivate = OrganizationContext("private") + OrganizationContextWork = OrganizationContext("work") + + PhoneContextPrivate = PhoneContext("private") + PhoneContextWork = PhoneContext("work") + + PronounsContextPrivate = PronounsContext("private") + PronounsContextWork = PronounsContext("work") + + SchedulingAddressContextPrivate = SchedulingAddressContext("private") + SchedulingAddressContextWork = SchedulingAddressContext("work") + + // Relations. + + RelationAcquaintance = Relationship("acquaintance") + RelationAgent = Relationship("agent") + RelationChild = Relationship("child") + RelationCoResident = Relationship("co-resident") + RelationCoWorker = Relationship("co-worker") + RelationColleague = Relationship("colleague") + RelationContact = Relationship("contact") + RelationCrush = Relationship("crush") + RelationDate = Relationship("date") + RelationEmergency = Relationship("emergency") + RelationFriend = Relationship("friend") + RelationKin = Relationship("kin") + RelationMe = Relationship("me") + RelationMet = Relationship("met") + RelationMuse = Relationship("muse") + RelationNeighbor = Relationship("neighbor") + RelationParent = Relationship("parent") + RelationSibling = Relationship("sibling") + RelationSpouse = Relationship("spouse") + RelationSweetheart = Relationship("sweetheart") + + // GrammaticalGenders. + + GrammaticalGenderAnimate = GrammaticalGenderType("animate") + GrammaticalGenderCommon = GrammaticalGenderType("common") + GrammaticalGenderFeminine = GrammaticalGenderType("feminine") + GrammaticalGenderInanimate = GrammaticalGenderType("inanimate") + GrammaticalGenderMasculine = GrammaticalGenderType("masculine") + GrammaticalGenderNeuter = GrammaticalGenderType("neuter") + + // PersonalInfoLevels. + + PersonalInfoLevelHigh = PersonalInfoLevel("high") + PersonalInfoLevelMedium = PersonalInfoLevel("medium") + PersonalInfoLevelLow = PersonalInfoLevel("low") + + // PhoneFeatures. 
+ + PhoneFeatureMobile = PhoneFeature("mobile") + PhoneFeatureVoice = PhoneFeature("voice") + PhoneFeatureText = PhoneFeature("text") + PhoneFeatureVideo = PhoneFeature("video") + PhoneFeatureMainNumber = PhoneFeature("main-number") + PhoneFeatureTextPhone = PhoneFeature("textphone") + PhoneFeatureFax = PhoneFeature("fax") + PhoneFeaturePager = PhoneFeature("pager") + + RscaleIso8601 = "iso8601" +) + +var ( + JSContactVersions = []JSContactVersion{ + JSContactVersion_1_0, + } + + AddressComponentKinds = []AddressComponentKind{ + AddressComponentKindRoom, + AddressComponentKindApartment, + AddressComponentKindFloor, + AddressComponentKindBuilding, + AddressComponentKindNumber, + AddressComponentKindName, + AddressComponentKindBlock, + AddressComponentKindSubdistrict, + AddressComponentKindDistrict, + AddressComponentKindLocality, + AddressComponentKindRegion, + AddressComponentKindPostcode, + AddressComponentKindCountry, + AddressComponentKindDirection, + AddressComponentKindLandmark, + AddressComponentKindPostOfficeBox, + AddressComponentKindSeparator, + } + + AddressContexts = []AddressContext{ + AddressContextBilling, + AddressContextDelivery, + AddressContextPrivate, + AddressContextWork, + } + + AnniversaryKinds = []AnniversaryKind{ + AnniversaryKindBirth, + AnniversaryKindDeath, + AnniversaryKindWedding, + } + + CalendarContexts = []CalendarContext{ + CalendarContextPrivate, + CalendarContextWork, + } + + CalendarResourceKinds = []CalendarKind{ + CalendarKindCalendar, + CalendarKindFreeBusy, + } + + ContactCardKinds = []ContactCardKind{ + ContactCardKindIndividual, + ContactCardKindGroup, + ContactCardKindOrg, + ContactCardKindLocation, + ContactCardKindDevice, + ContactCardKindApplication, + } + + CryptoKeyContexts = []CryptoKeyContext{ + CryptoKeyContextPrivate, + CryptoKeyContextWork, + } + + DirectoryContexts = []DirectoryContext{ + DirectoryContextPrivate, + DirectoryContextWork, + } + + DirectoryKinds = []DirectoryKind{ + DirectoryKindDirectory, + DirectoryKindEntry, + } + + EmailAddressContexts = []EmailAddressContext{ + EmailAddressContextPrivate, + EmailAddressContextWork, + } + + GrammaticalGenders = []GrammaticalGenderType{ + GrammaticalGenderAnimate, + GrammaticalGenderCommon, + GrammaticalGenderFeminine, + GrammaticalGenderInanimate, + GrammaticalGenderMasculine, + GrammaticalGenderNeuter, + } + + LanguagePrefContexts = []LanguagePrefContext{ + LanguagePrefContextPrivate, + LanguagePrefContextWork, + } + + LinkContexts = []LinkContext{ + LinkContextPrivate, + LinkContextWork, + } + + LinkKinds = []LinkKind{ + LinkKindContact, + } + + MediaContexts = []MediaContext{ + MediaContextPrivate, + MediaContextWork, + } + + MediaKinds = []MediaKind{ + MediaKindPhoto, + MediaKindSound, + MediaKindLogo, + } + + NameComponentKinds = []NameComponentKind{ + NameComponentKindTitle, + NameComponentKindGiven, + NameComponentKindGiven2, + NameComponentKindSurname, + NameComponentKindSurname2, + NameComponentKindCredential, + NameComponentKindGeneration, + NameComponentKindSeparator, + } + + NicknameContexts = []NicknameContext{ + NicknameContextPrivate, + NicknameContextWork, + } + + OnlineServiceContexts = []OnlineServiceContext{ + OnlineServiceContextPrivate, + OnlineServiceContextWork, + } + + OrganizationContexts = []OrganizationContext{ + OrganizationContextPrivate, + OrganizationContextWork, + } + + PersonalInfoKinds = []PersonalInfoKind{ + PersonalInfoKindExpertise, + PersonalInfoKindHobby, + PersonalInfoKindInterest, + } + + PersonalInfoLevels = []PersonalInfoLevel{ + 
PersonalInfoLevelHigh,
+ PersonalInfoLevelMedium,
+ PersonalInfoLevelLow,
+ }
+
+ PhoneContexts = []PhoneContext{
+ PhoneContextPrivate,
+ PhoneContextWork,
+ }
+
+ PhoneFeatures = []PhoneFeature{
+ PhoneFeatureMobile,
+ PhoneFeatureVoice,
+ PhoneFeatureText,
+ PhoneFeatureVideo,
+ PhoneFeatureMainNumber,
+ PhoneFeatureTextPhone,
+ PhoneFeatureFax,
+ PhoneFeaturePager,
+ }
+
+ PronounsContexts = []PronounsContext{
+ PronounsContextPrivate,
+ PronounsContextWork,
+ }
+
+ Relations = []Relationship{
+ RelationAcquaintance,
+ RelationAgent,
+ RelationChild,
+ RelationCoResident,
+ RelationCoWorker,
+ RelationColleague,
+ RelationContact,
+ RelationCrush,
+ RelationDate,
+ RelationEmergency,
+ RelationFriend,
+ RelationKin,
+ RelationMe,
+ RelationMet,
+ RelationMuse,
+ RelationNeighbor,
+ RelationParent,
+ RelationSibling,
+ RelationSpouse,
+ RelationSweetheart,
+ }
+
+ SchedulingAddressContexts = []SchedulingAddressContext{
+ SchedulingAddressContextPrivate,
+ SchedulingAddressContextWork,
+ }
+
+ TitleKinds = []TitleKind{
+ TitleKindTitle,
+ TitleKindRole,
+ }
+)
+
+// A `PatchObject` is of type `String[*]` and represents an unordered set of patches on a JSON object.
+//
+// Each key is a path represented in a subset of the JSON Pointer format [RFC6901].
+//
+// The paths have an implicit leading `"/"`, so each key is prefixed with `"/"` before applying the
+// JSON Pointer evaluation algorithm.
+//
+// A patch within a `PatchObject` is only valid if all the following conditions apply:
+// !1. The pointer MAY reference inside an array, but if the last reference token in the pointer is an array index,
+// then the patch value MUST NOT be null. The pointer MUST NOT use `"-"` as an array index in any of its reference
+// tokens (i.e., you MUST NOT insert/delete from an array, but you MAY replace the contents of its existing members.
+// To add or remove members, one needs to replace the complete array value).
+// !2. All reference tokens prior to the last (i.e., the value after the final slash) MUST already exist as values
+// in the object being patched. If the last reference token is an array index, then a member at this index MUST
+// already exist in the referenced array.
+// !3. There MUST NOT be two patches in the `PatchObject` where the pointer of
+// one is the prefix of the pointer of the other, e.g., `"addresses/1/city"` and `"addresses"`.
+// !4. The value for the patch MUST be valid for the property being set (of the correct type and obeying any
+// other applicable restrictions), or if null, the property MUST be optional.
+//
+// The value associated with each pointer determines how to apply that patch:
+// !- If null, remove the property from the patched object. If the key is not present in the parent, this is a no-op.
+// !- If non-null, set the value given as the value for this property (this may be a replacement or addition to the
+// object being patched).
+//
+// A `PatchObject` does not define its own `@type` property. Instead, the `@type` property in a patch MUST be handled
+// as any other patched property value.
+//
+// Implementations MUST reject a `PatchObject` in its entirety if any of its patches are invalid.
+//
+// Implementations MUST NOT apply partial patches.
+//
+// [RFC6901]: https://www.rfc-editor.org/rfc/rfc6901.html
+type PatchObject map[string]any
+
+type Calendar struct {
+ // The JSContact type of the object.
+ //
+ // The value MUST be `Calendar`, if set.
+ Type TypeOfCalendar `json:"@type,omitempty"`
+
+ // The kind of the resource.
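+ //
+ // For a `Calendar` resource the enumerated kind values are `calendar` and `freeBusy`
+ // (see the `CalendarKind` constants above). A free/busy lookup address could, for
+ // example, be declared as (illustrative URI):
+ //
+ //	Calendar{Kind: CalendarKindFreeBusy, Uri: "https://example.com/freebusy/jdoe"}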
+ // + // The allowed values are defined in the property definition that makes use of the Resource type. + // + // Some property definitions may change this property from being optional to mandatory. + // + // A contact card with a `kind` property equal to `group` represents a group of contacts. + // + // Clients often present these separately from other contact cards. + // + // The `members` property, as defined in [RFC 9553, Section 2.1.6], contains a set of UIDs for other + // contacts that are the members of this group. + // + // Clients should consider the group to contain any `ContactCard` with a matching UID, from + // any account they have access to with support for the `urn:ietf:params:jmap:contacts` capability. + // + // UIDs that cannot be found SHOULD be ignored but preserved. + // + // For example, suppose a user adds contacts from a shared address book to their private group, then + // temporarily loses access to this address book. The UIDs cannot be resolved so the contacts will + // disappear from the group. However, if they are given permission to access the data again the UIDs + // will be found and the contacts will reappear. + // + // [RFC 9553, Section 2.1.8]: https://www.rfc-editor.org/rfc/rfc9553#members + Kind CalendarKind `json:"kind,omitempty"` + + // The resource value. + // + // This MUST be a URI as defined in [Section 3 of RFC3986]. + // + // [Section 3 of RFC3986]: https://www.rfc-editor.org/rfc/rfc3986.html#section-3 + Uri string `json:"uri,omitempty"` + + // The [RFC2046 media type] of the resource identified by the uri property value. + // + // [RFC2046 media type]: https://www.rfc-editor.org/rfc/rfc2046.html + MediaType string `json:"mediaType,omitempty"` + + // The contexts in which to use this resource. + // + // The contexts in which to use the contact information. + // + // For example, someone might have distinct phone numbers for `work` and `private` contexts and may set the + // desired context on the respective phone number in the `phones` property. + // + // This section defines common contexts. + // + // Additional contexts may be defined in the properties or data types that make use of this property. + // + // The enumerated common context values are: + // !- `private`: the contact information that may be used in a private context. + // !- `work`: the contact information that may be used in a professional context. + Contexts map[CalendarContext]bool `json:"contexts,omitempty"` + + // The [preference] of the resource in relation to other resources. + // + // A preference order for contact information. + // + // For example, a person may have two email addresses and prefer to be contacted with one of them. + // + // The value MUST be in the range of 1 to 100. Lower values correspond to a higher level of preference, with 1 + // being most preferred. + // + // If no preference is set, then the contact information MUST be interpreted as being least preferred. + // + // Note that the preference is only defined in relation to contact information of the same type. + // + // For example, the preference orders within emails and phone numbers are independent of each other. + // + // [preference]: https://www.rfc-editor.org/rfc/rfc9553.html#prop-pref + Pref uint `json:"pref,omitzero"` + + // A [custom label] for the value. + // + // The labels associated with the contact data. + // + // Such labels may be set for phone numbers, email addresses, and other resources. 
+ // + // Typically, these labels are displayed along with their associated contact data in graphical user interfaces. + // + // Note that succinct labels are best for proper display on small graphical interfaces and screens. + // + // [custom label]: https://www.rfc-editor.org/rfc/rfc9553.html#prop-label + Label string `json:"label,omitempty"` +} + +type CryptoKey struct { + // The JSContact type of the object. + // + // The value MUST be `CryptoKey`, if set. + Type TypeOfCryptoKey `json:"@type,omitempty"` + + // The resource value. + // + // This MUST be a URI as defined in [Section 3 of RFC3986]. + // + // [Section 3 of RFC3986]: https://www.rfc-editor.org/rfc/rfc3986.html#section-3 + Uri string `json:"uri,omitempty"` + + // The [RFC2046 media type] of the resource identified by the uri property value. + // + // [RFC2046 media type]: https://www.rfc-editor.org/rfc/rfc2046.html + MediaType string `json:"mediaType,omitempty"` + + // The contexts in which to use this resource. + // + // The contexts in which to use the contact information. + // + // For example, someone might have distinct phone numbers for `work` and `private` contexts and may set the + // desired context on the respective phone number in the `phones` property. + // + // This section defines common contexts. + // + // Additional contexts may be defined in the properties or data types that make use of this property. + // + // The enumerated common context values are: + // !- `private`: the contact information that may be used in a private context. + // !- `work`: the contact information that may be used in a professional context. + Contexts map[CryptoKeyContext]bool `json:"contexts,omitempty"` + + // The [preference] of the resource in relation to other resources. + // + // A preference order for contact information. + // + // For example, a person may have two email addresses and prefer to be contacted with one of them. + // + // The value MUST be in the range of 1 to 100. Lower values correspond to a higher level of preference, with 1 + // being most preferred. + // + // If no preference is set, then the contact information MUST be interpreted as being least preferred. + // + // Note that the preference is only defined in relation to contact information of the same type. + // + // For example, the preference orders within emails and phone numbers are independent of each other. + // + // [preference]: https://www.rfc-editor.org/rfc/rfc9553.html#prop-pref + Pref uint `json:"pref,omitzero"` + + // A [custom label] for the value. + // + // The labels associated with the contact data. + // + // Such labels may be set for phone numbers, email addresses, and other resources. + // + // Typically, these labels are displayed along with their associated contact data in graphical user interfaces. + // + // Note that succinct labels are best for proper display on small graphical interfaces and screens. + // + // [custom label]: https://www.rfc-editor.org/rfc/rfc9553.html#prop-label + Label string `json:"label,omitempty"` +} + +type Link struct { + // The JSContact type of the object. + // + // The value MUST be `Link`, if set. + Type TypeOfLink `json:"@type,omitempty"` + + // The kind of the resource. + // + // The allowed values are defined in the property definition that makes use of the Resource type. + // + // Some property definitions may change this property from being optional to mandatory. + // + // A contact card with a `kind` property equal to `group` represents a group of contacts. 
+ // + // Clients often present these separately from other contact cards. + // + // The `members` property, as defined in [RFC 9553, Section 2.1.6], contains a set of UIDs for other + // contacts that are the members of this group. + // + // Clients should consider the group to contain any `ContactCard` with a matching UID, from + // any account they have access to with support for the `urn:ietf:params:jmap:contacts` capability. + // + // UIDs that cannot be found SHOULD be ignored but preserved. + // + // For example, suppose a user adds contacts from a shared address book to their private group, then + // temporarily loses access to this address book. The UIDs cannot be resolved so the contacts will + // disappear from the group. However, if they are given permission to access the data again the UIDs + // will be found and the contacts will reappear. + // + // [RFC 9553, Section 2.1.8]: https://www.rfc-editor.org/rfc/rfc9553#members + Kind LinkKind `json:"kind,omitempty"` + + // The resource value. + // + // This MUST be a URI as defined in [Section 3 of RFC3986]. + // + // [Section 3 of RFC3986]: https://www.rfc-editor.org/rfc/rfc3986.html#section-3 + Uri string `json:"uri,omitempty"` + + // The [RFC2046 media type] of the resource identified by the uri property value. + // + // [RFC2046 media type]: https://www.rfc-editor.org/rfc/rfc2046.html + MediaType string `json:"mediaType,omitempty"` + + // The contexts in which to use this resource. + // + // The contexts in which to use the contact information. + // + // For example, someone might have distinct phone numbers for `work` and `private` contexts and may set the + // desired context on the respective phone number in the `phones` property. + // + // This section defines common contexts. + // + // Additional contexts may be defined in the properties or data types that make use of this property. + // + // The enumerated common context values are: + // !- `private`: the contact information that may be used in a private context. + // !- `work`: the contact information that may be used in a professional context. + Contexts map[LinkContext]bool `json:"contexts,omitempty"` + + // The [preference] of the resource in relation to other resources. + // + // A preference order for contact information. + // + // For example, a person may have two email addresses and prefer to be contacted with one of them. + // + // The value MUST be in the range of 1 to 100. Lower values correspond to a higher level of preference, with 1 + // being most preferred. + // + // If no preference is set, then the contact information MUST be interpreted as being least preferred. + // + // Note that the preference is only defined in relation to contact information of the same type. + // + // For example, the preference orders within emails and phone numbers are independent of each other. + // + // [preference]: https://www.rfc-editor.org/rfc/rfc9553.html#prop-pref + Pref uint `json:"pref,omitzero"` + + // A [custom label] for the value. + // + // The labels associated with the contact data. + // + // Such labels may be set for phone numbers, email addresses, and other resources. + // + // Typically, these labels are displayed along with their associated contact data in graphical user interfaces. + // + // Note that succinct labels are best for proper display on small graphical interfaces and screens. 
+ // + // [custom label]: https://www.rfc-editor.org/rfc/rfc9553.html#prop-label + Label string `json:"label,omitempty"` +} + +type Directory struct { + // The JSContact type of the object. + // + // The value MUST be `Directory`, if set. + Type TypeOfDirectory `json:"@type,omitempty"` + + // The kind of the resource. + // + // The allowed values are defined in the property definition that makes use of the Resource type. + // + // Some property definitions may change this property from being optional to mandatory. + // + // A contact card with a `kind` property equal to `group` represents a group of contacts. + // + // Clients often present these separately from other contact cards. + // + // The `members` property, as defined in [RFC 9553, Section 2.1.6], contains a set of UIDs for other + // contacts that are the members of this group. + // + // Clients should consider the group to contain any `ContactCard` with a matching UID, from + // any account they have access to with support for the `urn:ietf:params:jmap:contacts` capability. + // + // UIDs that cannot be found SHOULD be ignored but preserved. + // + // For example, suppose a user adds contacts from a shared address book to their private group, then + // temporarily loses access to this address book. The UIDs cannot be resolved so the contacts will + // disappear from the group. However, if they are given permission to access the data again the UIDs + // will be found and the contacts will reappear. + // + // [RFC 9553, Section 2.1.8]: https://www.rfc-editor.org/rfc/rfc9553#members + Kind DirectoryKind `json:"kind,omitempty"` + + // The resource value. + // + // This MUST be a URI as defined in Section 3 of [RFC3986-section3]. + // + // [RFC3986-section3]: https://www.rfc-editor.org/rfc/rfc3986.html#section-3 + Uri string `json:"uri,omitempty"` + + // The [RFC2046 media type] of the resource identified by the uri property value. + // + // [RFC2046 media type]: https://www.rfc-editor.org/rfc/rfc2046.html + MediaType string `json:"mediaType,omitempty"` + + // The contexts in which to use this resource. + // + // The contexts in which to use the contact information. + // + // For example, someone might have distinct phone numbers for work and private contexts and may set the + // desired context on the respective phone number in the phones (Section 2.3.3) property. + // + // This section defines common contexts. + // + // Additional contexts may be defined in the properties or data types that make use of this property. + // + // The enumerated common context values are: + // !- `private`: the contact information that may be used in a private context. + // !- `work`: the contact information that may be used in a professional context. + Contexts map[DirectoryContext]bool `json:"contexts,omitempty"` + + // The [preference] of the resource in relation to other resources. + // + // A preference order for contact information. + // + // For example, a person may have two email addresses and prefer to be contacted with one of them. + // + // The value MUST be in the range of 1 to 100. Lower values correspond to a higher level of preference, with 1 + // being most preferred. + // + // If no preference is set, then the contact information MUST be interpreted as being least preferred. + // + // Note that the preference is only defined in relation to contact information of the same type. + // + // For example, the preference orders within emails and phone numbers are independent of each other. 
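+ //
+ // For instance, a directory entry with `"pref": 1` is preferred over one with
+ // `"pref": 50`, and both rank above an entry that does not set `pref` at all.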
+ // + // [preference]: https://www.rfc-editor.org/rfc/rfc9553.html#prop-pref + Pref uint `json:"pref,omitzero"` + + // A [custom label] for the value. + // + // The labels associated with the contact data. + // + // Such labels may be set for phone numbers, email addresses, and other resources. + // + // Typically, these labels are displayed along with their associated contact data in graphical user interfaces. + // + // Note that succinct labels are best for proper display on small graphical interfaces and screens. + // + // [custom label]: https://www.rfc-editor.org/rfc/rfc9553.html#prop-label + Label string `json:"label,omitempty"` + + // The position of the directory resource in the list of all `Directory` objects having the same kind property + // value in the Card. + // + // Only in `Directory` `Resource` types. + // + // If set, the `listAs` value MUST be higher than zero. + // + // Multiple directory resources MAY have the same `listAs` property value or none. + // + // Sorting such same-valued entries is implementation-specific. + ListAs uint `json:"listAs,omitzero"` +} + +type Media struct { + // The JSContact type of the object. + // + // The value MUST be `Media`, if set. + Type TypeOfMedia `json:"@type,omitempty"` + + // The kind of the resource. + // + // The allowed values are defined in the property definition that makes use of the Resource type. + // + // Some property definitions may change this property from being optional to mandatory. + // + // A contact card with a `kind` property equal to `group` represents a group of contacts. + // + // Clients often present these separately from other contact cards. + // + // The `members` property, as defined in [RFC 9553, Section 2.1.6], contains a set of UIDs for other + // contacts that are the members of this group. + // + // Clients should consider the group to contain any `ContactCard` with a matching UID, from + // any account they have access to with support for the `urn:ietf:params:jmap:contacts` capability. + // + // UIDs that cannot be found SHOULD be ignored but preserved. + // + // For example, suppose a user adds contacts from a shared address book to their private group, then + // temporarily loses access to this address book. The UIDs cannot be resolved so the contacts will + // disappear from the group. However, if they are given permission to access the data again the UIDs + // will be found and the contacts will reappear. + // + // [RFC 9553, Section 2.1.8]: https://www.rfc-editor.org/rfc/rfc9553#members + Kind MediaKind `json:"kind,omitempty"` + + // The resource value. + // + // This MUST be a URI as defined in Section 3 of [RFC3986-section3]. + // + // [RFC3986-section3]: https://www.rfc-editor.org/rfc/rfc3986.html#section-3 + Uri string `json:"uri,omitempty"` + + // The [RFC2046 media type] of the resource identified by the uri property value. + // + // [RFC2046 media type]: https://www.rfc-editor.org/rfc/rfc2046.html + MediaType string `json:"mediaType,omitempty"` + + // The contexts in which to use this resource. + // + // The contexts in which to use the contact information. + // + // For example, someone might have distinct phone numbers for work and private contexts and may set the + // desired context on the respective phone number in the phones (Section 2.3.3) property. + // + // This section defines common contexts. + // + // Additional contexts may be defined in the properties or data types that make use of this property. 
+ // + // The enumerated common context values are: + // !- `private`: the contact information that may be used in a private context. + // !- `work`: the contact information that may be used in a professional context. + Contexts map[MediaContext]bool `json:"contexts,omitempty"` + + // The [preference] of the resource in relation to other resources. + // + // A preference order for contact information. + // + // For example, a person may have two email addresses and prefer to be contacted with one of them. + // + // The value MUST be in the range of 1 to 100. Lower values correspond to a higher level of preference, with 1 + // being most preferred. + // + // If no preference is set, then the contact information MUST be interpreted as being least preferred. + // + // Note that the preference is only defined in relation to contact information of the same type. + // + // For example, the preference orders within emails and phone numbers are independent of each other. + // + // [preference]: https://www.rfc-editor.org/rfc/rfc9553.html#prop-pref + Pref uint `json:"pref,omitzero"` + + // A [custom label] for the value. + // + // The labels associated with the contact data. + // + // Such labels may be set for phone numbers, email addresses, and other resources. + // + // Typically, these labels are displayed along with their associated contact data in graphical user interfaces. + // + // Note that succinct labels are best for proper display on small graphical interfaces and screens. + // + // [custom label]: https://www.rfc-editor.org/rfc/rfc9553.html#prop-label + Label string `json:"label,omitempty"` + + // An id for the Blob representing the binary contents of the resource. + // + // This is a JMAP extension of JSContact, and only present in `Media` `Resource` types. + // + // When returning `ContactCard`s, any `Media` with a `data:` URI SHOULD return a `blobId` property + // and omit the `uri` property. + // + // The `mediaType` property MUST also be set. + // + // Similarly, when creating or updating a `ContactCard`, clients MAY send a `blobId` instead + // of the `uri` property for a `Media` object. + BlobId string `json:"blobId,omitempty"` +} + +type Relation struct { + // The JSContact type of the object: the value MUST be `Relation`, if set. + Type TypeOfRelation `json:"@type,omitempty"` + + // The relationship of the related Card to the Card, defined as a set of relation types. + // + // The keys in the set define the relation type; the values for each key in the set MUST be "true". + // + // The relationship between the two objects is undefined if the set is empty. + // + // The initial list of enumerated relation types matches the IANA-registered TYPE `IANA-vCard`` + // parameter values of the vCard RELATED property ([Section 6.6.6 of RFC6350]): + // !- `acquaintance` + // !- `agent` + // !- `child` + // !- `co-resident` + // !- `co-worker` + // !- `colleague` + // !- `contact` + // !- `crush` + // !- `date` + // !- `emergency` + // !- `friend` + // !- `kin` + // !- `me` + // !- `met` + // !- `muse` + // !- `neighbor` + // !- `parent` + // !- `sibling` + // !- `spouse` + // !- `sweetheart` + // + // [Section 6.6.6 of RFC6350]: https://www.rfc-editor.org/rfc/rfc6350.html#section-6.6.6 + Relation map[Relationship]bool `json:"relation,omitempty"` +} + +type NameComponent struct { + // The JSContact type of the object: the value MUST be `NameComponent`, if set. + Type TypeOfNameComponent `json:"@type,omitempty"` + + // The value of the name component. 
+
+ // This can be composed of one or multiple words such as `Poe` or `van Gogh`.
+ Value string `json:"value"`
+
+ // The kind of the name component.
+ //
+ // !- `title`: an honorific title or prefix, e.g., `Mr.`, `Ms.`, or `Dr.`
+ // !- `given`: a given name, also known as "first name" or "personal name"
+ // !- `given2`: a name that appears between the given and surname such as a middle name or patronymic name
+ // !- `surname`: a surname, also known as "last name" or "family name"
+ // !- `surname2`: a secondary surname (used in some cultures), also known as "maternal surname"
+ // !- `credential`: a credential, also known as "accreditation qualifier" or "honorific suffix", e.g., `B.A.`, `Esq.`
+ // !- `generation`: a generation marker or qualifier, e.g., `Jr.` or `III`
+ // !- `separator`: a formatting separator between two ordered name non-separator components; the value property of the component includes the verbatim separator, for example, a hyphen character or even an empty string. This value has higher precedence than the defaultSeparator property of the Name. Implementations MUST NOT insert two consecutive separator components; instead, they SHOULD insert a single separator component with the combined value; this component kind MUST NOT be set if the `Name` `isOrdered` property value is `false`
+ Kind NameComponentKind `json:"kind"`
+
+ // The pronunciation of the name component.
+ //
+ // If this property is set, then at least one of the `Name` object properties, `phoneticSystem` or `phoneticScript`,
+ // MUST be set.
+ Phonetic string `json:"phonetic,omitempty"`
+}
+
+type Nickname struct {
+ // The JSContact type of the object: the value MUST be `Nickname`, if set.
+ Type TypeOfNickname `json:"@type,omitempty"`
+
+ // The nickname.
+ Name string `json:"name"`
+
+ // The contexts in which to use the nickname.
+ // TODO document https://www.rfc-editor.org/rfc/rfc9553.html#prop-contexts
+ Contexts map[NicknameContext]bool `json:"contexts,omitempty"`
+
+ // The preference of the nickname in relation to other nicknames.
+ //
+ // A preference order for contact information. For example, a person may have two email addresses and
+ // prefer to be contacted with one of them.
+ //
+ // The value MUST be in the range of 1 to 100. Lower values correspond to a higher level of preference,
+ // with 1 being most preferred.
+ //
+ // If no preference is set, then the contact information MUST be interpreted as being least preferred.
+ //
+ // Note that the preference is only defined in relation to contact information of the same type.
+ //
+ // For example, the preference orders within emails and phone numbers are independent of each other.
+ Pref uint `json:"pref,omitzero"`
+}
+
+type OrgUnit struct {
+ // The JSContact type of the object: the value MUST be `OrgUnit`, if set.
+ Type TypeOfOrgUnit `json:"@type,omitempty"`
+
+ // The name of the organizational unit.
+ Name string `json:"name"`
+
+ // The value to lexicographically sort the organizational unit in relation to other organizational
+ // units of the same level when compared by name.
+ //
+ // The level is defined by the array index of the organizational unit in the units property
+ // of the Organization object.
+ //
+ // The property value defines the verbatim string value to compare.
+ //
+ // In absence of this property, the name property value MAY be used for comparison.
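+ //
+ // For example, a unit named "Überwachung" might set `"sortAs": "Uberwachung"` so
+ // that it sorts alongside its ASCII-named sibling units (illustrative value).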
+ SortAs string `json:"sortAs,omitempty"` +} + +type Organization struct { + // The JSContact type of the object: the value MUST be `Organization`, if set. + Type TypeOfOrganization `json:"@type,omitempty"` + + // The name of the organization. + Name string `json:"name,omitempty"` + + // A list of organizational units, ordered as descending by hierarchy. + // (e.g., a geographic or functional division sorts before a department within that division). + // + // If set, the list MUST contain at least one entry + Units []OrgUnit `json:"units,omitempty"` + + // The value to lexicographically sort the organization in relation to other organizations when + // compared by name. + // + // The value defines the verbatim string value to compare. + // + // In absence of this property, the name property value MAY be used for comparison. + SortAs string `json:"sortAs,omitempty"` + + // The contexts in which association with the organization applies. + // + // For example, membership in a choir may only apply in a private context. + // + // TODO document https://www.rfc-editor.org/rfc/rfc9553.html#prop-contexts + Contexts map[OrganizationContext]bool `json:"contexts,omitempty"` +} + +type Pronouns struct { + // The JSContact type of the object: the value MUST be `Pronouns`, if set. + Type TypeOfPronouns `json:"@type,omitempty"` + + // The pronouns. + // + // Any value or form is allowed. + // + // Examples in English include `she/her` and `they/them/theirs`. + // + // The value MAY be overridden in the `localizations` property. + Pronouns string `json:"pronouns"` + + // The contexts in which to use the pronouns. + Contexts map[PronounsContext]bool `json:"contexts,omitempty"` + + // The preference of the pronouns in relation to other pronouns in the same context. + // + // A preference order for contact information. For example, a person may have two email addresses and + // prefer to be contacted with one of them. + // + // The value MUST be in the range of 1 to 100. Lower values correspond to a higher level of preference, + // with 1 being most preferred. + // + // If no preference is set, then the contact information MUST be interpreted as being least preferred. + // + // Note that the preference is only defined in relation to contact information of the same type. + // + // For example, the preference orders within emails and phone numbers are independent of each other. + Pref uint `json:"pref,omitzero"` +} + +type Title struct { + // The JSContact type of the object: the value MUST be `Title`, if set. + Type TypeOfTitle `json:"@type,omitempty"` + + // The title or role name of the entity represented by the Card. + Name string `json:"name"` + + // The organizational or situational kind of the title. + // + // Some organizations and individuals distinguish between titles as organizational + // positions and roles as more temporary assignments such as in project management. + // + // The enumerated values are: + // !- `title` + // !- `role` + Kind TitleKind `json:"kind,omitempty"` + + // The identifier of the organization in which this title is held. + OrganizationId string `json:"organizationId,omitempty"` +} + +type SpeakToAs struct { + // The JSContact type of the object: the value MUST be `SpeakToAs`, if set. + Type TypeOfSpeakToAs `json:"@type,omitempty"` + + // The grammatical gender to use in salutations and other grammatical constructs. 
+ // + // For example, the German language distinguishes by grammatical gender in salutations such as + // `Sehr geehrte` (feminine) and `Sehr geehrter` (masculine). + // + // The enumerated values are: + // !- `animate` + // !- `common` + // !- `feminine` + // !- `inanimate` + // !- `masculine` + // !- `neuter` + // + // Note that the grammatical gender does not allow inferring the gender identities or assigned + // sex of the contact. + GrammaticalGender GrammaticalGenderType `json:"grammaticalGender,omitempty"` + + // The pronouns that the contact chooses to use for themselves. + Pronouns map[string]Pronouns `json:"pronouns,omitempty"` +} + +type Name struct { + // The JSContact type of the object: the value MUST be `Name`, if set. + Type TypeOfName `json:"@type,omitempty"` + + // The components making up this name. + // + // The components property MUST be set if the full property is not set; otherwise, it SHOULD be set. + // + // The component list MUST have at least one entry having a different kind property value than `separator`. + // + // `Name` components SHOULD be ordered such that when their values are joined as a `string`, a valid full name + // of the entity is produced. If so, implementations MUST set the isOrdered property value to `true`. + // + // If the name `components` are ordered, then the `defaultSeparator` property and name components with the kind + // property value set to `separator` give guidance on what characters to insert between components, but + // implementations are free to choose any others. + // + // When lacking a separator, inserting a single space character in between the name component values is a good choice. + // + // If, instead, the name components follow no particular order, then the `isOrdered` property value MUST be + // `false`, the `components` property MUST NOT contain a `NameComponent` with the `kind` property value set to + // `separator`, and the `defaultSeparator` property MUST NOT be set. + Components []NameComponent `json:"components,omitempty"` + + // The indicator if the name components in the components property are ordered. + // + // default: false + IsOrdered bool `json:"isOrdered,omitzero"` + + // The default separator to insert between name component values when concatenating all name component values to a single String. + // + // Also see the definition of the kind property value `separator` for the `NameComponent` object. + // + // The `defaultSeparator` property MUST NOT be set if the `Name` `isOrdered` property value is `false` or if + // the components property is not set. + // + // example: {"name": { "components": [{ "kind": "given", "value": "Diego" }, { "kind": "surname", "value": "Rivera" }, { "kind": "surname2", "value": "Barrientos" }], "isOrdered": true} + DefaultSeparator string `json:"defaultSeparator,omitempty"` + + // The full name representation of the `Name`. + // + // The `full` property MUST be set if the components property is not set. + // + // example: Mr. John Q. Public, Esq. + Full string `json:"full,omitempty"` + + // The value to lexicographically sort the name in relation to other names when compared by a name component type. + // + // The keys in the map define the name component type. The values define the verbatim string to compare when sorting + // by the name component type. + // + // Absence of a key indicates that the name component type SHOULD NOT be considered during sort. 
+ // + // Sorting by that missing name component type, or if the sortAs property is not set, is implementation-specific. + // + // The sortAs property MUST NOT be set if the components property is not set. + // + // Each key in the map MUST be a valid name component type value as defined for the kind property of the NameComponent + // object. + // + // For each key in the map, there MUST exist at least one NameComponent object that has the type in the components + // property of the name. + SortAs map[string]string `json:"sortAs,omitempty"` + + // The script used in the value of the NameComponent phonetic property. + // TODO https://www.rfc-editor.org/rfc/rfc9553.html#prop-phonetic + PhoneticScript string `json:"phoneticScript,omitempty"` + + // The phonetic system used in the NameComponent phonetic property. + // TODO https://www.rfc-editor.org/rfc/rfc9553.html#prop-phonetic + PhoneticSystem string `json:"phoneticSystem,omitempty"` +} + +type EmailAddress struct { + // The JSContact type of the object: the value MUST be `EmailAddress`, if set. + Type TypeOfEmailAddress `json:"@type,omitempty"` + + // The email address. + // + // This MUST be an addr-spec value as defined in [Section 3.4.1 of RFC5322]. + // + // [Section 3.4.1 of RFC5322]: https://www.rfc-editor.org/rfc/rfc5322.html#section-3.4.1 + Address string `json:"address"` + + // The contexts in which to use this email address. + Contexts map[EmailAddressContext]bool `json:"contexts,omitempty"` + + // The preference of the email address in relation to other email addresses. + // + // A preference order for contact information. For example, a person may have two email addresses and + // prefer to be contacted with one of them. + // + // The value MUST be in the range of 1 to 100. Lower values correspond to a higher level of preference, + // with 1 being most preferred. + // + // If no preference is set, then the contact information MUST be interpreted as being least preferred. + // + // Note that the preference is only defined in relation to contact information of the same type. + // + // For example, the preference orders within emails and phone numbers are independent of each other. + Pref uint `json:"pref,omitzero"` + + // A custom label for the value. + // + // The labels associated with the contact data. Such labels may be set for phone numbers, email addresses, and other resources. + // + // Typically, these labels are displayed along with their associated contact data in graphical user interfaces. + // + // Note that succinct labels are best for proper display on small graphical interfaces and screens. + Label string `json:"label,omitempty"` +} + +type OnlineService struct { + // The JSContact type of the object: the value MUST be `OnlineService`, if set. + Type TypeOfOnlineService `json:"@type,omitempty"` + + // The name of the online service or protocol. + // + // The name MAY be capitalized the same as on the service's website, app, or publishing material, + // but names MUST be considered equal if they match case-insensitively. + // + // Examples are `GitHub`, `kakao`, and `Mastodon`. + Service string `json:"service,omitempty"` + + // The identifier for the entity represented by the Card at the online service. + Uri string `json:"uri,omitempty"` + + // The name the entity represented by the Card at the online service. + // + // Any free-text value is allowed. + User string `json:"user,omitempty"` + + // The contexts in which to use the service. 
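+ //
+ // For example, a Mastodon handle that is only used professionally could be
+ // represented as (illustrative values):
+ //
+ //	OnlineService{
+ //		Service:  "Mastodon",
+ //		User:     "@jdoe@example.social",
+ //		Contexts: map[OnlineServiceContext]bool{OnlineServiceContextWork: true},
+ //	}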
+ Contexts map[OnlineServiceContext]bool `json:"contexts,omitempty"` + + // The preference of the service in relation to other services. + // + // A preference order for contact information. For example, a person may have two email addresses and + // prefer to be contacted with one of them. + // + // The value MUST be in the range of 1 to 100. Lower values correspond to a higher level of preference, + // with 1 being most preferred. + // + // If no preference is set, then the contact information MUST be interpreted as being least preferred. + // + // Note that the preference is only defined in relation to contact information of the same type. + // + // For example, the preference orders within emails and phone numbers are independent of each other. + Pref uint `json:"pref,omitzero"` + + // A custom label for the value. + // + // The labels associated with the contact data. Such labels may be set for phone numbers, email addresses, and other resources. + // + // Typically, these labels are displayed along with their associated contact data in graphical user interfaces. + // + // Note that succinct labels are best for proper display on small graphical interfaces and screens. + Label string `json:"label,omitempty"` +} + +type Phone struct { + // The JSContact type of the object: the value MUST be `Phone`, if set. + Type TypeOfPhone `json:"@type,omitempty"` + + // The phone number as either a URI or free text. + // + // Typical URI schemes are `tel` [RFC3966] or `sip` [RFC3261], but any URI scheme is allowed. + // + // [RFC3966]: https://www.rfc-editor.org/rfc/rfc3966.html + // [RFC3261]: https://www.rfc-editor.org/rfc/rfc3261.html + Number string `json:"number"` + + // The set of contact features that the phone number may be used for. + // + // The set is represented as an object, with each key being a method type. + // + // The boolean value MUST be `true`. + // + // The enumerated values are: + // !- `mobile`: this number is for a mobile phone + // !- `voice`: this number supports calling by voice + // !- `text`: this number supports text messages (SMS) + // !- `video`: this number supports video conferencing + // !- `main-number`: this number is a main phone number such as the number of the front desk at a company, as opposed to a direct-dial number of an individual employee + // !- `textphone`: this number is for a device for people with hearing or speech difficulties + // !- `fax`: this number supports sending faxes + // !- `pager`: this number is for a pager or beeper + Features map[PhoneFeature]bool `json:"features,omitempty"` + + // The contexts in which to use the number. + Contexts map[PhoneContext]bool `json:"contexts,omitempty"` + + // The preference of the number in relation to other numbers. + // + // A preference order for contact information. For example, a person may have two email addresses and + // prefer to be contacted with one of them. + // + // The value MUST be in the range of 1 to 100. Lower values correspond to a higher level of preference, + // with 1 being most preferred. + // + // If no preference is set, then the contact information MUST be interpreted as being least preferred. + // + // Note that the preference is only defined in relation to contact information of the same type. + // + // For example, the preference orders within emails and phone numbers are independent of each other. + Pref uint `json:"pref,omitzero"` + + // A custom label for the value. + // + // The labels associated with the contact data. 
Such labels may be set for phone numbers, email addresses, and other resources. + // + // Typically, these labels are displayed along with their associated contact data in graphical user interfaces. + // + // Note that succinct labels are best for proper display on small graphical interfaces and screens. + Label string `json:"label,omitempty"` +} + +type LanguagePref struct { + // The JSContact type of the object: the value MUST be `LanguagePref`, if set. + Type TypeOfLanguagePref `json:"@type,omitempty"` + + // The preferred language. + // + // This MUST be a language tag as defined in [RFC5646]. + // + // [RFC5646]: https://www.rfc-editor.org/rfc/rfc5646.html + Language string `json:"language"` + + // The contexts in which to use the language. + Contexts map[LanguagePrefContext]bool `json:"contexts,omitempty"` + + // The preference of the language in relation to other languages of the same contexts. + // + // A preference order for contact information. For example, a person may have two email addresses and + // prefer to be contacted with one of them. + // + // The value MUST be in the range of 1 to 100. Lower values correspond to a higher level of preference, + // with 1 being most preferred. + // + // If no preference is set, then the contact information MUST be interpreted as being least preferred. + // + // Note that the preference is only defined in relation to contact information of the same type. + // + // For example, the preference orders within emails and phone numbers are independent of each other. + Pref uint `json:"pref,omitzero"` +} + +type SchedulingAddress struct { + // The JSContact type of the object: the value MUST be `SchedulingAddress`, if set. + Type TypeOfSchedulingAddress `json:"@type,omitempty"` + + // The address to use for calendar scheduling with the contact. + Uri string `json:"uri,omitempty"` + + // The contexts in which to use the scheduling address. + Contexts map[SchedulingAddressContext]bool `json:"contexts,omitempty"` + + // The preference of the scheduling address in relation to other scheduling addresses. + // + // A preference order for contact information. For example, a person may have two email addresses and + // prefer to be contacted with one of them. + // + // The value MUST be in the range of 1 to 100. Lower values correspond to a higher level of preference, + // with 1 being most preferred. + // + // If no preference is set, then the contact information MUST be interpreted as being least preferred. + // + // Note that the preference is only defined in relation to contact information of the same type. + // + // For example, the preference orders within emails and phone numbers are independent of each other. + Pref uint `json:"pref,omitzero"` + + // A custom label for the scheduling address. + // + // The labels associated with the contact data. Such labels may be set for phone numbers, email addresses, and other resources. + // + // Typically, these labels are displayed along with their associated contact data in graphical user interfaces. + // + // Note that succinct labels are best for proper display on small graphical interfaces and screens. + Label string `json:"label,omitempty"` +} + +type AddressComponent struct { + // The JSContact type of the object: the value MUST be `AddressComponent`, if set. + Type TypeOfAddressComponent `json:"@type,omitempty"` + + // The value of the address component. + Value string `json:"value"` + + // The kind of the address component. + // + // The enumerated values are: + // ! 
`room`: the room, suite number, or identifier + // ! `apartment`: the extension designation such as the apartment number, unit, or box number + // ! `floor`: the floor or level the address is located on + // ! `building`: the building, tower, or condominium the address is located in + // ! `number`: the street number, e.g., `"123"`; this value is not restricted to numeric values and can include any value such + // as number ranges (`"112-10"`), grid style (`"39.2 RD"`), alphanumerics (`"N6W23001"`), or fractionals (`"123 1/2"`) + // ! `name`: the street name + // ! `block`: the block name or number + // ! `subdistrict`: the subdistrict, ward, or other subunit of a district + // ! `district`: the district name + // ! `locality`: the municipality, city, town, village, post town, or other locality + // ! `region`: the administrative area such as province, state, prefecture, county, or canton + // ! `postcode`: the postal code, post code, ZIP code, or other short code associated with the address by the relevant country's postal system + // ! `country`: the country name + // ! `direction`: the cardinal direction or quadrant, e.g., "north" + // ! `landmark`: the publicly known prominent feature that can substitute the street name and number, e.g., "White House" or "Taj Mahal" + // ! `postOfficeBox`: the post office box number or identifier + // ! `separator`: a formatting separator between two ordered address non-separator components; the value property of the component includes the + // verbatim separator, for example, a hyphen character or even an empty string; this value has higher precedence than the `defaultSeparator` property + // of the `Address`; implementations MUST NOT insert two consecutive separator components; instead, they SHOULD insert a single separator component + // with the combined value; this component kind MUST NOT be set if the `Address` `isOrdered` property value is `false`. + Kind AddressComponentKind `json:"kind"` + + // The pronunciation of the name component. + // + // If this property is set, then at least one of the Address object `phoneticSystem` or `phoneticScript` properties MUST be set. + Phonetic string `json:"phonetic,omitempty"` +} + +// An Address object has the following properties, of which at least one of components, coordinates, countryCode, full or timeZone MUST be set. +type Address struct { + // The JSContact type of the object: the value MUST be `Address`, if set. + Type TypeOfAddress `json:"@type,omitempty"` + + // The components that make up the address. + // + // The component list MUST have at least one entry that has a kind property value other than `separator`. + // + // Address components SHOULD be ordered such that when their values are joined as a String, a valid full address is produced. + // + // If so, implementations MUST set the isOrdered property value to `true`. + // + // If the address components are ordered, then the `defaultSeparator` property and address components with the `kind` + // property value set to `separator` give guidance on what characters to insert between components, but implementations + // are free to choose any others. + // + // When lacking a separator, inserting a single space character in between address component values is a good choice. 
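+ //
+ // For example, an ordered set of components for a simple street address could
+ // look like this (illustrative values):
+ //
+ //	[]AddressComponent{
+ //		{Kind: AddressComponentKindNumber, Value: "54321"},
+ //		{Kind: AddressComponentKindSeparator, Value: " "},
+ //		{Kind: AddressComponentKindName, Value: "Oak St"},
+ //	}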
+
+ //
+ // If, instead, the address components follow no particular order, then the isOrdered property value MUST be `false`,
+ // the components property MUST NOT contain an `AddressComponent` with the `kind` property value set to `separator`,
+ // and the `defaultSeparator` property MUST NOT be set.
+ Components []AddressComponent `json:"components,omitempty"`
+
+ // The indicator if the address components in the components property are ordered.
+ //
+ // default: false
+ IsOrdered bool `json:"isOrdered,omitzero"`
+
+ // The Alpha-2 country code as of [ISO.3166-1].
+ //
+ // [ISO.3166-1]: https://www.iso.org/iso-3166-country-codes.html
+ CountryCode string `json:"countryCode,omitempty"`
+
+ // A "geo:" URI [RFC5870] for the address.
+ //
+ // [RFC5870]: https://www.rfc-editor.org/rfc/rfc5870.html
+ Coordinates string `json:"coordinates,omitempty"`
+
+ // The time zone in which the address is located.
+ //
+ // This MUST be a time zone name registered in the IANA Time Zone Database [IANA-TZ].
+ //
+ // [IANA-TZ]: https://www.iana.org/time-zones
+ TimeZone string `json:"timeZone,omitempty"`
+
+ // The contexts in which to use this address.
+ //
+ // The boolean value MUST be `true`.
+ //
+ // In addition to the common contexts, allowed key values are:
+ // ! `billing`: an address to be used for billing
+ // ! `delivery`: an address to be used for delivering physical items
+ Contexts map[AddressContext]bool `json:"contexts,omitempty"`
+
+ // The full address, including street, region, or country.
+ //
+ // The purpose of this property is to define an address, even if the individual address components are not known.
+ Full string `json:"full,omitempty"`
+
+ // The default separator to insert between address component values when concatenating all address component values to a single String.
+ //
+ // Also see the definition of the `kind` property value `separator` for the `AddressComponent` object.
+ //
+ // The `defaultSeparator` property MUST NOT be set if the Address `isOrdered` property value is `false` or if the `components` property is not set.
+ DefaultSeparator string `json:"defaultSeparator,omitempty"`
+
+ // The preference of the address in relation to other addresses.
+ //
+ // A preference order for contact information. For example, a person may have two email addresses and
+ // prefer to be contacted with one of them.
+ //
+ // The value MUST be in the range of 1 to 100. Lower values correspond to a higher level of preference,
+ // with 1 being most preferred.
+ //
+ // If no preference is set, then the contact information MUST be interpreted as being least preferred.
+ //
+ // Note that the preference is only defined in relation to contact information of the same type.
+ //
+ // For example, the preference orders within emails and phone numbers are independent of each other.
+ Pref uint `json:"pref,omitzero"`
+
+ // The script used in the value of the Address phonetic property.
+ // TODO https://www.rfc-editor.org/rfc/rfc9553.html#prop-phonetic
+ PhoneticScript string `json:"phoneticScript,omitempty"`
+
+ // The phonetic system used in the AddressComponent phonetic property.
+ // TODO https://www.rfc-editor.org/rfc/rfc9553.html#prop-phonetic + PhoneticSystem string `json:"phoneticSystem,omitempty"` +} + +type AnniversaryDate interface { + isAnniversaryDate() // marker +} + +type AnniversaryDateContainer struct { + Value AnniversaryDate +} + +func (a *Anniversary) UnmarshalJSON(b []byte) error { + var typ struct { + Date struct { + Type string `json:"@type"` + } `json:"date,omitzero"` + } + if err := json.Unmarshal(b, &typ); err != nil { + return err + } + switch typ.Date.Type { + case string(PartialDateType): + a.Date = new(PartialDate) + case string(TimestampType): + a.Date = new(Timestamp) + default: + return fmt.Errorf("unsupported '%T.date' @type: \"%v\"", a, typ.Date.Type) + } + + type tmp Anniversary + return json.Unmarshal(b, (*tmp)(a)) +} + +// A PartialDate object represents a complete or partial calendar date in the Gregorian calendar. +// +// It represents a complete date, a year, a month in a year, or a day in a month. +type PartialDate struct { + // The JSContact type of the object; the value MUST be `PartialDate`, if set. + Type TypeOfPartialDate `json:"@type,omitempty"` + + // The calendar year. + Year uint `json:"year,omitzero"` + + // The calendar month, represented as the integers 1 <= month <= 12. + // + // If this property is set, then either the `year` or the `day` property MUST be set. + Month uint `json:"month,omitzero"` + + // The calendar month day, represented as the integers 1 <= day <= 31, depending on the validity + // within the month and year. + // + // If this property is set, then the `month` property MUST be set. + Day uint `json:"day,omitzero"` + + // The calendar system in which this date occurs, in lowercase. + // + // This MUST be either a calendar system name registered as a Common Locale Data Repository [CLDR] [RFC7529] + // or a vendor-specific value. + // + // The year, month, and day still MUST be represented in the Gregorian calendar. + // + // Note that the year property might be required to convert the date between the Gregorian calendar + // and the respective calendar system. + // + // [CLDR]: https://github.com/unicode-org/cldr/blob/latest/common/bcp47/calendar.xml + // [RFC7529]: https://www.rfc-editor.org/rfc/rfc7529.html + CalendarScale string `json:"calendarScale,omitempty"` +} + +func (_ PartialDate) isAnniversaryDate() { +} + +var _ AnniversaryDate = &PartialDate{} + +type Timestamp struct { + // The JSContact type of the object; the value MUST be `Timestamp`, if set. + Type TypeOfTimestamp `json:"@type,omitempty"` + + // The point in time in UTC time (UTCDateTime). + Utc time.Time `json:"utc"` +} + +var _ AnniversaryDate = &Timestamp{} + +func (_ Timestamp) isAnniversaryDate() { +} + +type Anniversary struct { + // The JSContact type of the object: the value MUST be `Anniversary`, if set. + Type TypeOfAnniversary `json:"@type,omitempty"` + + // The kind of anniversary. + // + // The enumerated values are: + // ! `birth`: a birthday anniversary + // ! `death`: a deathday anniversary + // ! `wedding`: a wedding day anniversary + Kind AnniversaryKind `json:"kind"` + + // The date of the anniversary in the Gregorian calendar. + // + // This MUST be either a whole or partial calendar date or a complete UTC timestamp + // (see the definition of the `Timestamp` and `PartialDate` object types). + Date AnniversaryDate `json:"date"` +} + +type Author struct { + // The JSContact type of the object: the value MUST be `Author`, if set. + Type TypeOfAuthor `json:"@type,omitempty"` + + // The name of this author. 
+ Name string `json:"name,omitempty"` + + // The URI value that identifies the author. + Uri string `json:"uri,omitempty"` +} + +type Note struct { + // The JSContact type of the object: the value MUST be `Note`, if set. + Type TypeOfNote `json:"@type,omitempty"` + + // The free-text value of this note. + Note string `json:"note"` + + // The date and time when this note was created. + Created time.Time `json:"created,omitzero"` + + // The author of this note. + Author *Author `json:"author,omitempty"` +} + +type PersonalInfo struct { + // The JSContact type of the object: the value MUST be `PersonalInfo`, if set. + Type TypeOfPersonalInfo `json:"@type,omitempty"` + + // The kind of personal information. + // + // The enumerated values are: + // ! `expertise`: a field of expertise or a credential + // ! `hobby`: a hobby + // ! `interest`: an interest + Kind PersonalInfoKind `json:"kind"` + + // The actual information. + Value string `json:"value"` + + // The level of expertise or engagement in hobby or interest. + // + // The enumerated values are: + // ! `high` + // ! `medium` + // ! `low` + Level PersonalInfoLevel `json:"level,omitempty"` + + // The position of the personal information in the list of all `PersonalInfo` objects that + // have the same kind property value in the Card. + // + // If set, the `listAs` value MUST be higher than zero. + // + // Multiple personal information entries MAY have the same `listAs` property value or none. + // + // Sorting such same-valued entries is implementation-specific. + ListAs uint `json:"listAs,omitzero"` + + // A [custom label]. + // + // The labels associated with the contact data. + // + // Such labels may be set for phone numbers, email addresses, and other resources. + // + // Typically, these labels are displayed along with their associated contact data in graphical user interfaces. + // + // Note that succinct labels are best for proper display on small graphical interfaces and screens. + // + // [custom label]: https://www.rfc-editor.org/rfc/rfc9553.html#prop-label + Label string `json:"label,omitempty"` +} + +// A ContactCard object contains information about a person, company, or other entity, or represents a group of such entities. +// +// It is a JSCard (JSContact) object, as defined in [RFC9553], with two additional properties. +// +// A contact card with a `kind` property equal to `group` represents a group of contacts. +// Clients often present these separately from other contact cards. +// +// The `members` property, as defined in RFC XXX, Section XXX, contains a set of UIDs for other contacts that are the members +// of this group. +// Clients should consider the group to contain any `ContactCard` with a matching UID, from any account they have access to with +// support for the `urn:ietf:params:jmap:contacts` capability. +// UIDs that cannot be found SHOULD be ignored but preserved. +// +// For example, suppose a user adds contacts from a shared address book to their private group, then temporarily loses access to +// this address book. +// The UIDs cannot be resolved so the contacts will disappear from the group. +// However, if they are given permission to access the data again the UIDs will be found and the contacts will reappear. +// +// [RFC9553]: https://www.rfc-editor.org/rfc/rfc9553.html +type ContactCard struct { + // The id of the Card (immutable; server-set). + // + // The id uniquely identifies a Card with a particular “uid” within a particular account. + // + // This is a JMAP extension and not part of [RFC9553]. 
+ //
+ // [RFC9553]: https://www.rfc-editor.org/rfc/rfc9553.html
+ Id string `json:"id,omitempty"`
+
+ // The set of AddressBook ids this Card belongs to.
+ //
+ // A card MUST belong to at least one AddressBook at all times (until it is destroyed).
+ //
+ // The set is represented as an object, with each key being an AddressBook id.
+ //
+ // The value for each key in the object MUST be true.
+ //
+ // This is a JMAP extension and not part of [RFC9553].
+ //
+ // [RFC9553]: https://www.rfc-editor.org/rfc/rfc9553.html
+ AddressBookIds map[string]bool `json:"addressBookIds,omitempty"`
+
+ // The JSContact type of the Card object: the value MUST be "Card".
+ Type TypeOfContactCard `json:"@type,omitempty"`
+
+ // The JSContact version of this Card.
+ //
+ // The value MUST be one of the IANA-registered JSContact Version values for the version property.
+ //
+ // example: 1.0
+ Version JSContactVersion `json:"version"`
+
+ // The date and time when the Card was created (UTCDateTime).
+ //
+ // example: 2022-09-30T14:35:10Z
+ Created time.Time `json:"created,omitzero"`
+
+ // The kind of the entity the Card represents (default: `individual`).
+ //
+ // Values are:
+ // !- `individual`: a single person
+ // !- `group`: a group of people or entities
+ // !- `org`: an organization
+ // !- `location`: a named location
+ // !- `device`: a device such as an appliance, a computer, or a network element
+ // !- `application`: a software application
+ //
+ // example: individual
+ Kind ContactCardKind `json:"kind,omitempty"`
+
+ // The language tag, as defined in [RFC5646].
+ //
+ // The language tag that best describes the language used for text in the Card, optionally including
+ // additional information such as the script.
+ //
+ // Note that values MAY be localized in the `localizations` property.
+ //
+ // [RFC5646]: https://www.rfc-editor.org/rfc/rfc5646.html
+ //
+ // example: de-AT
+ Language string `json:"language,omitempty"`
+
+ // The set of Cards that are members of this group Card.
+ //
+ // Each key in the set is the uid property value of the member, and each boolean value MUST be `true`.
+ // If this property is set, then the value of the kind property MUST be `group`.
+ //
+ // The opposite is not true. A group Card will usually contain the members property to specify the members
+ // of the group, but it is not required to.
+ //
+ // A group Card without the members property can be considered an abstract grouping or one whose members
+ // are known empirically (e.g., `IETF Participants`).
+ Members map[string]bool `json:"members,omitempty"`
+
+ // The identifier for the product that created the Card.
+ //
+ // If set, the value MUST be at least one character long.
+ //
+ // example: ACME Contacts App version 1.23.5
+ ProdId string `json:"prodId,omitempty"`
+
+ // The set of Card objects that relate to the Card.
+ //
+ // The value is a map, where each key is the uid property value of the related Card, and the value
+ // defines the relation:
+ //
+ // ```json
+ // {
+ // "relatedTo": {
+ // "urn:uuid:f81d4fae-7dec-11d0-a765-00a0c91e6bf6": {
+ // "relation": {"friend": true}
+ // },
+ // "8cacdfb7d1ffdb59@example.com": {
+ // "relation": {}
+ // }
+ // }
+ // }
+ // ```
+ RelatedTo map[string]Relation `json:"relatedTo,omitempty"`
+
+ // An identifier that associates the object as the same across different systems, address books, and views.
+ //
+ // The value SHOULD be a URN [RFC8141], but for compatibility with [RFC6350], it MAY also be a URI [RFC3986]
+ // or free-text value.
+ // + // The value of the URN SHOULD be in the "uuid" namespace [RFC9562]. + // + // [RFC9562] describes multiple versions of Universally Unique IDentifiers (UUIDs); UUID version 4 is RECOMMENDED. + // + // [RFC8141]: https://www.rfc-editor.org/rfc/rfc8141.html + // [RFC6350]: https://www.rfc-editor.org/rfc/rfc6350.html + // [RFC9562]: https://www.rfc-editor.org/rfc/rfc9562.html + // + // example: urn:uuid:f81d4fae-7dec-11d0-a765-00a0c91e6bf6 + Uid string `json:"uid,omitempty"` + + // The date and time when the data in the Card was last modified (UTCDateTime). + // + // example: 2021-10-31T22:27:10Z + Updated time.Time `json:"updated,omitzero"` + + // The name of the entity represented by the Card. + // + // This can be any type of name, e.g., it can, but need not, be the legal name of a person. + Name *Name `json:"name,omitempty"` + + // The nicknames of the entity represented by the Card. + Nicknames map[string]Nickname `json:"nicknames,omitempty"` + + // The company or organization names and units associated with the Card. + Organizations map[string]Organization `json:"organizations,omitempty"` + + // The information that directs how to address, speak to, or refer to the entity that is represented by the Card. + SpeakToAs *SpeakToAs `json:"speakToAs,omitempty"` + + // The job titles or functional positions of the entity represented by the Card. + Titles map[string]Title `json:"titles,omitempty"` + + // The email addresses in which to contact the entity represented by the Card. + Emails map[string]EmailAddress `json:"emails,omitempty"` + + // The online services that are associated with the entity represented by the Card. + // + // This can be messaging services, social media profiles, and other. + OnlineServices map[string]OnlineService `json:"onlineServices,omitempty"` + + // The phone numbers by which to contact the entity represented by the Card. + Phones map[string]Phone `json:"phones,omitempty"` + + // The preferred languages for contacting the entity associated with the Card. + PreferredLanguages map[string]LanguagePref `json:"preferredLanguages,omitempty"` + + // The calendaring resources of the entity represented by the Card, such as to look up free-busy information. + // + // A Calendar object has all properties of the Resource data type, with the following additional definitions: + // !- The `@type` property value MUST be `Calendar`, if set + // !- The `kind` property is mandatory. Its enumerated values are: + // !- `calendar`: The resource is a calendar that contains entries such as calendar events or tasks + // !- `freeBusy`: The resource allows for free-busy lookups, for example, to schedule group events + Calendars map[string]Calendar `json:"calendars,omitempty"` + + // The scheduling addresses by which the entity may receive calendar scheduling invitations. + SchedulingAddresses map[string]SchedulingAddress `json:"schedulingAddresses,omitempty"` + + // The addresses of the entity represented by the Card, such as postal addresses or geographic locations. + Addresses map[string]Address `json:"addresses,omitempty"` + + // The cryptographic resources such as public keys and certificates associated with the entity represented by the Card. + // + // A CryptoKey object has all properties of the `Resource` data type, with the following additional definition: + // the `@type` property value MUST be `CryptoKey`, if set. 
+ //
+ // The following example shows how to refer to an external cryptographic resource:
+ // ```
+ // "cryptoKeys": {
+ // "mykey1": {
+ // "uri": "https://www.example.com/keys/jdoe.cer"
+ // }
+ // }
+ // ```
+ CryptoKeys map[string]CryptoKey `json:"cryptoKeys,omitempty"`
+
+ // The directories containing information about the entity represented by the Card.
+ //
+ // A Directory object has all properties of the `Resource` data type, with the following additional definitions:
+ // !- The `@type` property value MUST be `Directory`, if set
+ // !- The `kind` property is mandatory; its enumerated values are:
+ // !- `directory`: the resource is a directory service that the entity represented by the Card is a part of; this
+ // typically is an organizational directory that also contains associated entities, e.g., co-workers and management
+ // in a company directory
+ // !- `entry`: the resource is a directory entry of the entity represented by the Card; in contrast to the `directory`
+ // type, this is the specific URI for the entity within a directory
+ Directories map[string]Directory `json:"directories,omitempty"`
+
+ // The links to resources that do not fit any of the other use-case-specific resource properties.
+ //
+ // A Link object has all properties of the `Resource` data type, with the following additional definitions:
+ // !- The `@type` property value MUST be `Link`, if set
+ // !- The `kind` property is optional; its enumerated values are:
+ // !- `contact`: the resource is a URI by which the entity represented by the Card may be contacted;
+ // this includes web forms or other media that require user interaction
+ Links map[string]Link `json:"links,omitempty"`
+
+ // The media resources such as photographs, avatars, or sounds that are associated with the entity represented by the Card.
+ //
+ // A Media object has all properties of the Resource data type, with the following additional definitions:
+ // !- the `@type` property value MUST be `Media`, if set
+ // !- the `kind` property is mandatory; its enumerated values are:
+ // !- `photo`: the resource is a photograph or avatar
+ // !- `sound`: the resource is audio media, e.g., to specify the proper pronunciation of the name property contents
+ // !- `logo`: the resource is a graphic image or logo associated with the entity represented by the Card
+ Media map[string]Media `json:"media,omitempty"`
+
+ // The property values localized to languages other than the main `language` of the Card.
+ //
+ // Localizations provide language-specific alternatives for existing property values and SHOULD NOT add new properties.
+ //
+ // The keys in the localizations property value are language tags [RFC5646]; the values are of type `PatchObject` and
+ // localize the Card in that language tag.
+ //
+ // The paths in the `PatchObject` are relative to the Card that includes the localizations property.
+ //
+ // A patch MUST NOT target the localizations property.
+ //
+ // Conceptually, a Card is localized as follows:
+ // !- Determine the language tag in which the Card should be localized.
+ // !- If the localizations property includes a key for that language, obtain the PatchObject value;
+ // if there is no such key, stop.
+ // !- Create a copy of the Card, but do not copy the localizations property.
+ // !- Apply all patches in the PatchObject to the copy of the Card.
+ // !- Optionally, set the language property in the copy of the Card.
+ // !- Use the patched copy of the Card as the localized variant of the original Card.
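+ //
+ // As a non-normative illustration, a Card whose main language is `en` could carry a French translation
+ // of a note via a `PatchObject` path that points into the notes map:
+ //
+ // ```json
+ // {
+ // "language": "en",
+ // "notes": {"n1": {"note": "Good to know"}},
+ // "localizations": {
+ // "fr": {"notes/n1/note": "Bon à savoir"}
+ // }
+ // }
+ // ```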
+ // + // A patch in the `PatchObject` may contain any value type. + // + // Its value MUST be a valid value according to the definition of the patched property. + // + // [RFC5646]: https://www.rfc-editor.org/rfc/rfc5646.html + Localizations map[string]PatchObject `json:"localizations,omitempty"` + + // The memorable dates and events for the entity represented by the Card. + Anniversaries map[string]Anniversary `json:"anniversaries,omitempty"` + + // The set of free-text keywords, also known as tags. + // + // Each key in the set is a keyword, and each boolean value MUST be `true`. + Keywords map[string]bool `json:"keywords,omitempty"` + + // The free-text notes that are associated with the Card. + Notes map[string]Note `json:"notes,omitempty"` + + // The personal information of the entity represented by the Card. + PersonalInfo map[string]PersonalInfo `json:"personalInfo,omitempty"` +} + +const ( + ContactCardPropertyId = "id" + ContactCardPropertyAddressBookIds = "addressBookIds" + ContactCardPropertyType = "@type" + ContactCardPropertyVersion = "version" + ContactCardPropertyCreated = "created" + ContactCardPropertyKind = "kind" + ContactCardPropertyLanguage = "language" + ContactCardPropertyMembers = "members" + ContactCardPropertyProdId = "prodId" + ContactCardPropertyRelatedTo = "relatedTo" + ContactCardPropertyUid = "uid" + ContactCardPropertyUpdated = "updated" + ContactCardPropertyName = "name" + ContactCardPropertyNicknames = "nicknames" + ContactCardPropertyOrganizations = "organizations" + ContactCardPropertySpeakToAs = "speakToAs" + ContactCardPropertyTitles = "titles" + ContactCardPropertyEmails = "emails" + ContactCardPropertyOnlineServices = "onlineServices" + ContactCardPropertyPhones = "phones" + ContactCardPropertyPreferredLanguages = "preferredLanguages" + ContactCardPropertyCalendars = "calendars" + ContactCardPropertySchedulingAddresses = "schedulingAddresses" + ContactCardPropertyAddresses = "addresses" + ContactCardPropertyCryptoKeys = "cryptoKeys" + ContactCardPropertyDirectories = "directories" + ContactCardPropertyLinks = "links" + ContactCardPropertyMedia = "media" + ContactCardPropertyLocalizations = "localizations" + ContactCardPropertyAnniversaries = "anniversaries" + ContactCardPropertyKeywords = "keywords" + ContactCardPropertyNotes = "notes" + ContactCardPropertyPersonalInfo = "personalInfo" +) + +var ContactCardProperties = []string{ + ContactCardPropertyId, + ContactCardPropertyAddressBookIds, + ContactCardPropertyType, + ContactCardPropertyVersion, + ContactCardPropertyCreated, + ContactCardPropertyKind, + ContactCardPropertyLanguage, + ContactCardPropertyMembers, + ContactCardPropertyProdId, + ContactCardPropertyRelatedTo, + ContactCardPropertyUid, + ContactCardPropertyUpdated, + ContactCardPropertyName, + ContactCardPropertyNicknames, + ContactCardPropertyOrganizations, + ContactCardPropertySpeakToAs, + ContactCardPropertyTitles, + ContactCardPropertyEmails, + ContactCardPropertyOnlineServices, + ContactCardPropertyPhones, + ContactCardPropertyPreferredLanguages, + ContactCardPropertyCalendars, + ContactCardPropertySchedulingAddresses, + ContactCardPropertyAddresses, + ContactCardPropertyCryptoKeys, + ContactCardPropertyDirectories, + ContactCardPropertyLinks, + ContactCardPropertyMedia, + ContactCardPropertyLocalizations, + ContactCardPropertyAnniversaries, + ContactCardPropertyKeywords, + ContactCardPropertyNotes, + ContactCardPropertyPersonalInfo, +} diff --git a/pkg/jscontact/jscontact_model_test.go b/pkg/jscontact/jscontact_model_test.go new 
file mode 100644 index 0000000000..684bf69556 --- /dev/null +++ b/pkg/jscontact/jscontact_model_test.go @@ -0,0 +1,1252 @@ +package jscontact + +import ( + "encoding/json" + "testing" + "time" + + "github.com/stretchr/testify/require" +) + +func jsoneq[X any](t *testing.T, expected string, object X) { + data, err := json.MarshalIndent(object, "", "") + require.NoError(t, err) + require.JSONEq(t, expected, string(data)) + + var rec X + err = json.Unmarshal(data, &rec) + require.NoError(t, err) + require.Equal(t, object, rec) +} + +func TestCalendar(t *testing.T) { + jsoneq(t, `{ + "@type": "Calendar", + "kind": "calendar", + "uri": "https://opencloud.eu/calendar/d05779b6-9638-4694-9869-008a61df6025", + "mediaType": "application/jscontact+json", + "contexts": { + "work": true + }, + "label": "test" + }`, Calendar{ + Type: CalendarType, + Kind: CalendarKindCalendar, + Uri: "https://opencloud.eu/calendar/d05779b6-9638-4694-9869-008a61df6025", + MediaType: "application/jscontact+json", + Contexts: map[CalendarContext]bool{ + CalendarContextWork: true, + }, + Pref: 0, + Label: "test", + }) +} + +func TestLink(t *testing.T) { + jsoneq(t, `{ + "@type": "Link", + "kind": "contact", + "uri": "https://opencloud.eu/calendar/d05779b6-9638-4694-9869-008a61df6025", + "mediaType": "application/jscontact+json", + "contexts": { + "work": true + }, + "label": "test" + }`, Link{ + Type: LinkType, + Kind: LinkKindContact, + Uri: "https://opencloud.eu/calendar/d05779b6-9638-4694-9869-008a61df6025", + MediaType: "application/jscontact+json", + Contexts: map[LinkContext]bool{ + LinkContextWork: true, + }, + Pref: 0, + Label: "test", + }) +} + +func TestCryptoKey(t *testing.T) { + jsoneq(t, `{ + "@type": "CryptoKey", + "uri": "https://opencloud.eu/calendar/d05779b6-9638-4694-9869-008a61df6025.pgp", + "mediaType": "application/pgp-keys", + "contexts": { + "work": true + }, + "label": "test" + }`, CryptoKey{ + Type: CryptoKeyType, + Uri: "https://opencloud.eu/calendar/d05779b6-9638-4694-9869-008a61df6025.pgp", + MediaType: "application/pgp-keys", + Contexts: map[CryptoKeyContext]bool{ + CryptoKeyContextWork: true, + }, + Pref: 0, + Label: "test", + }) +} + +func TestDirectory(t *testing.T) { + jsoneq(t, `{ + "@type": "Directory", + "kind": "entry", + "uri": "https://opencloud.eu/calendar/d05779b6-9638-4694-9869-008a61df6025", + "mediaType": "application/jscontact+json", + "contexts": { + "work": true + }, + "label": "test", + "listAs": 3 + }`, Directory{ + Type: DirectoryType, + Kind: DirectoryKindEntry, + Uri: "https://opencloud.eu/calendar/d05779b6-9638-4694-9869-008a61df6025", + MediaType: "application/jscontact+json", + Contexts: map[DirectoryContext]bool{ + DirectoryContextWork: true, + }, + Pref: 0, + Label: "test", + ListAs: 3, + }) +} + +func TestMedia(t *testing.T) { + jsoneq(t, `{ + "@type": "Media", + "kind": "logo", + "uri": "https://opencloud.eu/opencloud.svg", + "mediaType": "image/svg+xml", + "contexts": { + "work": true + }, + "label": "test", + "blobId": "1d92cf97e32b42ceb5538f0804a41891" + }`, Media{ + Type: MediaType, + Kind: MediaKindLogo, + Uri: "https://opencloud.eu/opencloud.svg", + MediaType: "image/svg+xml", + Contexts: map[MediaContext]bool{ + MediaContextWork: true, + }, + Pref: 0, + Label: "test", + BlobId: "1d92cf97e32b42ceb5538f0804a41891", + }) +} + +func TestRelation(t *testing.T) { + jsoneq(t, `{ + "@type": "Relation", + "relation": { + "co-worker": true, + "friend": true + } + }`, Relation{ + Type: RelationType, + Relation: map[Relationship]bool{ + RelationCoWorker: true, + 
RelationFriend: true, + }, + }) +} + +func TestNameComponent(t *testing.T) { + jsoneq(t, `{ + "@type": "NameComponent", + "value": "Robert", + "kind": "given", + "phonetic": "Bob" + }`, NameComponent{ + Type: NameComponentType, + Value: "Robert", + Kind: NameComponentKindGiven, + Phonetic: "Bob", + }) +} + +func TestNickname(t *testing.T) { + jsoneq(t, `{ + "@type": "Nickname", + "name": "Bob", + "contexts": { + "private": true + }, + "pref": 3 + }`, Nickname{ + Type: NicknameType, + Name: "Bob", + Contexts: map[NicknameContext]bool{ + NicknameContextPrivate: true, + }, + Pref: 3, + }) +} + +func TestOrgUnit(t *testing.T) { + jsoneq(t, `{ + "@type": "OrgUnit", + "name": "Skynet", + "sortAs": "SKY" + }`, OrgUnit{ + Type: OrgUnitType, + Name: "Skynet", + SortAs: "SKY", + }) +} + +func TestOrganization(t *testing.T) { + jsoneq(t, `{ + "@type": "Organization", + "name": "Cyberdyne", + "sortAs": "CYBER", + "units": [{ + "@type": "OrgUnit", + "name": "Skynet", + "sortAs": "SKY" + }, { + "@type": "OrgUnit", + "name": "Cybernics" + } + ], + "contexts": { + "work": true + } + }`, Organization{ + Type: OrganizationType, + Name: "Cyberdyne", + SortAs: "CYBER", + Units: []OrgUnit{ + { + Type: OrgUnitType, + Name: "Skynet", + SortAs: "SKY", + }, + { + Type: OrgUnitType, + Name: "Cybernics", + }, + }, + Contexts: map[OrganizationContext]bool{ + OrganizationContextWork: true, + }, + }) +} + +func TestPronouns(t *testing.T) { + jsoneq(t, `{ + "@type": "Pronouns", + "pronouns": "they/them", + "contexts": { + "work": true, + "private": true + }, + "pref": 1 + }`, Pronouns{ + Type: PronounsType, + Pronouns: "they/them", + Contexts: map[PronounsContext]bool{ + PronounsContextWork: true, + PronounsContextPrivate: true, + }, + Pref: 1, + }) +} + +func TestTitle(t *testing.T) { + jsoneq(t, `{ + "@type": "Title", + "name": "Doctor", + "kind": "title", + "organizationId": "407e1992-9a2b-4e4f-a11b-85a509a4b5ae" + }`, Title{ + Type: TitleType, + Name: "Doctor", + Kind: TitleKindTitle, + OrganizationId: "407e1992-9a2b-4e4f-a11b-85a509a4b5ae", + }) +} + +func TestSpeakToAs(t *testing.T) { + jsoneq(t, `{ + "@type": "SpeakToAs", + "grammaticalGender": "neuter", + "pronouns": { + "a": { + "@type": "Pronouns", + "pronouns": "they/them", + "contexts": { + "private": true + }, + "pref": 1 + }, + "b": { + "@type": "Pronouns", + "pronouns": "he/him", + "contexts": { + "work": true + }, + "pref": 99 + } + } + }`, SpeakToAs{ + Type: SpeakToAsType, + GrammaticalGender: GrammaticalGenderNeuter, + Pronouns: map[string]Pronouns{ + "a": { + Type: PronounsType, + Pronouns: "they/them", + Contexts: map[PronounsContext]bool{ + PronounsContextPrivate: true, + }, + Pref: 1, + }, + "b": { + Type: PronounsType, + Pronouns: "he/him", + Contexts: map[PronounsContext]bool{ + PronounsContextWork: true, + }, + Pref: 99, + }, + }, + }) +} + +func TestName(t *testing.T) { + jsoneq(t, `{ + "@type": "Name", + "components": [ + { "@type": "NameComponent", "kind": "given", "value": "Diego", "phonetic": "/di\u02C8e\u026A\u0261əʊ/" }, + { "kind": "surname", "value": "Rivera" }, + { "kind": "surname2", "value": "Barrientos" } + ], + "isOrdered": true, + "defaultSeparator": " ", + "full": "Diego Rivera Barrientos", + "sortAs": { + "surname": "Rivera Barrientos", + "given": "Diego" + } + }`, Name{ + Type: NameType, + Components: []NameComponent{ + { + Type: NameComponentType, + Value: "Diego", + Kind: NameComponentKindGiven, + Phonetic: "/diˈeɪɡəʊ/", + }, + { + Value: "Rivera", + Kind: NameComponentKindSurname, + }, + { + Value: "Barrientos", + Kind: 
NameComponentKindSurname2, + }, + }, + IsOrdered: true, + DefaultSeparator: " ", + Full: "Diego Rivera Barrientos", + SortAs: map[string]string{ + string(NameComponentKindSurname): "Rivera Barrientos", + string(NameComponentKindGiven): "Diego", + }, + }) +} + +func TestEmailAddress(t *testing.T) { + jsoneq(t, `{ + "@type": "EmailAddress", + "address": "camina@opa.org", + "contexts": { + "work": true, + "private": true + }, + "pref": 1, + "label": "bosmang" + }`, EmailAddress{ + Type: EmailAddressType, + Address: "camina@opa.org", + Contexts: map[EmailAddressContext]bool{ + EmailAddressContextWork: true, + EmailAddressContextPrivate: true, + }, + Pref: 1, + Label: "bosmang", + }) +} + +func TestOnlineService(t *testing.T) { + jsoneq(t, `{ + "@type": "OnlineService", + "service": "OPA Network", + "contexts": { + "work": true + }, + "uri": "https://opa.org/cdrummer", + "user": "cdrummer@opa.org", + "pref": 12, + "label": "opa" + }`, OnlineService{ + Type: OnlineServiceType, + Service: "OPA Network", + Contexts: map[OnlineServiceContext]bool{ + OnlineServiceContextWork: true, + }, + Uri: "https://opa.org/cdrummer", + User: "cdrummer@opa.org", + Pref: 12, + Label: "opa", + }) +} + +func TestPhone(t *testing.T) { + jsoneq(t, `{ + "@type": "Phone", + "number": "+15551234567", + "features": { + "text": true, + "main-number": true, + "cell": true, + "video": true, + "voice": true + }, + "contexts": { + "work": true, + "private": true + }, + "pref": 42, + "label": "opa" + }`, Phone{ + Type: PhoneType, + Number: "+15551234567", + Features: map[PhoneFeature]bool{ + PhoneFeatureText: true, + PhoneFeatureMainNumber: true, + PhoneFeatureMobile: true, + PhoneFeatureVideo: true, + PhoneFeatureVoice: true, + }, + Contexts: map[PhoneContext]bool{ + PhoneContextWork: true, + PhoneContextPrivate: true, + }, + Pref: 42, + Label: "opa", + }) +} + +func TestLanguagePref(t *testing.T) { + jsoneq(t, `{ + "@type": "LanguagePref", + "language": "fr-BE", + "contexts": { + "private": true + }, + "pref": 2 + }`, LanguagePref{ + Type: LanguagePrefType, + Language: "fr-BE", + Contexts: map[LanguagePrefContext]bool{ + LanguagePrefContextPrivate: true, + }, + Pref: 2, + }) +} + +func TestSchedulingAddress(t *testing.T) { + jsoneq(t, `{ + "@type": "SchedulingAddress", + "uri": "mailto:camina@opa.org", + "contexts": { + "work": true + }, + "pref": 3, + "label": "opa" + }`, SchedulingAddress{ + Type: SchedulingAddressType, + Uri: "mailto:camina@opa.org", + Label: "opa", + Contexts: map[SchedulingAddressContext]bool{ + SchedulingAddressContextWork: true, + }, + Pref: 3, + }) +} + +func TestAddressComponent(t *testing.T) { + jsoneq(t, `{ + "@type": "AddressComponent", + "kind": "postcode", + "value": "12345", + "phonetic": "un-deux-trois-quatre-cinq" + }`, AddressComponent{ + Type: AddressComponentType, + Kind: AddressComponentKindPostcode, + Value: "12345", + Phonetic: "un-deux-trois-quatre-cinq", + }) +} + +func TestAddress(t *testing.T) { + jsoneq(t, `{ + "@type": "Address", + "contexts": { + "delivery": true, + "work": true + }, + "components": [ + {"@type": "AddressComponent", "kind": "number", "value": "54321"}, + {"kind": "separator", "value": " "}, + {"kind": "name", "value": "Oak St"}, + {"kind": "locality", "value": "Reston"}, + {"kind": "region", "value": "VA"}, + {"kind": "separator", "value": " "}, + {"kind": "postcode", "value": "20190"}, + {"kind": "country", "value": "USA"} + ], + "countryCode": "US", + "defaultSeparator": ", ", + "isOrdered": true + }`, Address{ + Type: AddressType, + Contexts: 
map[AddressContext]bool{ + AddressContextDelivery: true, + AddressContextWork: true, + }, + Components: []AddressComponent{ + {Type: AddressComponentType, Kind: AddressComponentKindNumber, Value: "54321"}, + {Kind: AddressComponentKindSeparator, Value: " "}, + {Kind: AddressComponentKindName, Value: "Oak St"}, + {Kind: AddressComponentKindLocality, Value: "Reston"}, + {Kind: AddressComponentKindRegion, Value: "VA"}, + {Kind: AddressComponentKindSeparator, Value: " "}, + {Kind: AddressComponentKindPostcode, Value: "20190"}, + {Kind: AddressComponentKindCountry, Value: "USA"}, + }, + CountryCode: "US", + DefaultSeparator: ", ", + IsOrdered: true, + }) +} + +func TestPartialDate(t *testing.T) { + jsoneq(t, `{ + "@type": "PartialDate", + "year": 2025, + "month": 9, + "day": 25, + "calendarScale": "iso8601" + }`, PartialDate{ + Type: PartialDateType, + Year: 2025, + Month: 9, + Day: 25, + CalendarScale: "iso8601", + }) +} + +func TestTimestamp(t *testing.T) { + ts, err := time.Parse(time.RFC3339, "2025-09-25T18:26:14.094725532+02:00") + require.NoError(t, err) + jsoneq(t, `{ + "@type": "Timestamp", + "utc": "2025-09-25T18:26:14.094725532+02:00" + }`, &Timestamp{ + Type: TimestampType, + Utc: ts, + }) +} + +func TestAnniversaryWithPartialDate(t *testing.T) { + jsoneq(t, `{ + "@type": "Anniversary", + "kind": "birth", + "date": { + "@type": "PartialDate", + "year": 2025, + "month": 9, + "day": 25 + } + }`, Anniversary{ + Type: AnniversaryType, + Kind: AnniversaryKindBirth, + Date: &PartialDate{ + Type: PartialDateType, + Year: 2025, + Month: 9, + Day: 25, + }, + }) +} + +func TestAnniversaryWithTimestamp(t *testing.T) { + ts, err := time.Parse(time.RFC3339, "2025-09-25T18:26:14.094725532+02:00") + require.NoError(t, err) + + jsoneq(t, `{ + "@type": "Anniversary", + "kind": "birth", + "date": { + "@type": "Timestamp", + "utc": "2025-09-25T18:26:14.094725532+02:00" + } + }`, Anniversary{ + Type: AnniversaryType, + Kind: AnniversaryKindBirth, + Date: &Timestamp{ + Type: TimestampType, + Utc: ts, + }, + }) +} + +func TestAuthor(t *testing.T) { + jsoneq(t, `{ + "@type": "Author", + "name": "Camina Drummer", + "uri": "https://opa.org/cdrummer" + }`, Author{ + Type: AuthorType, + Name: "Camina Drummer", + Uri: "https://opa.org/cdrummer", + }) +} + +func TestNote(t *testing.T) { + ts, err := time.Parse(time.RFC3339, "2025-09-25T18:26:14.094725532+02:00") + require.NoError(t, err) + + jsoneq(t, `{ + "@type": "Note", + "note": "this is a note", + "created": "2025-09-25T18:26:14.094725532+02:00", + "author": { + "@type": "Author", + "name": "Camina Drummer", + "uri": "https://opa.org/cdrummer" + } + }`, Note{ + Type: NoteType, + Note: "this is a note", + Created: ts, + Author: &Author{ + Type: AuthorType, + Name: "Camina Drummer", + Uri: "https://opa.org/cdrummer", + }, + }) +} + +func TestPersonalInfo(t *testing.T) { + jsoneq(t, `{ + "@type": "PersonalInfo", + "kind": "expertise", + "value": "motivation", + "level": "high", + "listAs": 1, + "label": "opa" + }`, PersonalInfo{ + Type: PersonalInfoType, + Kind: PersonalInfoKindExpertise, + Value: "motivation", + Level: PersonalInfoLevelHigh, + ListAs: 1, + Label: "opa", + }) +} + +func TestContactCard(t *testing.T) { + created, err := time.Parse(time.RFC3339, "2025-09-25T18:26:14.094725532+02:00") + require.NoError(t, err) + + updated, err := time.Parse(time.RFC3339, "2025-09-26T09:58:01+02:00") + require.NoError(t, err) + + jsoneq(t, `{ + "@type": "Card", + "kind": "group", + "id": "20fba820-2f8e-432d-94f1-5abbb59d3ed7", + "addressBookIds": { + 
"79047052-ae0e-4299-8860-5bff1a139f3d": true, + "44eb6105-08c1-458b-895e-4ad1149dfabd": true + }, + "version": "1.0", + "created": "2025-09-25T18:26:14.094725532+02:00", + "language": "fr-BE", + "members": { + "314815dd-81c8-4640-aace-6dc83121616d": true, + "c528b277-d8cb-45f2-b7df-1aa3df817463": true, + "81dea240-c0a4-4929-82e7-79e713a8bbe4": true + }, + "prodId": "OpenCloud Groupware 1.0", + "relatedTo": { + "urn:uid:ca9d2a62-e068-43b6-a470-46506976d505": { + "@type": "Relation", + "relation": { + "contact": true + } + }, + "urn:uid:72183ec2-b218-4983-9c89-ff117eeb7c5e": { + "relation": { + "emergency": true, + "spouse": true + } + } + }, + "uid": "1091f2bb-6ae6-4074-bb64-df74071d7033", + "updated": "2025-09-26T09:58:01+02:00", + "name": { + "@type": "Name", + "components": [ + {"@type": "NameComponent", "value": "OpenCloud", "kind": "surname"}, + {"value": " ", "kind": "separator"}, + {"value": "Team", "kind": "surname2"} + ], + "isOrdered": true, + "defaultSeparator": ", ", + "sortAs": { + "surname": "OpenCloud Team" + }, + "full": "OpenCloud Team" + }, + "nicknames": { + "a": { + "@type": "Nickname", + "name": "The Team", + "contexts": { + "work": true + }, + "pref": 1 + } + }, + "organizations": { + "o": { + "@type": "Organization", + "name": "OpenCloud GmbH", + "units": [ + {"@type": "OrgUnit", "name": "Marketing", "sortAs": "marketing"}, + {"@type": "OrgUnit", "name": "Sales"}, + {"name": "Operations", "sortAs": "ops"} + ], + "sortAs": "opencloud", + "contexts": { + "work": true + } + } + }, + "speakToAs": { + "@type": "SpeakToAs", + "grammaticalGender": "inanimate", + "pronouns": { + "p": { + "@type": "Pronouns", + "pronouns": "it", + "contexts": { + "work": true + }, + "pref": 1 + } + } + }, + "titles": { + "t": { + "@type": "Title", + "name": "The", + "kind": "title", + "organizationId": "o" + } + }, + "emails": { + "e": { + "@type": "EmailAddress", + "address": "info@opencloud.eu.example.com", + "contexts": { + "work": true + }, + "pref": 1, + "label": "work" + } + }, + "onlineServices": { + "s": { + "@type": "OnlineService", + "service": "The Misinformation Game", + "uri": "https://misinfogame.com/91886aa0-3586-4ade-b9bb-ec031464a251", + "user": "opencloudeu", + "contexts": { + "work": true + }, + "pref": 1, + "label": "imaginary" + } + }, + "phones": { + "p": { + "@type": "Phone", + "number": "+1-804-222-1111", + "features": { + "voice": true, + "text": true + }, + "contexts": { + "work": true + }, + "pref": 1, + "label": "imaginary" + } + }, + "preferredLanguages": { + "wa": { + "@type": "LanguagePref", + "language": "wa-BE", + "contexts": { + "private": true + }, + "pref": 1 + }, + "de": { + "language": "de-DE", + "contexts": { + "work": true + }, + "pref": 2 + } + }, + "calendars": { + "c": { + "@type": "Calendar", + "kind": "calendar", + "uri": "https://opencloud.eu/calendars/521b032b-a2b3-4540-81b9-3f6bccacaab2", + "mediaType": "application/jscontact+json", + "contexts": { + "work": true + }, + "pref": 1, + "label": "work" + } + }, + "schedulingAddresses": { + "s": { + "@type": "SchedulingAddress", + "uri": "mailto:scheduling@opencloud.eu.example.com", + "contexts": { + "work": true + }, + "pref": 1, + "label": "work" + } + }, + "addresses": { + "k26": { + "@type": "Address", + "components": [ + {"@type": "AddressComponent", "kind": "block", "value": "2-7"}, + {"kind": "separator", "value": "-"}, + {"kind": "number", "value": "2"}, + {"kind": "separator", "value": " "}, + {"kind": "district", "value": "Marunouchi"}, + {"kind": "locality", "value": "Chiyoda-ku"}, + 
{"kind": "region", "value": "Tokyo"}, + {"kind": "separator", "value": " "}, + {"kind": "postcode", "value": "100-8994"} + ], + "isOrdered": true, + "defaultSeparator": ", ", + "full": "2-7-2 Marunouchi, Chiyoda-ku, Tokyo 100-8994", + "countryCode": "JP", + "coordinates": "geo:35.6796373,139.7616907", + "timeZone": "JST", + "contexts": { + "delivery": true, + "work": true + }, + "pref": 2 + } + }, + "cryptoKeys": { + "k1": { + "@type": "CryptoKey", + "uri": "https://opencloud.eu.example.com/keys/d550f57c-582c-43cc-8d94-822bded9ab36", + "mediaType": "application/pgp-keys", + "contexts": { + "work": true + }, + "pref": 1, + "label": "keys" + } + }, + "directories": { + "d1": { + "@type": "Directory", + "kind": "entry", + "uri": "https://opencloud.eu.example.com/addressbook/8c2f0363-af0a-4d16-a9d5-8a9cd885d722", + "listAs": 1 + } + }, + "links": { + "r1": { + "@type": "Link", + "kind": "contact", + "uri": "mailto:contact@opencloud.eu.example.com", + "contexts": { + "work": true + } + } + }, + "media": { + "m": { + "@type": "Media", + "kind": "logo", + "uri": "https://opencloud.eu.example.com/opencloud.svg", + "mediaType": "image/svg+xml", + "contexts": { + "work": true + }, + "pref": 123, + "label": "svg", + "blobId": "53feefbabeb146fcbe3e59e91462fa5f" + } + }, + "anniversaries": { + "birth": { + "@type": "Anniversary", + "kind": "birth", + "date": { + "@type": "PartialDate", + "year": 2025, + "month": 9, + "day": 26, + "calendarScale": "iso8601" + } + } + }, + "keywords": { + "imaginary": true, + "test": true + }, + "notes": { + "n1": { + "@type": "Note", + "note": "This is a note.", + "created": "2025-09-25T18:26:14.094725532+02:00", + "author": { + "@type": "Author", + "name": "Test Data", + "uri": "https://isbn.example.com/a461f292-6bf1-470e-b08d-f6b4b0223fe3" + } + } + }, + "personalInfo": { + "p1": { + "@type": "PersonalInfo", + "kind": "expertise", + "value": "Clouds", + "level": "high", + "listAs": 1, + "label": "experts" + } + }, + "localizations": { + "fr": { + "personalInfo": { + "value": "Nuages" + } + } + } + }`, ContactCard{ + Type: ContactCardType, + Kind: ContactCardKindGroup, + Id: "20fba820-2f8e-432d-94f1-5abbb59d3ed7", + AddressBookIds: map[string]bool{ + "79047052-ae0e-4299-8860-5bff1a139f3d": true, + "44eb6105-08c1-458b-895e-4ad1149dfabd": true, + }, + Version: JSContactVersion_1_0, + Created: created, + Language: "fr-BE", + Members: map[string]bool{ + "314815dd-81c8-4640-aace-6dc83121616d": true, + "c528b277-d8cb-45f2-b7df-1aa3df817463": true, + "81dea240-c0a4-4929-82e7-79e713a8bbe4": true, + }, + ProdId: "OpenCloud Groupware 1.0", + RelatedTo: map[string]Relation{ + "urn:uid:ca9d2a62-e068-43b6-a470-46506976d505": { + Type: RelationType, + Relation: map[Relationship]bool{ + RelationContact: true, + }, + }, + "urn:uid:72183ec2-b218-4983-9c89-ff117eeb7c5e": { + Relation: map[Relationship]bool{ + RelationEmergency: true, + RelationSpouse: true, + }, + }, + }, + Uid: "1091f2bb-6ae6-4074-bb64-df74071d7033", + Updated: updated, + Name: &Name{ + Type: NameType, + Components: []NameComponent{ + {Type: NameComponentType, Value: "OpenCloud", Kind: NameComponentKindSurname}, + {Value: " ", Kind: NameComponentKindSeparator}, + {Value: "Team", Kind: NameComponentKindSurname2}, + }, + IsOrdered: true, + DefaultSeparator: ", ", + SortAs: map[string]string{ + string(NameComponentKindSurname): "OpenCloud Team", + }, + Full: "OpenCloud Team", + }, + Nicknames: map[string]Nickname{ + "a": { + Type: NicknameType, + Name: "The Team", + Contexts: map[NicknameContext]bool{ + 
NicknameContextWork: true, + }, + Pref: 1, + }, + }, + Organizations: map[string]Organization{ + "o": { + Type: OrganizationType, + Name: "OpenCloud GmbH", + Units: []OrgUnit{ + {Type: OrgUnitType, Name: "Marketing", SortAs: "marketing"}, + {Type: OrgUnitType, Name: "Sales"}, + {Name: "Operations", SortAs: "ops"}, + }, + SortAs: "opencloud", + Contexts: map[OrganizationContext]bool{ + OrganizationContextWork: true, + }, + }, + }, + SpeakToAs: &SpeakToAs{ + Type: SpeakToAsType, + GrammaticalGender: GrammaticalGenderInanimate, + Pronouns: map[string]Pronouns{ + "p": { + Type: PronounsType, + Pronouns: "it", + Contexts: map[PronounsContext]bool{ + PronounsContextWork: true, + }, + Pref: 1, + }, + }, + }, + Titles: map[string]Title{ + "t": { + Type: TitleType, + Name: "The", + Kind: TitleKindTitle, + OrganizationId: "o", + }, + }, + Emails: map[string]EmailAddress{ + "e": { + Type: EmailAddressType, + Address: "info@opencloud.eu.example.com", + Contexts: map[EmailAddressContext]bool{ + EmailAddressContextWork: true, + }, + Pref: 1, + Label: "work", + }, + }, + OnlineServices: map[string]OnlineService{ + "s": { + Type: OnlineServiceType, + Service: "The Misinformation Game", + Uri: "https://misinfogame.com/91886aa0-3586-4ade-b9bb-ec031464a251", + User: "opencloudeu", + Contexts: map[OnlineServiceContext]bool{ + OnlineServiceContextWork: true, + }, + Pref: 1, + Label: "imaginary", + }, + }, + Phones: map[string]Phone{ + "p": { + Type: PhoneType, + Number: "+1-804-222-1111", + Features: map[PhoneFeature]bool{ + PhoneFeatureVoice: true, + PhoneFeatureText: true, + }, + Contexts: map[PhoneContext]bool{ + PhoneContextWork: true, + }, + Pref: 1, + Label: "imaginary", + }, + }, + PreferredLanguages: map[string]LanguagePref{ + "wa": { + Type: LanguagePrefType, + Language: "wa-BE", + Contexts: map[LanguagePrefContext]bool{ + LanguagePrefContextPrivate: true, + }, + Pref: 1, + }, + "de": { + Language: "de-DE", + Contexts: map[LanguagePrefContext]bool{ + LanguagePrefContextWork: true, + }, + Pref: 2, + }, + }, + Calendars: map[string]Calendar{ + "c": { + Type: CalendarType, + Kind: CalendarKindCalendar, + Uri: "https://opencloud.eu/calendars/521b032b-a2b3-4540-81b9-3f6bccacaab2", + MediaType: "application/jscontact+json", + Contexts: map[CalendarContext]bool{ + CalendarContextWork: true, + }, + Pref: 1, + Label: "work", + }, + }, + SchedulingAddresses: map[string]SchedulingAddress{ + "s": { + Type: SchedulingAddressType, + Uri: "mailto:scheduling@opencloud.eu.example.com", + Contexts: map[SchedulingAddressContext]bool{ + SchedulingAddressContextWork: true, + }, + Pref: 1, + Label: "work", + }, + }, + Addresses: map[string]Address{ + "k26": { + Type: AddressType, + Components: []AddressComponent{ + {Type: AddressComponentType, Kind: AddressComponentKindBlock, Value: "2-7"}, + {Kind: AddressComponentKindSeparator, Value: "-"}, + {Kind: AddressComponentKindNumber, Value: "2"}, + {Kind: AddressComponentKindSeparator, Value: " "}, + {Kind: AddressComponentKindDistrict, Value: "Marunouchi"}, + {Kind: AddressComponentKindLocality, Value: "Chiyoda-ku"}, + {Kind: AddressComponentKindRegion, Value: "Tokyo"}, + {Kind: AddressComponentKindSeparator, Value: " "}, + {Kind: AddressComponentKindPostcode, Value: "100-8994"}, + }, + IsOrdered: true, + DefaultSeparator: ", ", + Full: "2-7-2 Marunouchi, Chiyoda-ku, Tokyo 100-8994", + CountryCode: "JP", + Coordinates: "geo:35.6796373,139.7616907", + TimeZone: "JST", + Contexts: map[AddressContext]bool{ + AddressContextDelivery: true, + AddressContextWork: true, + }, + Pref: 
2, + }, + }, + CryptoKeys: map[string]CryptoKey{ + "k1": { + Type: CryptoKeyType, + Uri: "https://opencloud.eu.example.com/keys/d550f57c-582c-43cc-8d94-822bded9ab36", + MediaType: "application/pgp-keys", + Contexts: map[CryptoKeyContext]bool{ + CryptoKeyContextWork: true, + }, + Pref: 1, + Label: "keys", + }, + }, + Directories: map[string]Directory{ + "d1": { + Type: DirectoryType, + Kind: DirectoryKindEntry, + Uri: "https://opencloud.eu.example.com/addressbook/8c2f0363-af0a-4d16-a9d5-8a9cd885d722", + ListAs: 1, + }, + }, + Links: map[string]Link{ + "r1": { + Type: LinkType, + Kind: LinkKindContact, + Contexts: map[LinkContext]bool{ + LinkContextWork: true, + }, + Uri: "mailto:contact@opencloud.eu.example.com", + }, + }, + Media: map[string]Media{ + "m": { + Type: MediaType, + Kind: MediaKindLogo, + Uri: "https://opencloud.eu.example.com/opencloud.svg", + MediaType: "image/svg+xml", + Contexts: map[MediaContext]bool{ + MediaContextWork: true, + }, + Pref: 123, + Label: "svg", + BlobId: "53feefbabeb146fcbe3e59e91462fa5f", + }, + }, + Anniversaries: map[string]Anniversary{ + "birth": { + Type: AnniversaryType, + Kind: AnniversaryKindBirth, + Date: &PartialDate{ + Type: PartialDateType, + Year: 2025, + Month: 9, + Day: 26, + CalendarScale: "iso8601", + }, + }, + }, + Keywords: map[string]bool{ + "imaginary": true, + "test": true, + }, + Notes: map[string]Note{ + "n1": { + Type: NoteType, + Note: "This is a note.", + Created: created, + Author: &Author{ + Type: AuthorType, + Name: "Test Data", + Uri: "https://isbn.example.com/a461f292-6bf1-470e-b08d-f6b4b0223fe3", + }, + }, + }, + PersonalInfo: map[string]PersonalInfo{ + "p1": { + Type: PersonalInfoType, + Kind: PersonalInfoKindExpertise, + Value: "Clouds", + Level: PersonalInfoLevelHigh, + ListAs: 1, + Label: "experts", + }, + }, + Localizations: map[string]PatchObject{ + "fr": { + "personalInfo": map[string]any{ + "value": "Nuages", + }, + }, + }, + }) +} diff --git a/pkg/log/log_safely.go b/pkg/log/log_safely.go new file mode 100644 index 0000000000..67c4df9dd6 --- /dev/null +++ b/pkg/log/log_safely.go @@ -0,0 +1,59 @@ +package log + +import "github.com/rs/zerolog" + +const ( + logMaxStrLength = 512 + logMaxStrArrayLength = 16 // 8kb +) + +// Safely caps a string to a given size to avoid log bombing. +// Use this function to wrap strings that are user input (HTTP headers, path parameters, URI parameters, HTTP body, ...). 
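+//
+// Illustrative use (the logger and request variables are assumed, not part of this package):
+//
+//	logger.Info().Str("path", SafeString(r.URL.Path)).Msg("handling request")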
+func SafeString(text string) string { + runes := []rune(text) + + if len(runes) <= logMaxStrLength { + return text + } else { + return string(runes[0:logMaxStrLength-1]) + `\u2026` // hellip + } +} + +type SafeLogStringArrayMarshaller struct { + array []string +} + +func (m SafeLogStringArrayMarshaller) MarshalZerologArray(a *zerolog.Array) { + for i, elem := range m.array { + if i >= logMaxStrArrayLength { + return + } + a.Str(SafeString(elem)) + } +} + +var _ zerolog.LogArrayMarshaler = SafeLogStringArrayMarshaller{} + +func SafeStringArray(array []string) SafeLogStringArrayMarshaller { + return SafeLogStringArrayMarshaller{array: array} +} + +type StringArrayMarshaller struct { + array []string +} + +func (m StringArrayMarshaller) MarshalZerologArray(a *zerolog.Array) { + for _, elem := range m.array { + a.Str(elem) + } +} + +var _ zerolog.LogArrayMarshaler = StringArrayMarshaller{} + +func StringArray(array []string) StringArrayMarshaller { + return StringArrayMarshaller{array: array} +} + +func From(context zerolog.Context) *Logger { + return &Logger{Logger: context.Logger()} +} diff --git a/pkg/structs/structs.go b/pkg/structs/structs.go index b6545d048a..80aaf457fe 100644 --- a/pkg/structs/structs.go +++ b/pkg/structs/structs.go @@ -1,6 +1,13 @@ // Package structs provides some utility functions for dealing with structs. package structs +import ( + "maps" + "slices" + + orderedmap "github.com/wk8/go-ordered-map" +) + // CopyOrZeroValue returns a copy of s if s is not nil otherwise the zero value of T will be returned. func CopyOrZeroValue[T any](s *T) *T { cp := new(T) @@ -9,3 +16,238 @@ func CopyOrZeroValue[T any](s *T) *T { } return cp } + +// Returns a copy of an array with a unique set of elements. +// +// Element order is retained. +func Uniq[T comparable](source []T) []T { + m := orderedmap.New() + for _, v := range source { + m.Set(v, true) + } + set := make([]T, m.Len()) + i := 0 + for pair := m.Oldest(); pair != nil; pair = pair.Next() { + set[i] = pair.Key.(T) + i++ + } + return set +} + +func Keys[K comparable, V any](source map[K]V) []K { + if source == nil { + var zero []K + return zero + } + return slices.Collect(maps.Keys(source)) +} + +func Index[K comparable, V any](source []V, indexer func(V) K) map[K]V { + if source == nil { + var zero map[K]V + return zero + } + result := map[K]V{} + for _, v := range source { + k := indexer(v) + result[k] = v + } + return result +} + +func Map[E any, R any](source []E, mapper func(E) R) []R { + if source == nil { + var zero []R + return zero + } + result := make([]R, len(source)) + for i, e := range source { + result[i] = mapper(e) + } + return result +} + +func MapValues[K comparable, S any, T any](m map[K]S, mapper func(S) T) map[K]T { + r := make(map[K]T, len(m)) + for k, s := range m { + r[k] = mapper(s) + } + return r +} + +func MapValues2[K comparable, S any, T any](m map[K]S, mapper func(K, S) T) map[K]T { + r := make(map[K]T, len(m)) + for k, s := range m { + r[k] = mapper(k, s) + } + return r +} + +func MapKeys[S comparable, T comparable, V any](m map[S]V, mapper func(S) T) map[T]V { + r := make(map[T]V, len(m)) + for s, v := range m { + r[mapper(s)] = v + } + return r +} + +func MapKeys2[S comparable, T comparable, V any](m map[S]V, mapper func(S, V) T) map[T]V { + r := make(map[T]V, len(m)) + for s, v := range m { + r[mapper(s, v)] = v + } + return r +} + +func ToBoolMap[E comparable](source []E) map[E]bool { + m := make(map[E]bool, len(source)) + for _, v := range source { + m[v] = true + } + return m +} + +func 
ToIntMap[E comparable](source []E) map[E]int { + m := make(map[E]int, len(source)) + for _, v := range source { + if e, ok := m[v]; ok { + m[v] = e + 1 + } else { + m[v] = 1 + } + } + return m +} + +func MapN[E any, R any](source []E, indexer func(E) *R) []R { + if source == nil { + var zero []R + return zero + } + result := []R{} + for _, e := range source { + opt := indexer(e) + if opt != nil { + result = append(result, *opt) + } + } + return result +} + +// Check whether two slices contain the same elements, ignoring order. +func SameSlices[E comparable](x, y []E) bool { + // https://stackoverflow.com/a/36000696 + if len(x) != len(y) { + return false + } + // create a map of string -> int + diff := make(map[E]int, len(x)) + for _, _x := range x { + // 0 value for int is 0, so just increment a counter for the string + diff[_x]++ + } + for _, _y := range y { + // If the string _y is not in diff bail out early + if _, ok := diff[_y]; !ok { + return false + } + diff[_y]-- + if diff[_y] == 0 { + delete(diff, _y) + } + } + return len(diff) == 0 +} + +func Missing[E comparable](expected, actual []E) []E { + missing := []E{} + actualIndex := ToBoolMap(actual) + for _, e := range expected { + if _, ok := actualIndex[e]; !ok { + missing = append(missing, e) + } + } + return missing +} + +func FirstKey[K comparable, V any](m map[K]V) (K, bool) { + for k := range m { + return k, true + } + var zero K + return zero, false +} + +func Any[E any](s []E, predicate func(E) bool) bool { + if len(s) < 1 { + return false + } + for _, e := range s { + if predicate(e) { + return true + } + } + return false +} + +func AnyKey[K comparable, V any](m map[K]V, predicate func(K) bool) bool { + if len(m) < 1 { + return false + } + for k := range m { + if predicate(k) { + return true + } + } + return false +} + +func AnyValue[K comparable, V any](m map[K]V, predicate func(V) bool) bool { + if len(m) < 1 { + return false + } + for _, v := range m { + if predicate(v) { + return true + } + } + return false +} + +func AnyItem[K comparable, V any](m map[K]V, predicate func(K, V) bool) bool { + if len(m) < 1 { + return false + } + for k, v := range m { + if predicate(k, v) { + return true + } + } + return false +} + +func Concat[E any](arys ...[]E) []E { + l := 0 + for _, ary := range arys { + l += len(ary) + } + r := make([]E, l) + + i := 0 + for _, ary := range arys { + if ary != nil { + i += copy(r[i:], ary) + } + } + return r +} + +func Filter[E any](s []E, predicate func(E) bool) []E { + r := []E{} + for _, e := range s { + if predicate(e) { + r = append(r, e) + } + } + return r +} diff --git a/pkg/structs/structs_test.go b/pkg/structs/structs_test.go index 1e8f4096cd..f81de10ce1 100644 --- a/pkg/structs/structs_test.go +++ b/pkg/structs/structs_test.go @@ -1,6 +1,12 @@ package structs -import "testing" +import ( + "fmt" + "strings" + "testing" + + "github.com/stretchr/testify/assert" +) type example struct { Attribute1 string @@ -36,3 +42,132 @@ func TestCopyOrZeroValue(t *testing.T) { t.Error("CopyOrZeroValue didn't correctly copy attributes") } } + +func TestUniqWithInts(t *testing.T) { + tests := []struct { + input []int + expected []int + }{ + {[]int{5, 1, 3, 1, 4}, []int{5, 1, 3, 4}}, + {[]int{1, 1, 1}, []int{1}}, + } + for i, tt := range tests { + t.Run(fmt.Sprintf("%d: testing %v", i+1, tt.input), func(t *testing.T) { + result := Uniq(tt.input) + assert.EqualValues(t, tt.expected, result) + }) + } +} + +type u struct { + x int + y string +} + +var ( + u1 = u{x: 1, y: "un"} + u2 = u{x: 2, y: "deux"} + u3 = u{x: 
3, y: "trois"} +) + +func TestUniqWithStructs(t *testing.T) { + tests := []struct { + input []u + expected []u + }{ + {[]u{u3, u1, u2, u3, u2, u1}, []u{u3, u1, u2}}, + } + for i, tt := range tests { + t.Run(fmt.Sprintf("%d: testing %v", i+1, tt.input), func(t *testing.T) { + result := Uniq(tt.input) + assert.EqualValues(t, tt.expected, result) + }) + } +} + +func TestKeys(t *testing.T) { + tests := []struct { + input map[int]string + expected []int + }{ + {map[int]string{5: "cinq", 1: "un", 3: "trois", 4: "vier"}, []int{5, 1, 3, 4}}, + {map[int]string{1: "un"}, []int{1}}, + } + for i, tt := range tests { + t.Run(fmt.Sprintf("%d: testing %v", i+1, tt.input), func(t *testing.T) { + result := Keys(tt.input) + assert.ElementsMatch(t, tt.expected, result) + }) + } +} + +func TestMissing(t *testing.T) { + tests := []struct { + source []string + input []string + expected []string + }{ + {[]string{"a", "b", "c"}, []string{"c", "b", "a"}, []string{}}, + {[]string{"a", "b", "c"}, []string{"c", "b"}, []string{"a"}}, + {[]string{"a", "b", "c"}, []string{"c", "b", "a", "d"}, []string{}}, + {[]string{}, []string{"c", "b"}, []string{}}, + {[]string{"a", "b", "c"}, []string{}, []string{"a", "b", "c"}}, + {[]string{"a", "b", "b", "c"}, []string{"a", "b"}, []string{"c"}}, + } + for i, tt := range tests { + t.Run(fmt.Sprintf("%d: testing [%v] <-> [%v] == [%v]", i+1, strings.Join(tt.source, ", "), strings.Join(tt.input, ", "), strings.Join(tt.expected, ", ")), func(t *testing.T) { + result := Missing(tt.source, tt.input) + assert.Equal(t, tt.expected, result) + }) + } +} + +func TestAny(t *testing.T) { + always := func(s string) bool { return true } + never := func(s string) bool { return false } + assert.True(t, Any([]string{"a", "b", "c"}, always)) + assert.False(t, Any([]string{}, always)) + assert.False(t, Any(nil, always)) + assert.False(t, Any([]string{"a", "b", "c"}, never)) + assert.False(t, Any(nil, never)) +} + +func TestAnyKey(t *testing.T) { + always := func(s string) bool { return true } + never := func(s string) bool { return false } + + assert.True(t, AnyKey(map[string]bool{"a": true, "b": false}, always)) + assert.False(t, AnyKey(map[string]bool{}, always)) + assert.False(t, AnyKey[string, bool](nil, always)) + assert.False(t, AnyKey(map[string]bool{"a": true, "b": false}, never)) + assert.False(t, AnyKey[string, bool](nil, never)) +} + +func TestAnyValue(t *testing.T) { + always := func(b bool) bool { return true } + never := func(b bool) bool { return false } + + assert.True(t, AnyValue(map[string]bool{"a": true, "b": false}, always)) + assert.False(t, AnyValue(map[string]bool{}, always)) + assert.False(t, AnyValue[string](nil, always)) + assert.False(t, AnyValue(map[string]bool{"a": true, "b": false}, never)) + assert.False(t, AnyValue[string](nil, never)) +} + +func TestAnyItem(t *testing.T) { + always := func(s string, b bool) bool { return true } + never := func(s string, b bool) bool { return false } + + assert.True(t, AnyItem(map[string]bool{"a": true, "b": false}, always)) + assert.False(t, AnyItem(map[string]bool{}, always)) + assert.False(t, AnyItem(nil, always)) + assert.False(t, AnyItem(map[string]bool{"a": true, "b": false}, never)) + assert.False(t, AnyItem(nil, never)) +} + +func TestConcat(t *testing.T) { + assert.Equal(t, []string{"a", "b", "c", "d", "e", "f"}, Concat([]string{"a", "b"}, []string{"c"}, []string{"d", "e", "f"})) + assert.Equal(t, []string{"a"}, Concat([]string{"a"})) + assert.Equal(t, []string{"a"}, Concat([]string{}, nil, []string{"a"})) + assert.Equal(t, 
[]string{}, Concat[string]()) +} diff --git a/services/auth-api/Makefile b/services/auth-api/Makefile new file mode 100644 index 0000000000..fa6ce3234d --- /dev/null +++ b/services/auth-api/Makefile @@ -0,0 +1,11 @@ +SHELL := bash +NAME := auth-api + +ifneq (, $(shell command -v go 2> /dev/null)) # suppress `command not found warnings` for non go targets in CI +include ../../.bingo/Variables.mk +endif + +include ../../.make/default.mk +include ../../.make/go.mk +include ../../.make/release.mk +include ../../.make/docs.mk diff --git a/services/auth-api/cmd/auth-api/main.go b/services/auth-api/cmd/auth-api/main.go new file mode 100644 index 0000000000..77d591d25c --- /dev/null +++ b/services/auth-api/cmd/auth-api/main.go @@ -0,0 +1,19 @@ +package main + +import ( + "context" + "os" + "os/signal" + "syscall" + + "github.com/opencloud-eu/opencloud/services/auth-api/pkg/command" + "github.com/opencloud-eu/opencloud/services/auth-api/pkg/config/defaults" +) + +func main() { + cfg := defaults.DefaultConfig() + cfg.Context, _ = signal.NotifyContext(context.Background(), syscall.SIGINT, syscall.SIGTERM, syscall.SIGQUIT, syscall.SIGHUP) + if err := command.Execute(cfg); err != nil { + os.Exit(1) + } +} diff --git a/services/auth-api/pkg/command/root.go b/services/auth-api/pkg/command/root.go new file mode 100644 index 0000000000..1053eace93 --- /dev/null +++ b/services/auth-api/pkg/command/root.go @@ -0,0 +1,27 @@ +package command + +import ( + "os" + + "github.com/opencloud-eu/opencloud/pkg/clihelper" + "github.com/opencloud-eu/opencloud/services/auth-api/pkg/config" + "github.com/urfave/cli/v2" +) + +// GetCommands provides all commands for this service +func GetCommands(cfg *config.Config) cli.Commands { + return []*cli.Command{ + Server(cfg), + Version(cfg), + } +} + +func Execute(cfg *config.Config) error { + app := clihelper.DefaultApp(&cli.App{ + Name: "auth-api", + Usage: "OpenCloud authentication API for external services", + Commands: GetCommands(cfg), + }) + + return app.RunContext(cfg.Context, os.Args) +} diff --git a/services/auth-api/pkg/command/server.go b/services/auth-api/pkg/command/server.go new file mode 100644 index 0000000000..2e4812b203 --- /dev/null +++ b/services/auth-api/pkg/command/server.go @@ -0,0 +1,91 @@ +package command + +import ( + "context" + "fmt" + + "github.com/oklog/run" + "github.com/opencloud-eu/opencloud/pkg/config/configlog" + "github.com/opencloud-eu/opencloud/pkg/version" + "github.com/opencloud-eu/opencloud/services/auth-api/pkg/config" + "github.com/opencloud-eu/opencloud/services/auth-api/pkg/config/parser" + "github.com/opencloud-eu/opencloud/services/auth-api/pkg/logging" + "github.com/opencloud-eu/opencloud/services/auth-api/pkg/metrics" + "github.com/opencloud-eu/opencloud/services/auth-api/pkg/server/debug" + "github.com/opencloud-eu/opencloud/services/auth-api/pkg/server/http" + "github.com/urfave/cli/v2" +) + +// Server is the entrypoint for the server command. 
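+//
+// It runs the debug endpoint and the public HTTP server as actors of a single oklog/run group, so when
+// either of them stops, the shared context is cancelled and the other one is shut down as well.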
+func Server(cfg *config.Config) *cli.Command { + return &cli.Command{ + Name: "server", + Usage: fmt.Sprintf("start the %s service without runtime (unsupervised mode)", cfg.Service.Name), + Category: "server", + Before: func(_ *cli.Context) error { + return configlog.ReturnFatal(parser.ParseConfig(cfg)) + }, + Action: func(c *cli.Context) error { + logger := logging.Configure(cfg.Service.Name, cfg.Log) + + var ( + gr = run.Group{} + ctx, cancel = context.WithCancel(c.Context) + m = metrics.New() + ) + + defer cancel() + + m.BuildInfo.WithLabelValues(version.GetString()).Set(1) + + server, err := debug.Server( + debug.Logger(logger), + debug.Config(cfg), + debug.Context(ctx), + ) + if err != nil { + logger.Info().Err(err).Str("transport", "debug").Msg("Failed to initialize server") + return err + } + + gr.Add(server.ListenAndServe, func(_ error) { + _ = server.Shutdown(ctx) + cancel() + }) + + httpServer, err := http.Server( + http.Logger(logger), + http.Context(ctx), + http.Config(cfg), + http.Metrics(m), + http.Namespace(cfg.HTTP.Namespace), + ) + if err != nil { + logger.Info(). + Err(err). + Str("transport", "http"). + Msg("Failed to initialize server") + + return err + } + + gr.Add(httpServer.Run, func(_ error) { + if err == nil { + logger.Info(). + Str("transport", "http"). + Str("server", cfg.Service.Name). + Msg("Shutting down server") + } else { + logger.Error().Err(err). + Str("transport", "http"). + Str("server", cfg.Service.Name). + Msg("Shutting down server") + } + + cancel() + }) + + return gr.Run() + }, + } +} diff --git a/services/auth-api/pkg/command/version.go b/services/auth-api/pkg/command/version.go new file mode 100644 index 0000000000..35b1243402 --- /dev/null +++ b/services/auth-api/pkg/command/version.go @@ -0,0 +1,26 @@ +package command + +import ( + "fmt" + + "github.com/opencloud-eu/opencloud/pkg/version" + + "github.com/opencloud-eu/opencloud/services/auth-api/pkg/config" + "github.com/urfave/cli/v2" +) + +// Version prints the service versions of all running instances. +func Version(cfg *config.Config) *cli.Command { + return &cli.Command{ + Name: "version", + Usage: "print the version of this binary and the running service instances", + Category: "info", + Action: func(c *cli.Context) error { + fmt.Println("Version: " + version.GetString()) + fmt.Printf("Compiled: %s\n", version.Compiled()) + fmt.Println("") + + return nil + }, + } +} diff --git a/services/auth-api/pkg/config/config.go b/services/auth-api/pkg/config/config.go new file mode 100644 index 0000000000..60e8f539be --- /dev/null +++ b/services/auth-api/pkg/config/config.go @@ -0,0 +1,27 @@ +package config + +import ( + "context" + + "github.com/opencloud-eu/opencloud/pkg/shared" +) + +// Config combines all available configuration parts. +type Config struct { + Commons *shared.Commons `yaml:"-"` // don't use this directly as configuration for a service + + Service Service `yaml:"-"` + + Log *Log `yaml:"log"` + Debug Debug `yaml:"debug"` + + HTTP HTTP `yaml:"http"` + + Authentication AuthenticationAPI `yaml:"authentication_api"` + + Context context.Context `yaml:"-"` +} + +type AuthenticationAPI struct { + JwkEndpoint string `yaml:"jwk_endpoint"` +} diff --git a/services/auth-api/pkg/config/debug.go b/services/auth-api/pkg/config/debug.go new file mode 100644 index 0000000000..c342ca0bd4 --- /dev/null +++ b/services/auth-api/pkg/config/debug.go @@ -0,0 +1,9 @@ +package config + +// Debug defines the available debug configuration. 
+type Debug struct { + Addr string `yaml:"addr" env:"AUTHAPI_DEBUG_ADDR" desc:"Bind address of the debug server, where metrics, health, config and debug endpoints will be exposed." introductionVersion:"1.0.0"` + Token string `yaml:"token" env:"AUTHAPI_DEBUG_TOKEN" desc:"Token to secure the metrics endpoint." introductionVersion:"1.0.0"` + Pprof bool `yaml:"pprof" env:"AUTHAPI_DEBUG_PPROF" desc:"Enables pprof, which can be used for profiling." introductionVersion:"1.0.0"` + Zpages bool `yaml:"zpages" env:"AUTHAPI_DEBUG_ZPAGES" desc:"Enables zpages, which can be used for collecting and viewing in-memory traces." introductionVersion:"1.0.0"` +} diff --git a/services/auth-api/pkg/config/defaults/defaultconfig.go b/services/auth-api/pkg/config/defaults/defaultconfig.go new file mode 100644 index 0000000000..7860c2900f --- /dev/null +++ b/services/auth-api/pkg/config/defaults/defaultconfig.go @@ -0,0 +1,64 @@ +package defaults + +import ( + "strings" + + "github.com/opencloud-eu/opencloud/services/auth-api/pkg/config" +) + +// FullDefaultConfig returns a fully initialized default configuration +func FullDefaultConfig() *config.Config { + cfg := DefaultConfig() + EnsureDefaults(cfg) + Sanitize(cfg) + return cfg +} + +// DefaultConfig returns a basic default configuration +func DefaultConfig() *config.Config { + return &config.Config{ + Debug: config.Debug{ + Addr: "127.0.0.1:9202", + Token: "", + Pprof: false, + Zpages: false, + }, + HTTP: config.HTTP{ + Addr: "127.0.0.1:9278", + Root: "/auth", + Namespace: "eu.opencloud.web", + }, + Service: config.Service{ + Name: "auth-api", + }, + Authentication: config.AuthenticationAPI{ + JwkEndpoint: "https://keycloak.opencloud.test/realms/openCloud/protocol/openid-connect/certs", + }, + } +} + +// EnsureDefaults adds default values to the configuration if they are not set yet +func EnsureDefaults(cfg *config.Config) { + // provide with defaults for shared logging, since we need a valid destination address for "envdecode". + if cfg.Log == nil && cfg.Commons != nil && cfg.Commons.Log != nil { + cfg.Log = &config.Log{ + Level: cfg.Commons.Log.Level, + Pretty: cfg.Commons.Log.Pretty, + Color: cfg.Commons.Log.Color, + File: cfg.Commons.Log.File, + } + } else if cfg.Log == nil { + cfg.Log = &config.Log{} + } + + if cfg.Commons != nil { + cfg.HTTP.TLS = cfg.Commons.HTTPServiceTLS + } +} + +// Sanitize sanitized the configuration +func Sanitize(cfg *config.Config) { + if cfg.HTTP.Root != "/" { + cfg.HTTP.Root = strings.TrimSuffix(cfg.HTTP.Root, "/") + } +} diff --git a/services/auth-api/pkg/config/http.go b/services/auth-api/pkg/config/http.go new file mode 100644 index 0000000000..61667b34eb --- /dev/null +++ b/services/auth-api/pkg/config/http.go @@ -0,0 +1,11 @@ +package config + +import "github.com/opencloud-eu/opencloud/pkg/shared" + +// HTTP defines the available http configuration. +type HTTP struct { + Addr string `yaml:"addr" env:"AUTHAPI_HTTP_ADDR" desc:"The bind address of the HTTP service." introductionVersion:"1.0.0"` + TLS shared.HTTPServiceTLS `yaml:"tls"` + Root string `yaml:"root" env:"AUTHAPI_HTTP_ROOT" desc:"Subdirectory that serves as the root for this HTTP service." introductionVersion:"1.0.0"` + Namespace string `yaml:"-"` +} diff --git a/services/auth-api/pkg/config/log.go b/services/auth-api/pkg/config/log.go new file mode 100644 index 0000000000..986d323bbd --- /dev/null +++ b/services/auth-api/pkg/config/log.go @@ -0,0 +1,9 @@ +package config + +// Log defines the available log configuration. 
+type Log struct { + Level string `mapstructure:"level" env:"OC_LOG_LEVEL;AUTHAPI_LOG_LEVEL" desc:"The log level. Valid values are: 'panic', 'fatal', 'error', 'warn', 'info', 'debug', 'trace'." introductionVersion:"1.0.0"` + Pretty bool `mapstructure:"pretty" env:"OC_LOG_PRETTY;AUTHAPI_LOG_PRETTY" desc:"Activates pretty log output." introductionVersion:"1.0.0"` + Color bool `mapstructure:"color" env:"OC_LOG_COLOR;AUTHAPI_LOG_COLOR" desc:"Activates colorized log output." introductionVersion:"1.0.0"` + File string `mapstructure:"file" env:"OC_LOG_FILE;AUTHAPI_LOG_FILE" desc:"The path to the log file. Activates logging to this file if set." introductionVersion:"1.0.0"` +} diff --git a/services/auth-api/pkg/config/parser/parse.go b/services/auth-api/pkg/config/parser/parse.go new file mode 100644 index 0000000000..8dbefde027 --- /dev/null +++ b/services/auth-api/pkg/config/parser/parse.go @@ -0,0 +1,39 @@ +package parser + +import ( + "errors" + + occfg "github.com/opencloud-eu/opencloud/pkg/config" + "github.com/opencloud-eu/opencloud/services/auth-api/pkg/config" + "github.com/opencloud-eu/opencloud/services/auth-api/pkg/config/defaults" + + "github.com/opencloud-eu/opencloud/pkg/config/envdecode" +) + +// ParseConfig loads configuration from known paths. +func ParseConfig(cfg *config.Config) error { + err := occfg.BindSourcesToStructs(cfg.Service.Name, cfg) + if err != nil { + return err + } + + defaults.EnsureDefaults(cfg) + + // load all env variables relevant to the config in the current context. + if err := envdecode.Decode(cfg); err != nil { + // no environment variable set for this config is an expected "error" + if !errors.Is(err, envdecode.ErrNoTargetFieldsAreSet) { + return err + } + } + + // sanitize config + defaults.Sanitize(cfg) + + return Validate(cfg) +} + +// Validate can validate the configuration +func Validate(_ *config.Config) error { + return nil +} diff --git a/services/auth-api/pkg/config/service.go b/services/auth-api/pkg/config/service.go new file mode 100644 index 0000000000..d1eac383f0 --- /dev/null +++ b/services/auth-api/pkg/config/service.go @@ -0,0 +1,6 @@ +package config + +// Service defines the available service configuration. +type Service struct { + Name string `yaml:"-"` +} diff --git a/services/auth-api/pkg/logging/logging.go b/services/auth-api/pkg/logging/logging.go new file mode 100644 index 0000000000..b63a748887 --- /dev/null +++ b/services/auth-api/pkg/logging/logging.go @@ -0,0 +1,17 @@ +package logging + +import ( + "github.com/opencloud-eu/opencloud/pkg/log" + "github.com/opencloud-eu/opencloud/services/auth-api/pkg/config" +) + +// Configure initializes a service-specific logger instance. +func Configure(name string, cfg *config.Log) log.Logger { + return log.NewLogger( + log.Name(name), + log.Level(cfg.Level), + log.Pretty(cfg.Pretty), + log.Color(cfg.Color), + log.File(cfg.File), + ) +} diff --git a/services/auth-api/pkg/metrics/metrics.go b/services/auth-api/pkg/metrics/metrics.go new file mode 100644 index 0000000000..c684316cd1 --- /dev/null +++ b/services/auth-api/pkg/metrics/metrics.go @@ -0,0 +1,74 @@ +package metrics + +import "github.com/prometheus/client_golang/prometheus" + +var ( + // Namespace defines the namespace for the defines metrics. + Namespace = "opencloud" + + // Subsystem defines the subsystem for the defines metrics. + Subsystem = "authapi" +) + +// Metrics defines the available metrics of this service. 
+type Metrics struct { + BuildInfo *prometheus.GaugeVec + Duration *prometheus.HistogramVec + Attempts *prometheus.CounterVec +} + +const ( + TypeLabel = "type" + BasicType = "basic" + BearerType = "bearer" + UnsupportedType = "unsupported" + OutcomeLabel = "outcome" + AttemptSuccessOutcome = "success" + AttemptFailureOutcome = "failure" +) + +// New initializes the available metrics. +func New(opts ...Option) *Metrics { + options := newOptions(opts...) + + m := &Metrics{ + BuildInfo: prometheus.NewGaugeVec(prometheus.GaugeOpts{ + Namespace: Namespace, + Subsystem: Subsystem, + Name: "build_info", + Help: "Build information", + }, []string{"version"}), + Duration: prometheus.NewHistogramVec(prometheus.HistogramOpts{ + Namespace: Namespace, + Subsystem: Subsystem, + Name: "authentication_duration_seconds", + Help: "Authentication processing time in seconds", + }, []string{"type"}), + Attempts: prometheus.NewCounterVec(prometheus.CounterOpts{ + Namespace: Namespace, + Subsystem: Subsystem, + Name: "athentication_attempts_total", + Help: "How many authentication attempts were processed", + }, []string{"outcome"}), + } + + if err := prometheus.Register(m.BuildInfo); err != nil { + options.Logger.Error(). + Err(err). + Str("metric", "BuildInfo"). + Msg("Failed to register prometheus metric") + } + if err := prometheus.Register(m.Duration); err != nil { + options.Logger.Error(). + Err(err). + Str("metric", "Duration"). + Msg("Failed to register prometheus metric") + } + if err := prometheus.Register(m.Attempts); err != nil { + options.Logger.Error(). + Err(err). + Str("metric", "Attempts"). + Msg("Failed to register prometheus metric") + } + return m +} diff --git a/services/auth-api/pkg/metrics/options.go b/services/auth-api/pkg/metrics/options.go new file mode 100644 index 0000000000..304456e7d1 --- /dev/null +++ b/services/auth-api/pkg/metrics/options.go @@ -0,0 +1,31 @@ +package metrics + +import ( + "github.com/opencloud-eu/opencloud/pkg/log" +) + +// Option defines a single option function. +type Option func(o *Options) + +// Options defines the available options for this package. +type Options struct { + Logger log.Logger +} + +// newOptions initializes the available default options. +func newOptions(opts ...Option) Options { + opt := Options{} + + for _, o := range opts { + o(&opt) + } + + return opt +} + +// Logger provides a function to set the logger option. +func Logger(val log.Logger) Option { + return func(o *Options) { + o.Logger = val + } +} diff --git a/services/auth-api/pkg/server/debug/option.go b/services/auth-api/pkg/server/debug/option.go new file mode 100644 index 0000000000..59e0d488d1 --- /dev/null +++ b/services/auth-api/pkg/server/debug/option.go @@ -0,0 +1,50 @@ +package debug + +import ( + "context" + + "github.com/opencloud-eu/opencloud/pkg/log" + "github.com/opencloud-eu/opencloud/services/auth-api/pkg/config" +) + +// Option defines a single option function. +type Option func(o *Options) + +// Options defines the available options for this package. +type Options struct { + Logger log.Logger + Context context.Context + Config *config.Config +} + +// newOptions initializes the available default options. +func newOptions(opts ...Option) Options { + opt := Options{} + + for _, o := range opts { + o(&opt) + } + + return opt +} + +// Logger provides a function to set the logger option. +func Logger(val log.Logger) Option { + return func(o *Options) { + o.Logger = val + } +} + +// Context provides a function to set the context option. 
+func Context(val context.Context) Option { + return func(o *Options) { + o.Context = val + } +} + +// Config provides a function to set the config option. +func Config(val *config.Config) Option { + return func(o *Options) { + o.Config = val + } +} diff --git a/services/auth-api/pkg/server/debug/server.go b/services/auth-api/pkg/server/debug/server.go new file mode 100644 index 0000000000..3f54c66012 --- /dev/null +++ b/services/auth-api/pkg/server/debug/server.go @@ -0,0 +1,24 @@ +package debug + +import ( + "net/http" + + "github.com/opencloud-eu/opencloud/pkg/handlers" + "github.com/opencloud-eu/opencloud/pkg/service/debug" + "github.com/opencloud-eu/opencloud/pkg/version" +) + +// Server initializes the debug service and server. +func Server(opts ...Option) (*http.Server, error) { + options := newOptions(opts...) + + readyHandlerConfiguration := handlers.NewCheckHandlerConfiguration(). + WithLogger(options.Logger) + + return debug.NewService( + debug.Logger(options.Logger), + debug.Name(options.Config.Service.Name), + debug.Version(version.GetString()), + debug.Ready(handlers.NewCheckHandler(readyHandlerConfiguration)), + ), nil +} diff --git a/services/auth-api/pkg/server/http/option.go b/services/auth-api/pkg/server/http/option.go new file mode 100644 index 0000000000..cb4b0b5f75 --- /dev/null +++ b/services/auth-api/pkg/server/http/option.go @@ -0,0 +1,83 @@ +package http + +import ( + "context" + + "github.com/opencloud-eu/opencloud/pkg/log" + "github.com/opencloud-eu/opencloud/services/auth-api/pkg/config" + "github.com/opencloud-eu/opencloud/services/auth-api/pkg/metrics" + "github.com/urfave/cli/v2" + "go.opentelemetry.io/otel/trace" + "go.opentelemetry.io/otel/trace/noop" +) + +// Option defines a single option function. +type Option func(o *Options) + +// Options defines the available options for this package. +type Options struct { + Namespace string + Logger log.Logger + Context context.Context + Config *config.Config + Metrics *metrics.Metrics + Flags []cli.Flag + TraceProvider trace.TracerProvider +} + +// newOptions initializes the available default options. +func newOptions(opts ...Option) Options { + opt := Options{} + + for _, o := range opts { + o(&opt) + } + + return opt +} + +// Logger provides a function to set the logger option. +func Logger(val log.Logger) Option { + return func(o *Options) { + o.Logger = val + } +} + +// Context provides a function to set the context option. +func Context(val context.Context) Option { + return func(o *Options) { + o.Context = val + } +} + +// Config provides a function to set the config option. +func Config(val *config.Config) Option { + return func(o *Options) { + o.Config = val + } +} + +// Metrics provides a function to set the metrics option. +func Metrics(val *metrics.Metrics) Option { + return func(o *Options) { + o.Metrics = val + } +} + +// Namespace provides a function to set the Namespace option. 
+func Namespace(val string) Option { + return func(o *Options) { + o.Namespace = val + } +} + +// TraceProvider provides a function to configure the trace provider +func TraceProvider(traceProvider trace.TracerProvider) Option { + return func(o *Options) { + if traceProvider != nil { + o.TraceProvider = traceProvider + } else { + o.TraceProvider = noop.NewTracerProvider() + } + } +} diff --git a/services/auth-api/pkg/server/http/server.go b/services/auth-api/pkg/server/http/server.go new file mode 100644 index 0000000000..33141d1fd8 --- /dev/null +++ b/services/auth-api/pkg/server/http/server.go @@ -0,0 +1,61 @@ +package http + +import ( + "fmt" + + "github.com/go-chi/chi/v5/middleware" + opencloudmiddleware "github.com/opencloud-eu/opencloud/pkg/middleware" + "github.com/opencloud-eu/opencloud/pkg/service/http" + "github.com/opencloud-eu/opencloud/pkg/version" + svc "github.com/opencloud-eu/opencloud/services/auth-api/pkg/service/http/v0" + "go-micro.dev/v4" +) + +// Server initializes the http service and server. +func Server(opts ...Option) (http.Service, error) { + options := newOptions(opts...) + + service, err := http.NewService( + http.TLSConfig(options.Config.HTTP.TLS), + http.Logger(options.Logger), + http.Name(options.Config.Service.Name), + http.Version(version.GetString()), + http.Namespace(options.Config.HTTP.Namespace), + http.Address(options.Config.HTTP.Addr), + http.Context(options.Context), + http.TraceProvider(options.TraceProvider), + ) + if err != nil { + options.Logger.Error(). + Err(err). + Msg("Error initializing http service") + return http.Service{}, fmt.Errorf("could not initialize http service: %w", err) + } + + handle := svc.NewService( + svc.Logger(options.Logger), + svc.Config(options.Config), + svc.Metrics(options.Metrics), + svc.TraceProvider(options.TraceProvider), + svc.Middleware( + middleware.RealIP, + middleware.RequestID, + opencloudmiddleware.Version( + options.Config.Service.Name, + version.GetString(), + ), + opencloudmiddleware.Logger(options.Logger), + ), + ) + + { + handle = svc.NewInstrument(handle, options.Metrics) + handle = svc.NewLogging(handle, options.Logger) + } + + if err := micro.RegisterHandler(service.Server(), handle); err != nil { + return http.Service{}, err + } + + return service, nil +} diff --git a/services/auth-api/pkg/service/http/v0/instrument.go b/services/auth-api/pkg/service/http/v0/instrument.go new file mode 100644 index 0000000000..d1a6663ecb --- /dev/null +++ b/services/auth-api/pkg/service/http/v0/instrument.go @@ -0,0 +1,25 @@ +package svc + +import ( + "net/http" + + "github.com/opencloud-eu/opencloud/services/auth-api/pkg/metrics" +) + +// NewInstrument returns a service that instruments metrics. +func NewInstrument(next Service, metrics *metrics.Metrics) Service { + return instrument{ + next: next, + metrics: metrics, + } +} + +type instrument struct { + next Service + metrics *metrics.Metrics +} + +// ServeHTTP implements the Service interface. +func (i instrument) ServeHTTP(w http.ResponseWriter, r *http.Request) { + i.next.ServeHTTP(w, r) +} diff --git a/services/auth-api/pkg/service/http/v0/logging.go b/services/auth-api/pkg/service/http/v0/logging.go new file mode 100644 index 0000000000..c21734ce11 --- /dev/null +++ b/services/auth-api/pkg/service/http/v0/logging.go @@ -0,0 +1,25 @@ +package svc + +import ( + "net/http" + + "github.com/opencloud-eu/opencloud/pkg/log" +) + +// NewLogging returns a service that logs messages. 
+func NewLogging(next Service, logger log.Logger) Service { + return logging{ + next: next, + logger: logger, + } +} + +type logging struct { + next Service + logger log.Logger +} + +// ServeHTTP implements the Service interface. +func (l logging) ServeHTTP(w http.ResponseWriter, r *http.Request) { + l.next.ServeHTTP(w, r) +} diff --git a/services/auth-api/pkg/service/http/v0/option.go b/services/auth-api/pkg/service/http/v0/option.go new file mode 100644 index 0000000000..cb0e6615f9 --- /dev/null +++ b/services/auth-api/pkg/service/http/v0/option.go @@ -0,0 +1,66 @@ +package svc + +import ( + "net/http" + + "github.com/opencloud-eu/opencloud/pkg/log" + "github.com/opencloud-eu/opencloud/services/auth-api/pkg/config" + "github.com/opencloud-eu/opencloud/services/auth-api/pkg/metrics" + "go.opentelemetry.io/otel/trace" +) + +// Option defines a single option function. +type Option func(o *Options) + +// Options defines the available options for this package. +type Options struct { + Logger log.Logger + Config *config.Config + Middleware []func(http.Handler) http.Handler + Metrics *metrics.Metrics + TraceProvider trace.TracerProvider +} + +// newOptions initializes the available default options. +func newOptions(opts ...Option) Options { + opt := Options{} + + for _, o := range opts { + o(&opt) + } + + return opt +} + +// Logger provides a function to set the logger option. +func Logger(val log.Logger) Option { + return func(o *Options) { + o.Logger = val + } +} + +// Config provides a function to set the config option. +func Config(val *config.Config) Option { + return func(o *Options) { + o.Config = val + } +} + +// Middleware provides a function to set the middleware option. +func Middleware(val ...func(http.Handler) http.Handler) Option { + return func(o *Options) { + o.Middleware = val + } +} + +func TraceProvider(tp trace.TracerProvider) Option { + return func(o *Options) { + o.TraceProvider = tp + } +} + +func Metrics(m *metrics.Metrics) Option { + return func(o *Options) { + o.Metrics = m + } +} diff --git a/services/auth-api/pkg/service/http/v0/service.go b/services/auth-api/pkg/service/http/v0/service.go new file mode 100644 index 0000000000..f25e74bc99 --- /dev/null +++ b/services/auth-api/pkg/service/http/v0/service.go @@ -0,0 +1,266 @@ +package svc + +import ( + "context" + "crypto/tls" + "net/http" + "regexp" + "time" + + "github.com/MicahParks/jwkset" + "github.com/MicahParks/keyfunc/v3" + "github.com/go-chi/chi/v5" + "github.com/go-chi/render" + "github.com/golang-jwt/jwt/v5" + + "github.com/riandyrn/otelchi" + "go.opentelemetry.io/otel/attribute" + oteltrace "go.opentelemetry.io/otel/trace" + + "github.com/opencloud-eu/opencloud/pkg/log" + "github.com/opencloud-eu/opencloud/pkg/tracing" + "github.com/opencloud-eu/opencloud/services/auth-api/pkg/config" + "github.com/opencloud-eu/opencloud/services/auth-api/pkg/metrics" +) + +// Service defines the service handlers. +type Service interface { + ServeHTTP(w http.ResponseWriter, r *http.Request) +} + +// NewService returns a service implementation for Service. +func NewService(opts ...Option) Service { + options := newOptions(opts...) + + m := chi.NewMux() + m.Use(options.Middleware...) 
+ + m.Use( + otelchi.Middleware( + "auth-api", + otelchi.WithChiRoutes(m), + otelchi.WithTracerProvider(options.TraceProvider), + otelchi.WithPropagators(tracing.GetPropagator()), + otelchi.WithTraceResponseHeaders(otelchi.TraceHeaderConfig{}), + ), + ) + + svc, err := NewAuthenticationApi(options.Config, &options.Logger, options.Metrics, options.TraceProvider, m) + if err != nil { + panic(err) // TODO p.bleser what to do when we encounter an error in a NewService() ? + } + + m.Route(options.Config.HTTP.Root, func(r chi.Router) { + r.Post("/", svc.Authenticate) + }) + + _ = chi.Walk(m, func(method string, route string, _ http.Handler, middlewares ...func(http.Handler) http.Handler) error { + options.Logger.Debug().Str("method", method).Str("route", route).Int("middlewares", len(middlewares)).Msg("serving endpoint") + return nil + }) + + return svc +} + +type AuthenticationApi struct { + config *config.Config + logger *log.Logger + metrics *metrics.Metrics + tracer oteltrace.Tracer + mux *chi.Mux + refreshCtx context.Context + jwksFunc keyfunc.Keyfunc +} + +func NewAuthenticationApi( + config *config.Config, + logger *log.Logger, + metrics *metrics.Metrics, + tracerProvider oteltrace.TracerProvider, + mux *chi.Mux, +) (*AuthenticationApi, error) { + + tracer := tracerProvider.Tracer("instrumentation/" + config.HTTP.Namespace + "/" + config.Service.Name) + + var httpClient *http.Client + { + tr := http.DefaultTransport.(*http.Transport).Clone() + tr.ResponseHeaderTimeout = time.Duration(10) * time.Second + tlsConfig := &tls.Config{InsecureSkipVerify: true} + tr.TLSClientConfig = tlsConfig + h := http.DefaultClient + h.Transport = tr + httpClient = h + } + + refreshCtx := context.Background() + + storage, err := jwkset.NewStorageFromHTTP(config.Authentication.JwkEndpoint, jwkset.HTTPClientStorageOptions{ + Client: httpClient, + Ctx: refreshCtx, + HTTPExpectedStatus: http.StatusOK, + HTTPMethod: http.MethodGet, + HTTPTimeout: time.Duration(10) * time.Second, + NoErrorReturnFirstHTTPReq: true, + RefreshInterval: time.Duration(10) * time.Minute, + RefreshErrorHandler: func(ctx context.Context, err error) { + logger.Error().Err(err).Ctx(ctx).Str("url", config.Authentication.JwkEndpoint).Msg("failed to refresh JWK Set from IDP") + }, + //ValidateOptions: jwkset.JWKValidateOptions{}, + }) + if err != nil { + return nil, err + } + + jwksFunc, err := keyfunc.New(keyfunc.Options{ + Ctx: refreshCtx, + UseWhitelist: []jwkset.USE{jwkset.UseSig}, + Storage: storage, + }) + if err != nil { + return nil, err + } + + return &AuthenticationApi{ + config: config, + mux: mux, + logger: logger, + metrics: metrics, + tracer: tracer, + refreshCtx: refreshCtx, + jwksFunc: jwksFunc, + }, nil +} + +func (a AuthenticationApi) ServeHTTP(w http.ResponseWriter, r *http.Request) { + a.mux.ServeHTTP(w, r) +} + +type SuccessfulAuthResponse struct { + Subject string `json:"subject"` + Roles []string `json:"roles,omitempty"` +} + +func (SuccessfulAuthResponse) Render(w http.ResponseWriter, r *http.Request) error { + return nil +} + +type FailedAuthResponse struct { + Reason string `json:"reason,omitempty"` +} + +func (FailedAuthResponse) Render(w http.ResponseWriter, r *http.Request) error { + return nil +} + +type CustomClaims struct { + Roles []string `json:"roles,omitempty"` + AuthorizedParties jwt.ClaimStrings `json:"azp,omitempty"` + SessionId string `json:"sid,omitempty"` + AuthenticationContextClassReference string `json:"acr,omitempty"` + Scope jwt.ClaimStrings `json:"scope,omitempty"` + EmailVerified bool 
`json:"email_verified,omitempty"` + Name string `json:"name,omitempty"` + Groups jwt.ClaimStrings `json:"groups,omitempty"` + PreferredUsername string `json:"preferred_username,omitempty"` + GivenName string `json:"given_name,omitempty"` + FamilyName string `json:"family_name,omitempty"` + Uuid string `json:"uuid,omitempty"` + Email string `json:"email,omitempty"` + + jwt.RegisteredClaims +} + +var authRegex = regexp.MustCompile(`(?i)^(Basic|Bearer)\s+(.+)$`) + +func (a AuthenticationApi) failedAuth() { + a.metrics.Attempts.WithLabelValues(metrics.OutcomeLabel, metrics.AttemptFailureOutcome).Inc() +} +func (a AuthenticationApi) succeededAuth() { + a.metrics.Attempts.WithLabelValues(metrics.OutcomeLabel, metrics.AttemptSuccessOutcome).Inc() +} + +func (a AuthenticationApi) Authenticate(w http.ResponseWriter, r *http.Request) { + _, span := a.tracer.Start(r.Context(), "authenticate") + defer span.End() + + auth := r.Header.Get("Authorization") + if auth == "" { + a.logger.Warn().Msg("missing Authorization header") + w.WriteHeader(http.StatusBadRequest) // authentication header is missing altogether + _ = render.Render(w, r, FailedAuthResponse{Reason: "Missing Authorization header"}) + a.failedAuth() + return + } + matches := authRegex.FindStringSubmatch(auth) + if matches == nil || len(matches) != 3 { + a.logger.Warn().Msg("unsupported Authorization header") + w.WriteHeader(http.StatusBadRequest) // authentication header is unsupported + _ = render.Render(w, r, FailedAuthResponse{Reason: "Unsupported Authorization header"}) + a.failedAuth() + return + } + + if matches[1] == "Basic" { + span.SetAttributes(attribute.String("authenticate.scheme", "basic")) + a.metrics.Attempts.WithLabelValues(metrics.TypeLabel, metrics.BasicType).Inc() + + username, password, ok := r.BasicAuth() + if !ok { + a.logger.Warn().Msg("failed to decode basic credentials") + w.WriteHeader(http.StatusBadRequest) // failed to decode the basic credentials + _ = render.Render(w, r, FailedAuthResponse{Reason: "Failed to decode basic credentials"}) + a.failedAuth() + return + } + if password == "secret" { + _ = render.Render(w, r, SuccessfulAuthResponse{Subject: username}) + a.succeededAuth() + } else { + a.logger.Info().Str("username", username).Msg("authentication failed") + w.WriteHeader(http.StatusUnauthorized) + _ = render.Render(w, r, FailedAuthResponse{Reason: "Unauthorized credentials"}) + a.failedAuth() + return + } + } else if matches[1] == "Bearer" { + span.SetAttributes(attribute.String("authenticate.scheme", "bearer")) + a.metrics.Attempts.WithLabelValues(metrics.TypeLabel, metrics.BearerType).Inc() + + claims := &CustomClaims{} + tokenString := matches[2] + token, err := jwt.ParseWithClaims(tokenString, claims, a.jwksFunc.Keyfunc, jwt.WithExpirationRequired(), jwt.WithLeeway(5*time.Second)) + if err != nil { + a.logger.Warn().Err(err).Msg("failed to parse bearer token") + w.WriteHeader(http.StatusBadRequest) // failed to parse bearer token + _ = render.Render(w, r, FailedAuthResponse{Reason: "Failed to parse bearer token"}) + return + } + + a.logger.Info().Str("type", matches[1]).Interface("header", token.Header).Interface("claims", token.Claims).Bool("valid", token.Valid).Msgf("successfully parsed token") + + if typedClaims, ok := token.Claims.(*CustomClaims); ok && token.Valid { + sub := typedClaims.PreferredUsername + if sub == "" { + sub, err = typedClaims.GetSubject() + if err != nil { + a.logger.Warn().Err(err).Msg("failed to retrieve sub claim from token") + w.WriteHeader(http.StatusBadRequest) // failed 
to extract sub claim from bearer token + _ = render.Render(w, r, FailedAuthResponse{Reason: "Failed to extract sub claim from bearer token"}) + return + } + } + _ = render.Render(w, r, SuccessfulAuthResponse{Subject: sub, Roles: claims.Roles}) + } else { + w.WriteHeader(http.StatusBadRequest) // failed to extract sub claim from bearer token + _ = render.Render(w, r, FailedAuthResponse{Reason: "Failed to parse bearer token"}) + return + } + } else { + a.metrics.Attempts.WithLabelValues(metrics.TypeLabel, metrics.UnsupportedType).Inc() + + w.WriteHeader(http.StatusBadRequest) // authentication header is unsupported + _ = render.Render(w, r, FailedAuthResponse{Reason: "Unsupported Authorization type"}) + return + } +} diff --git a/services/auth-api/pkg/service/http/v0/service_test.go b/services/auth-api/pkg/service/http/v0/service_test.go new file mode 100644 index 0000000000..23f3520ce6 --- /dev/null +++ b/services/auth-api/pkg/service/http/v0/service_test.go @@ -0,0 +1,17 @@ +package svc + +import ( + "testing" + + "github.com/stretchr/testify/require" +) + +func TestRegex(t *testing.T) { + require := require.New(t) + + matches := authRegex.FindStringSubmatch("Basic abc") + require.NotNil(matches) + require.Len(matches, 3) + require.Equal("Basic", matches[1]) + require.Equal("abc", matches[2]) +} diff --git a/services/graph/pkg/config/config.go b/services/graph/pkg/config/config.go index 6386025a84..28095679eb 100644 --- a/services/graph/pkg/config/config.go +++ b/services/graph/pkg/config/config.go @@ -177,3 +177,4 @@ type Store struct { AuthUsername string `yaml:"username" env:"OC_PERSISTENT_STORE_AUTH_USERNAME;GRAPH_STORE_AUTH_USERNAME" desc:"The username to authenticate with the store. Only applies when store type 'nats-js-kv' is configured." introductionVersion:"1.0.0"` AuthPassword string `yaml:"password" env:"OC_PERSISTENT_STORE_AUTH_PASSWORD;GRAPH_STORE_AUTH_PASSWORD" desc:"The password to authenticate with the store. Only applies when store type 'nats-js-kv' is configured." introductionVersion:"1.0.0"` } + diff --git a/services/groupware/.gitignore b/services/groupware/.gitignore new file mode 100644 index 0000000000..9989a9005f --- /dev/null +++ b/services/groupware/.gitignore @@ -0,0 +1,4 @@ +/swagger.yml +/api.html +/api.html.template +/node_modules diff --git a/services/groupware/DEVELOPER.md b/services/groupware/DEVELOPER.md new file mode 100644 index 0000000000..78a4150103 --- /dev/null +++ b/services/groupware/DEVELOPER.md @@ -0,0 +1,761 @@ +# Groupware Developer Guide + + + +## Introduction + +The Groupware component of OpenCloud is implemented as a (micro)service within the OpenCloud framework (see `./services/groupware/`). + +It is essentially providing a REST API to the OpenCloud UI clients (web, mobile) that is high-level and adapted to the needs of the UIs. + +The implementation of that REST API turns those high-level APIs into lower-level [JMAP](https://jmap.io/) API calls to [Stalwart, the JMAP mail server](https://stalw.art/), using our own JMAP client library in `./pkg/jmap/` with a couple of additional RFCs used by JMAP in `./pkg/jscalendar` and `./pkg/jscontact`. + +## Repository + +The code lives in the same tree as the other OpenCloud backend services, albeit currently in the `groupware` branch, that gets rebased on `main` on a regular basis (at least once per week.) 
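+
+If you already have a clone of that branch, note that because its history is rewritten by those rebases, a plain `git pull` will eventually refuse to fast-forward. One way to bring an existing checkout back in sync is sketched below; it assumes the remote is named `origin` and it discards any local changes:
+
+```bash
+git fetch origin
+git reset --hard origin/groupware
+```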
+ +Use [the `groupware` branch](https://github.com/opencloud-eu/opencloud/tree/groupware) + +```bash +cd ~/src/opencloud/ +OCDIR="$PWD" +git clone --branch groupware git@github.com:opencloud-eu/opencloud.git +``` + +Note that setting the variable `OCDIR` is merely going to help us with keeping the instructions below as generic as possible, it is not an environment variable that is used by OpenCloud. + +### Tools Repository + +Also, you might want to check out these [helper scripts in opencloud-tools](https://github.com/pbleser-oc/opencloud-tools) somewhere and put that directory into your `PATH`, as it contains scripts to test and build the OpenCloud Groupware: + +```bash +cd "$OCDIR/" +git clone git@github.com:pbleser-oc/opencloud-tools.git ./bin +echo "export PATH=\"\$PATH:$OCDIR/bin\"" >> ~/.bashrc +``` + +#### Tools Prerequisites + +Those scripts have the following prerequisites: + +* the [`jq`](https://github.com/jqlang/jq) JSON query command-line tool to extract access tokens, +* either the [httpie](https://httpie.io/cli) (`pipx install httpie`) or [`xh`](https://github.com/ducaale/xh) (`cargo install xh --locked`) command-line HTTP clients, just out of convenience as their output is much nicer than curl's +* `curl` as well, to retrieve the access tokens from Keycloak (no need for nice output there) + +## Configuration + +Since we require having a Stalwart container running at the very least, the preferred way of running OpenCloud and its adjacent services for developing the Groupware component is by using the `opencloud_full` Docker Compose setup in `$OCDIR/opencloud/devtools/deployments/opencloud_full/`. + +This section will explain how to configure that Docker Compose setup for the needs of the Groupware backend. + +### Hosts + +The default hostname domain for the containers is `.opencloud.test` + +Make sure to have the following entries in your `/etc/hosts`: + +```ruby +127.0.0.1 cloud.opencloud.test +127.0.0.1 keycloak.opencloud.test +127.0.0.1 wopiserver.opencloud.test +127.0.0.1 mail.opencloud.test +127.0.0.1 collabora.opencloud.test +127.0.0.1 traefik.opencloud.test +127.0.0.1 stalwart.opencloud.test +``` + +Alternatively, use the following shell snippet to extract it in a more automated fashion: + +```bash +cd "$OCDIR/opencloud/devtools/deployments/opencloud_full/" + +perl -ne 'if (/^([A-Z][A-Z0-9]+)_DOMAIN=(.*)$/) { print length($2) < 1 ? lc($1).".opencloud.test" : $2,"\n"}' <.env\ +|sort|while read n; do\ +grep -w -q "$n" /etc/hosts && echo -e "\e[32;4mexists :\e[0m $n: \e[32m$(grep -w $n /etc/hosts)\e[0m">&2 ||\ +{ echo -e "\e[33;4mmissing:\e[0m ${n}" >&2; echo -e "127.0.0.1\t${n}";};\ +done \ +| sudo tee -a /etc/hosts +``` + +### Compose + +There are two options, either + +1. running the Groupware backend with OpenLDAP and Keycloak containers, more akin to a production setup; +2. running the Groupware backend using the built-in LDAP and OIDC services, for a minimalistic setup that uses less resources and is more likely to be found in a home lab setup. + +In either case, the Docker Compose configuration in `$OCDIR/opencloud/devtools/deployments/opencloud_full/` needs to be modified. 
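+
+Once the `.env` file has been adapted as described in the following sections, the stack can be brought up and inspected from that same directory. This is just the usual Docker Compose workflow (the `opencloud` service name is the one used by this deployment):
+
+```bash
+cd "$OCDIR/opencloud/devtools/deployments/opencloud_full/"
+# start (or update) all enabled services in the background
+docker compose up -d
+# follow the logs of the OpenCloud service, which also runs the groupware service
+docker compose logs -f opencloud
+```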
+ +#### Production Setup + + + +```mermaid +--- +title: Production Setup +--- +flowchart LR + oc["`opencloud`"] + c["client"] + kc["`keycloak`"] + ol["`ldap-server`"] + st["`stalwart`"] + + c -- http --> oc + oc -- jmap --> st + oc --> ol + st --> ol + kc --> ol + c --> kc +``` + +Edit `$OCDIR/opencloud/devtools/deployments/opencloud_full/.env`, making the following changes (make sure to check out [the shell command-line that automates all of that, below](#automate-env-setup-prod)): + +* change the container image to `opencloudeu/opencloud:dev`: + +```diff +-OC_DOCKER_IMAGE=opencloudeu/opencloud-rolling ++OC_DOCKER_IMAGE=opencloudeu/opencloud +-OC_DOCKER_TAG= ++OC_DOCKER_TAG=dev +``` + +* add the `groupware` service to `START_ADDITIONAL_SERVICES`: + +```diff +-START_ADDITIONAL_SERVICES="notifications" ++START_ADDITIONAL_SERVICES="notifications,groupware" +``` + +* enable the OpenLDAP container: + +```diff +-#LDAP=:ldap.yml ++LDAP=:ldap.yml +``` + +* enable the Keycloak container: + +```diff +-#KEYCLOAK=:keycloak.yml ++KEYCLOAK=:keycloak.yml +``` + +* enable the Stalwart container: + +```diff +-#STALWART=:stalwart.yml ++STALWART=:stalwart.yml +``` + +* optionally disable the Collabora container + +```diff +-COLLABORA=:collabora.yml ++#COLLABORA=:collabora.yml +``` + +* optionally disable UI containers + +```diff +-UNZIP=:web_extensions/unzip.yml +-DRAWIO=:web_extensions/drawio.yml +-JSONVIEWER=:web_extensions/jsonviewer.yml +-PROGRESSBARS=:web_extensions/progressbars.yml +-EXTERNALSITES=:web_extensions/externalsites.yml ++#UNZIP=:web_extensions/unzip.yml ++#DRAWIO=:web_extensions/drawio.yml ++#JSONVIEWER=:web_extensions/jsonviewer.yml ++#PROGRESSBARS=:web_extensions/progressbars.yml ++#EXTERNALSITES=:web_extensions/externalsites.yml +``` + + +All those changes above can be automated with the following script: + +```bash +cd "$OCDIR/opencloud/devtools/deployments/opencloud_full/" +perl -pi -e ' + s|^(OC_DOCKER_IMAGE)=.*$|$1=opencloudeu/opencloud|; + s|^(OC_DOCKER_TAG)=.*$|$1=dev|; + s|^(START_ADDITIONAL_SERVICES=".*(? + +```mermaid +--- +title: Homelab Setup +--- +flowchart LR + oc["`opencloud`"] + c["client"] + st["`stalwart`"] + + c -- http --> oc + oc -- jmap --> st + st -- ldap --> oc + +``` + +Edit `$OCDIR/opencloud/devtools/deployments/opencloud_full/.env`, making the following changes (make sure to check out [the shell command-line that automates all of that, below](#automate-env-setup-homelab)): + +* change the container image to `opencloudeu/opencloud:dev`: + +```diff +-OC_DOCKER_IMAGE=opencloudeu/opencloud-rolling ++OC_DOCKER_IMAGE=opencloudeu/opencloud +-OC_DOCKER_TAG= ++OC_DOCKER_TAG=dev +``` + +* enable the creation of demo users: + +```diff +-DEMO_USERS= ++DEMO_USERS=true +``` + +* add the `groupware` service to `START_ADDITIONAL_SERVICES`: + +```diff +-START_ADDITIONAL_SERVICES="notifications" ++START_ADDITIONAL_SERVICES="notifications,groupware" +``` + +* enable the Stalwart container: + +```diff +-#STALWART=:stalwart.yml ++STALWART=:stalwart.yml +``` + +* while not required, it is recommended to enable basic authentication support which, while less secure, allows for easier tooling when developing and testing HTTP APIs, by adding `PROXY_ENABLE_BASIC_AUTH=true` somewhere before the last line of the `.env` file: + +```diff + # Domain of Stalwart + # Defaults to "stalwart.opencloud.test" + STALWART_DOMAIN= + ++# Enable basic authentication to facilitate HTTP API testing ++# Do not do this in production. 
++PROXY_ENABLE_BASIC_AUTH=true ++ + ## IMPORTANT ## +``` + +* optionally disable the Collabora container + +```diff +-COLLABORA=:collabora.yml ++#COLLABORA=:collabora.yml +``` + +* optionally disable UI containers + +```diff +-UNZIP=:web_extensions/unzip.yml +-DRAWIO=:web_extensions/drawio.yml +-JSONVIEWER=:web_extensions/jsonviewer.yml +-PROGRESSBARS=:web_extensions/progressbars.yml +-EXTERNALSITES=:web_extensions/externalsites.yml ++#UNZIP=:web_extensions/unzip.yml ++#DRAWIO=:web_extensions/drawio.yml ++#JSONVIEWER=:web_extensions/jsonviewer.yml ++#PROGRESSBARS=:web_extensions/progressbars.yml ++#EXTERNALSITES=:web_extensions/externalsites.yml +``` + + +All those changes above can be automated with the following script: + +```bash +cd "$OCDIR/opencloud/devtools/deployments/opencloud_full/" +perl -pi -e ' + BEGIN{$basic_auth=0} + s|^(OC_DOCKER_IMAGE)=.*$|$1=opencloudeu/opencloud|; + s|^(OC_DOCKER_TAG)=.*$|$1=dev|; + s|^(START_ADDITIONAL_SERVICES=".*(?Bind DN: `cn=admin,dc=opencloud,dc=eu` +* Bind Password: `admin` +* Base DN: `ou=users,dc=opencloud,dc=eu` +* Host: `localhost` +* LDAP Port: `389` +* LDAPS Port: `636` + +Run the following command on your host (requires the `ldap-tools` package with the `ldapsearch` CLI tool), which should output a list of DNs of demo users: + +```bash +ldapsearch -h localhost -D 'cn=admin,dc=opencloud,dc=eu' \ +-x -w 'admin' -b 'ou=users,dc=opencloud,dc=eu' -LLL \ +'(objectClass=person)' dn +``` + +Sample output: + +```ldif +dn: uid=alan,ou=users,dc=opencloud,dc=eu + +dn: uid=lynn,ou=users,dc=opencloud,dc=eu + +dn: uid=mary,ou=users,dc=opencloud,dc=eu + +dn: uid=admin,ou=users,dc=opencloud,dc=eu + +dn: uid=dennis,ou=users,dc=opencloud,dc=eu + +dn: uid=margaret,ou=users,dc=opencloud,dc=eu + +``` + +#### Homelab Setup LDAP + +Instead, when using the “homelab” setup (as depicted in section [Homelab Setup](#homelab-setup) above), queries cannot be performed directly from the host \ +but, instead, require spinning up another container in the same Docker network and do so from there. + +The necessary LDAP parameters are as follows: + +* Bind DN: `uid=libregraph,ou=sysusers,o=libregraph-idm` +* Bind Password: `admin` (or whichever password is set in the `IDM_REVASVC_PASSWORD` environment variable in `opencloud.yml`) +* Base DN: `o=libregraph-idm` +* Host: `localhost` +* LDAP Port: none, only supports LDAPS +* LDAPS Port: `9235` + +To access the LDAP tree, spawn a new container in the same network, e.g. 
like this for a Debian 12 container:
+
+```bash
+docker run --network 'opencloud_full_opencloud-net' --rm \
+--name "debian-${RANDOM}" -ti 'debian:12'
+```
+
+In that container, install the necessary packages to have the LDAP command-line tools:
+
+```bash
+apt-get update -y && apt-get install -y ca-certificates ldap-utils
+```
+
+Alternatively, the same can be achieved with an Alpine container:
+
+```bash
+docker run --network 'opencloud_full_opencloud-net' --rm \
+--name "alpine-${RANDOM}" -ti 'alpine'
+```
+
+Then run this command instead to install the LDAP command-line tools:
+
+```bash
+apk update && apk add openldap-clients
+```
+
+Run the following command in that container, which should output a list of DNs of demo users:
+
+```bash
+LDAPTLS_REQCERT=never ldapsearch -H ldaps://opencloud:9235 \
+-D 'uid=reva,ou=sysusers,o=libregraph-idm' -x -w 'admin' \
+-b 'o=libregraph-idm' -LLL \
+'(objectClass=person)' dn
+```
+
+> [!NOTE]
+> The `LDAPTLS_REQCERT` environment variable is set to `never` to prevent the `ldapsearch` application from validating the TLS certificate of the LDAP server, since we are using self-signed certificates for all those services in the devtools setups.
+
+Sample output:
+
+```ldif
+dn: uid=admin,ou=users,o=libregraph-idm
+
+dn: uid=alan,ou=users,o=libregraph-idm
+
+dn: uid=lynn,ou=users,o=libregraph-idm
+
+dn: uid=mary,ou=users,o=libregraph-idm
+
+dn: uid=margaret,ou=users,o=libregraph-idm
+
+dn: uid=dennis,ou=users,o=libregraph-idm
+
+```
+
+Alternatively, as a one-liner using an Alpine Docker image:
+
+```bash
+docker run --network 'opencloud_full_opencloud-net' --rm -ti alpine:3 \
+/bin/sh -c "apk update && apk add openldap-clients && exec /bin/sh -il"
+```
+
+### Testing Keycloak
+
+> [!NOTE]
+> Only available in the [“production” setup](#prod-setup)
+
+To check whether it works correctly, the following `curl` command:
+
+```bash
+curl -ks -D- -X POST \
+"https://keycloak.opencloud.test/realms/openCloud/protocol/openid-connect/token" \
+-d username=alan -d password=demo -d grant_type=password \
+-d client_id=groupware -d scope=openid
+```
+
+should provide you with a JSON response that contains an `access_token` property.
+
+If it is not set up correctly, it should give you this instead:
+
+```json
+{"error":"invalid_client","error_description":"Invalid client or Invalid client credentials"}
+```
+
+### Testing Stalwart
+
+To then test the IMAP authentication with Stalwart, run the following command on your host (requires the `openssl` CLI tool):
+
+```bash
+openssl s_client -crlf -connect localhost:993
+```
+
+When greeted with the following prompt:
+
+```java
+* OK [CAPABILITY ...] Stalwart IMAP4rev2 at your service.
+```
+
+enter the following command:
+
+```bash
+A LOGIN alan demo
+```
+
+to which one should receive the following response:
+
+```java
+A OK [CAPABILITY IMAP4rev2 ...] Authentication successful
+```
+
+## Feeding an Inbox
+
+Once a [Stalwart](https://stalw.art/) container is running (using the Docker Compose setup as explained above), use [`imap-filler`](https://github.com/opencloud-eu/imap-filler/) to populate the inbox folder via [`IMAP APPEND`](https://www.rfc-editor.org/rfc/rfc9051.html#name-append-command):
+
+```bash
+cd "$OCDIR/"
+git clone git@github.com:opencloud-eu/imap-filler.git
+cd ./imap-filler/
+go run . --username=alan --password=demo \
+  --url=localhost:993 \
+  --empty=true \
+  --folder=Inbox \
+  --senders=6 \
+  --count=50
+```
+
+> [!NOTE]
+> Note that this operation does not use the Groupware APIs or any other OpenCloud backend services,
+> as it directly communicates with Stalwart via IMAPS on port `993` which is mapped on the host.
+
+For more details on the usage of that little helper tool, consult its [`README.md`](https://github.com/opencloud-eu/imap-filler/blob/main/README.md), although it is quite self-explanatory.
+
+> [!NOTE]
+> This only needs to be done once, since the emails are stored in a volume used by the Stalwart container.
+
+## Setting up Stalwart Principals
+
+To make things more interesting, we might want to create some resources that are currently not captured by our LDAP structure and/or not part of our demo users, such as by
+
+ * adding quota to users, to have quota limits show up in the JMAP payloads;
+ * adding groups, to have them listed as additional accounts for the users that are members of those groups;
+ * adding mailing lists
+
+Those things can be done either manually, using the Stalwart administration web UI, or by using its [Management API](https://stalw.art/docs/api/management/endpoints/).
+
+For the latter, there is another helper tool that can, among other things, take a file describing the desired state and apply the necessary changes to the current state.
+
+```bash
+cd "$OCDIR/"
+git clone git@github.com:opencloud-eu/stalwart-admin.git
+cd ./stalwart-admin/
+go run . principal import --log-level=info --activate -f "$OCDIR/opencloud/services/groupware/demo-principals.yaml"
+```
+
+## Setting Quota in Stalwart
+
+Use the [Stalwart Management API](https://stalw.art/docs/category/management-api) to set the quota for a user if you want to test quota-related Groupware APIs.
+
+Note that users that exist in OpenCloud (specifically in the LDAP, be it OpenLDAP or the built-in IDM) are only visible in Stalwart after they have been authenticated successfully once, e.g. by retrieving a [JMAP Session](https://jmap.io/spec-core.html#the-jmap-session-resource), which can be performed using the helper script `oc-st-session` (which uses the environment variable `username` to determine the username), or using `curl` directly as follows:
+
+```bash
+curl -L -k -s -u alan:demo https://stalwart.opencloud.test/.well-known/jmap
+```
+
+The following examples perform operations on the user `alan`.
+
+### Display current Quota
+
+```bash
+curl -k -s -u mailadmin:admin https://stalwart.opencloud.test/api/principal/alan | jq
+```
+
+### Modify current Quota
+
+We will change the quota to 256 MB; since the value is expressed in bytes:
+
+```bash
+value=$(( 256 * 1024 * 1024 ))
+curl -k -s -u mailadmin:admin -X PATCH https://stalwart.opencloud.test/api/principal/alan -d '[{"action":"set", "field":"quota", "value":'${value}'}]'
+```
+
+## Building after Changes
+
+If you run the `opencloud` service as a container, use the following script to update the container image and restart it:
+
+```bash
+oc-full-update
+```
+
+If you prefer to do so without that script:
+
+```bash
+cd "$OCDIR/opencloud/"
+make -C opencloud/ clean build dev-docker
+cd devtools/deployments/opencloud_full/
+docker compose up -d opencloud
+```
+
+If you run it from your IDE, there is obviously no need to do that.
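+
+If you are unsure whether the rebuilt image was actually picked up, it can help to compare the image used by the compose service with the creation time of the running container. This is just a quick sanity check using standard Docker commands; the container name `opencloud_full-opencloud-1` is the one used by this deployment:
+
+```bash
+cd "$OCDIR/opencloud/devtools/deployments/opencloud_full/"
+# image currently used by the compose service
+docker compose images opencloud
+# creation time of the running container (should be after your rebuild)
+docker inspect --format '{{.Created}}' opencloud_full-opencloud-1
+```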
+
+## API Docs
+
+The REST API documentation is extracted from the source code structure and documentation using [`go-swagger`](https://goswagger.io/go-swagger/), which needs to be installed locally as a prerequisite:
+
+```bash
+go install github.com/go-swagger/go-swagger/cmd/swagger@latest
+```
+
+The build chain is integrated within the `Makefile` in `services/groupware/`:
+
+```bash
+cd "$OCDIR/opencloud/services/groupware/"
+make apidoc-static
+```
+
+That creates a static HTML documentation file named `api.html`, rendered with [redocly](https://redocly.com/):
+
+```bash
+firefox ./api.html
+```
+
+Note that `redocly-cli` does not need to be installed; it will be pulled locally by the `Makefile`, provided that you have [pnpm](https://pnpm.io/) installed, which is already a prerequisite for other OpenCloud components.
+
+## Testing
+
+This section assumes that you are using the [helper scripts in opencloud-tools](https://github.com/pbleser-oc/opencloud-tools) as instructed above.
+
+Your main Swiss Army knife will be `oc-gw` (mnemonic for "OpenCloud Groupware").
+
+As prerequisites, you should have `curl` and either [`http`(ie)](https://httpie.io/cli) or [`xh`](https://github.com/ducaale/xh) installed, in order to have a modern CLI HTTP client that is more helpful than plain old `curl`.
+
+* `http` can be installed as follows: `pipx install httpie`,
+* while `xh` can be installed as follows: `cargo install xh --locked`
+
+As for credentials, `oc-gw` defaults to using the user `alan` (with the password `demo`), which can be changed by setting the following environment variables:
+
+* `username`
+* `password`
+
+Example:
+
+```bash
+username=margaret password=demo oc-gw //accounts/all/quotas
+```
+
+To set them more permanently for the lifetime of a shell:
+
+```bash
+export username=lynn
+export password=demo
+
+oc-gw //accounts/all/mailboxes
+
+oc-gw //accounts/all/mailboxes/roles/inbox
+```
+
+The `oc-gw` script does the following regarding authentication:
+
+* checks whether a container named `opencloud_full-opencloud-1` is running locally
+  * if so, whether it has basic auth enabled or not
+    * if yes, uses basic auth directly to authenticate against the OpenCloud Proxy service that ingresses for the OpenCloud Groupware backend, using the credentials defined in the environment variables `username` and `password` (defaulting to `alan`/`demo`)
+    * if not, always retrieves a fresh access token from Keycloak, using the credentials defined in the environment variables `username` and `password` (defaulting to `alan`/`demo`), using the "Direct Access Grant" OIDC API of Keycloak, and then uses that JWT for Bearer authentication against the OpenCloud Groupware REST API
+  * if no such container is running locally, it assumes that the `opencloud` process is running from within an IDE, with its OpenCloud Proxy service listening on `https://localhost:9200`
+
+It will also save you some typing: whenever you use `//` in the URL, it will replace it with the Groupware REST API base URL, e.g.
+
+```bash
+oc-gw //accounts
+```
+
+will be translated into
+
+```bash
+http https://cloud.opencloud.test/groupware/accounts
+```
+
+The first thing you might want to test is to query the index, which will ensure everything is working properly, including the authentication and the communication between the Groupware service and Stalwart:
+
+```bash
+oc-gw //
+```
+
+Obviously, you may use whichever HTTP client you are most comfortable with.
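+
+For instance, with `httpie` or `xh` the same index query can be issued directly against the proxy; this assumes the homelab setup with `PROXY_ENABLE_BASIC_AUTH=true`, since it relies on basic auth:
+
+```bash
+# httpie: -a sets basic auth, --verify=no skips TLS verification (self-signed certificates)
+http --verify=no -a alan:demo https://cloud.opencloud.test/groupware/
+# xh is mostly flag-compatible
+xh --verify=no -a alan:demo https://cloud.opencloud.test/groupware/
+```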
+
+Here is how to do it without the `oc-gw` script, using [`curl`](https://curl.se/):
+
+When using the “production” setup, first make sure to retrieve a JWT for authentication from Keycloak:
+
+```bash
+token=$(curl --silent --insecure --fail -X POST \
+"https://keycloak.opencloud.test/realms/openCloud/protocol/openid-connect/token" \
+-d username="alan" -d password="demo" \
+-d grant_type=password -d client_id="groupware" -d scope=openid \
+| jq -r '.access_token')
+```
+
+Then use that token to authenticate the Groupware API request:
+
+```bash
+curl --insecure -s -H "Authorization: Bearer ${token}" "https://cloud.opencloud.test/groupware/"
+```
+
+When using the “homelab” setup, authenticate directly using basic auth:
+
+```bash
+curl --insecure -s -u "alan:demo" "https://cloud.opencloud.test/groupware/"
+```
+
+> [!TIP]
+> Until everything is documented, the complete list of URI routes can be found in \
+[`$OCDIR/opencloud/services/groupware/pkg/groupware/groupware_route.go`](./pkg/groupware/groupware_route.go)
+
+## Services
+
+### Stalwart
+
+#### Web UI
+
+To access the Stalwart admin UI, open `https://stalwart.opencloud.test` and use the following credentials to log in:
+
+* username: `mailadmin`
+* password: `admin`
+
+The usual admin username `admin` had to be changed to `mailadmin` because there is already an `admin` user that ships with the default users in OpenCloud, and Stalwart always checks the LDAP directory before its internal usernames.
+
+Those credentials are configured in `devtools/deployments/opencloud_full/config/stalwart/config.toml`:
+
+```toml
+authentication.fallback-admin.secret = "$6$4qPYDVhaUHkKcY7s$bB6qhcukb9oFNYRIvaDZgbwxrMa2RvF5dumCjkBFdX19lSNqrgKltf3aPrFMuQQKkZpK2YNuQ83hB1B3NiWzj."
+authentication.fallback-admin.user = "mailadmin"
+```
+
+#### Restart from Scratch
+
+To start with a Stalwart container from scratch, removing all the data (including emails):
+
+```bash
+cd "$OCDIR/opencloud/devtools/deployments/opencloud_full"
+docker compose rm stalwart --stop
+docker volume rm opencloud_full_stalwart-data
+docker compose up -d stalwart
+```
+
+#### Diagnostics
+
+If anything goes wrong, the first thing to check is Stalwart's logs, which are configured at the most verbose level (trace) and should thus provide a lot of insight:
+
+```bash
+docker logs -f opencloud_full-stalwart-1
+```
diff --git a/services/groupware/Makefile b/services/groupware/Makefile
new file mode 100644
index 0000000000..571ebe98af
--- /dev/null
+++ b/services/groupware/Makefile
@@ -0,0 +1,39 @@
+SHELL := bash
+NAME := groupware
+
+ifneq (, $(shell command -v go 2> /dev/null)) # suppress `command not found warnings` for non go targets in CI
+include ../../.bingo/Variables.mk
+endif
+
+include ../../.make/default.mk
+include ../../.make/go.mk
+include ../../.make/release.mk
+include ../../.make/docs.mk
+
+.PHONY: apidoc
+apidoc: swagger.yml
+
+.PHONY: tsnode
+tsnode: node_modules
+
+.PHONY: node_modules
+node_modules:
+	pnpm install
+
+.PHONY: swagger.yml
+swagger.yml: apidoc.yml tsnode
+	swagger generate spec --include='groupware' --include='jmap' --include='jscalendar' --include='jscontact' --scan-models --input=$< | NODE_OPTIONS='--no-warnings --loader ts-node/esm' pnpm exec ts-node apidoc-process.ts > $@
+
+APIDOC_PORT=9999
+
+.PHONY: serve-apidoc
+serve-apidoc: swagger.yml tsnode
+	swagger serve --no-open --port=$(APIDOC_PORT) --host=127.0.0.1 --flavor=redoc $<
+
+api.html: swagger.yml favicon.png tsnode
+	pnpm exec redocly build-docs --output=$@.template --title="OpenCloud Groupware API" 
--theme.openapi.hideHostname=false --theme.openapi.hideTryItPanel=false --theme.openapi.pathInMiddlePanel=true $< + NODE_OPTIONS='--no-warnings --loader ts-node/esm' pnpm exec ts-node ./apidoc-postprocess-html.ts favicon.png < $@.template > $@ + rm $@.template + +.PHONY: apidoc-static +apidoc-static: api.html diff --git a/services/groupware/README.md b/services/groupware/README.md new file mode 100644 index 0000000000..a476e53422 --- /dev/null +++ b/services/groupware/README.md @@ -0,0 +1,30 @@ +# Groupware + +The OpenCloud Groupware service provides a REST API for performing all the backend operations needed by the OpenCloud Groupware frontends. + +## OpenAPI Documentation + +To generate the OpenAPI ("Swagger") documentation of the REST API, [`pnpm`](https://pnpm.io/) is a pre-requisite. + +Run the following command in this directory to generate the `swagger.yml` OpenAPI definition file: + +```bash +make apidoc +``` + +To generate a static HTML file using [Redocly](https://redocly.com/), which will generate a file `api.html`: + +```bash +make apidoc-static +``` + +### Path Parameters + +Path parameters are documented in the file [`api-params.yaml`](file:api-params.yaml) and injected into the OpenAPI specification using the script [`apidoc-process.ts`](file:apidoc-process.ts) (which is done automatically when using the `Makefile` as described above.) + +### Favicon + +A [favicon](https://developer.mozilla.org/en-US/docs/Glossary/Favicon) is inserted into the static (Redocly) HTML file as part of the build process in the `Makefile`, using [`favicon.png`](file:favicon.png) as the source, computing its base64 to insert it as an image using a [data URL](https://developer.mozilla.org/en-US/docs/Web/URI/Reference/Schemes/data) in order to embed it. + +That is performed by the script [`apidoc-postprocess-html.ts`](file:apidoc-postprocess-html.ts) (which is done automatically when using then `Makefile` as described above.) + diff --git a/services/groupware/api-examples.yaml b/services/groupware/api-examples.yaml new file mode 100644 index 0000000000..ddc474c6cc --- /dev/null +++ b/services/groupware/api-examples.yaml @@ -0,0 +1,303 @@ +examples: + refs: + accountId: 'a' + emailId: 'bmaaaaa2' + threadId: 'b' + mailboxIds: {"a": true} + emailKeywords: ["$seen", "$notjunk"] + emailSize: 3794 + emailReceivedAt: '2025-09-23T10:58:03Z' + emailSentAt: '2025-09-23T12:58:03+02:00' + blobId: 'cfz7vkmhcfwl1gfln02hga2fb3xwsqirirousda0rs1soeosla2p1aiaahcqjwaf' + attachmentType: 'application/pdf' + attachmentSize: 192128 + attachmentDisposition: 'attachment' + attachmentPartId: '3' + attachmentCharset: 'utf-8' + attachmentCid: 'c1' + emailAddressName: 'Camina Drummer' + emailAddressEmail: 'drummer@opa.org' + emailSenders: + - name: 'Chrisjen Avasarala' + email: 'secgen@earth.gov' + emailFroms: + - name: 'Chrissie' + email: 'secgen@earth.gov' + emailTos: + - name: 'Camina Drummer' + email: 'drummer@opa.org' + emailCCs: + - name: 'Naomi Nagata' + email: 'nagata@opa.org' + - name: 'James Holden' + email: 'holden@earth.gov' + emailBCCs: + - name: 'Fred Johnson' + email: 'johnson@opa.org' + emailSubject: 'Food for thought' + emailPreview: >- + No one starts a war unless I say then can. 
+ emailAttachments: + - partId: '2' + blobId: 'cfz7vkmhcfwl1gfln02hga2fb3xwsqirirousda0rs1soeosla2p1aiaahcqjwaf' + size: 1374 + type: 'application/pdf' + name: 'the_path.pdf' + disposition: 'attachment' + - partId: '3' + blobId: 'cnz7vkmhcfwl1gfln02hga2fb3xwsqirirousda0rs1soeosla2p1aiaahcq0wqo' + charset: 'utf-8' + size: 728 + type: 'text/plain' + name: 'secrets.txt' + disposition: 'attachment' + - partId: '4' + blobId: 'caqyey2wobo2bzjkkp2qlsn1ctitl02yylscnb77lc79nvubjihliaiadq' + size: 787545 + name: 'molecule-design.png' + type: 'image/png' + disposition: 'inline' + cid: 'c1' + inject: + Email: + size: $emailSize + EmailSummary: + size: $emailSize + EmailBodyPart: + size: $attachmentSize + ContactCardKind: 'individual' + ContactCard: + '@type': Contact + addressBookIds: + - aaabc2aa + - c329aaze + addresses: + bu7icohc: + '@type': Address + components: + - '@type': AddressComponent + kind: number + value: '12' + - '@type': AddressComponent + kind: separator + value: ' ' + - '@type': AddressComponent + kind: name + value: 'Gravity Street' + - '@type': AddressComponent + kind: locality + value: 'Medina Station' + - '@type': AddressComponent + kind: region + value: 'Outer Belt' + - '@type': AddressComponent + kind: separator + value: ' ' + - '@type': AddressComponent + kind: postcode + value: '618291' + - '@type': AddressComponent + kind: country + value: 'Sol' + isOrdered: true + defaultSeparator: ', ' + countryCode: 'CA' + coordinates: 'geo:43.6466107,-79.3889872' + timeZone: EDT + contexts: + - delivery: true + - work: true + full: '12 Gravity Street, Medina Station, Outer Belt 618291, Sol' + pref: 1 + anniversaries: + yeex2wiu: + '@type': Anniversary + kind: birth + date: + '@type': PartialDate + year: 1983 + month: 7 + day: 18 + calendarScale: iso8601 + calendars: + uin5daen: + '@type': Calendar + kind: calendar + uri: 'https://ceres.org/calendars/@cdrummer/c1' + mediaType: application/jscontact+json + contexts: + private: true + work: true + pref: 1 + label: main + created: '2025-09-30T11:00:12Z' + cryptoKeys: + iez1thoo: + '@type': CryptoKey + uri: 'https://opa.org/keys/@cdrummer.gpg' + mediaType: application/pgp-keys + contexts: + private: true + work: true + pref: 10 + label: opa + directories: + cich5tah: + '@type': Directory + kind: entry + uri: https://directory.opa.org/addrbook/cdrummer/Camina%20Drummer.vcf + mediaType: text/vcard + ju5iemoh: + '@type': Directory + kind: directory + uri: ldap://ldap.opa.org/o=OPA,ou=Bosmangs + pref: 1 + emails: + xush7tae: + '@type': EmailAddress + address: cdrummer@opa.org + contexts: + work: true + private: true + pref: 10 + label: opa + ra1ohjah: + '@type': EmailAddress + address: camina.drummer@ceres.net + contexts: + private: true + pref: 20 + id: em8ahgha + keywords: + bosmang: true + opa: true + tycho: true + rebel: true + kind: 'individual' + language: en-GB + links: + eech3oib: + '@type': Link + kind: contact + uri: mailto:contact@opa.org + pref: 1 + localizations: {} + media: + ohchae4a: + '@type': Media + kind: photo + uri: https://static.wikia.nocookie.net/expanse/images/c/c7/Tycho-stn-14.png/revision/latest/scale-to-width-down/1000?cb=20170225140521 + mediaType: image/png + members: {} + name: + '@type': Name + components: + - '@type': NameComponent + kind: given + value: Camina + - '@type': NameComponent + kind: surname + value: Drummer + isOrdered: true + defaultSeparator: ' ' + full: 'Camina Drummer' + nicknames: + aumiez4y: + '@type': Nickname + name: Bosmang + contexts: + work: true + pref: 1 + notes: + aep1poov: + '@type': 
Note + created: '2025-09-30T11:00:12Z' + author: + '@type': Author + name: 'expanse.fandom.com' + uri: 'https://expanse.fandom.com/wiki/Camina_Drummer_(TV)' + note: 'Cammina Drummer is a strong-willed, pragmatic, and no-nonsense Belter captain. Having a strong connection to her roots and her cultural identity, Drummer is a Belter through and through: She is resilient and adaptable, treats her crew with respect and equality, and is committed to the Belter way of life, which involves hard work, communal life shared with others, and not taking anything for granted.' + onlineServices: + ohne9oum: + '@type': OnlineService + service: 'Ring Network' + uri: 'https://ring.example.com/contact/@cdrummer' + user: '@cdrummer18219' + contexts: + private: true + work: true + label: ring + organizations: + eesa1aiv: + '@type': Organization + name: 'Outer Planets Alliance' + sortAs: OPA + contexts: + work: true + personalInfo: + vibi6ine: + '@type': PersonalInfo + kind: expertise + value: loyalty + level: high + phones: + xaecie9e: + '@type': Phone + number: '+1-999-555-1234' + features: + main-number: true + mobile: true + voice: true + text: true + video: true + contexts: + private: true + work: true + pref: 1 + label: main + preferredLanguages: + en: + '@type': LanguagePref + language: en-GB + contexts: + private: true + work: true + prodId: 'Mock 0.0' + relatedTo: + 'urn:uuid:f81d4fae-7dec-11d0-a765-00a0c91e6bf6': + '@type': Relation + relation: + friend: true + '8cacdfb7d1ffdb59@example.com': + '@type': Relation + relation: {} + schedulingAddresses: + xeith5qu: + '@type': SchedulingAddress + uri: 'https://scheduling.example.com/@cdrummer/c1' + contexts: + private: true + work: true + pref: 1 + label: main + speakToAs: + '@type': SpeakToAs + grammaticalGender: feminine + pronouns: + taefie5a: + '@type': Pronouns + pronouns: 'she/her' + contexts: + privatr: true + pref: 1 + titles: + sheetei4: + '@type': Title + name: Bosmang + kind: title + organizationId: eesa1aiv + uid: '05a6dd3b-f393-438e-a858-9024471fd9fc' + updated: '2025-09-30T15:24:01Z' + version: '1.0' + + diff --git a/services/groupware/api-params.yaml b/services/groupware/api-params.yaml new file mode 100644 index 0000000000..20a2e78c63 --- /dev/null +++ b/services/groupware/api-params.yaml @@ -0,0 +1,13 @@ +params: + account: + description: The identifier of the Account to use for this operation + mailbox: + description: The identifier of the Mailbox to perform this operation on + emailid: + description: The identifier of the Email to perform this operation on + addressbookid: + description: The identifier of the AddressBook to perform this operation on + calendarid: + description: The identifier of the Calendar to perform this operation on + tasklistid: + description: The identifier of the TaskList to perform this operation on diff --git a/services/groupware/apidoc-postprocess-html.ts b/services/groupware/apidoc-postprocess-html.ts new file mode 100644 index 0000000000..ec9dc38c3a --- /dev/null +++ b/services/groupware/apidoc-postprocess-html.ts @@ -0,0 +1,25 @@ +import * as fs from 'fs' +import * as cheerio from 'cheerio' + +const faviconFile = process.argv[2] +const favicon = fs.readFileSync(faviconFile).toString('base64') + +let html = '' +process.stdin.on('data', (chunk) => { + html += chunk.toString() +}) +process.stdin.on('end', () => { + try { + const $ = cheerio.load(html) + $('head').append(``) + process.stdout.write($.html()) + process.stdout.write("\n") + } catch (error) { + if (error instanceof Error) { + console.error(`Error 
occured while post-processing HTML: ${error.message}`) + } else { + console.error("Unknown error occurred") + } + } +}); + diff --git a/services/groupware/apidoc-process.ts b/services/groupware/apidoc-process.ts new file mode 100644 index 0000000000..a85a63b62b --- /dev/null +++ b/services/groupware/apidoc-process.ts @@ -0,0 +1,207 @@ +import * as fs from 'fs' +import * as yaml from 'js-yaml' + +const API_PARAMS_CONFIG_FILE = 'api-params.yaml' +const API_EXAMPLES_CONFIG_FILE = 'api-examples.yaml' + +interface Response { + $ref: string +} + +interface Parameter { + type: string + required: boolean + format: string + example: any + name: string + description: string + in: string +} + +interface VerbData { + tags: string[] + summary: string + description: string | undefined + operationId: string + parameters: Parameter[] + responses: {[status:string]:Response} +} + +interface Item { + $ref: string +} + +interface AdditionalProperties { + $ref: string +} + +interface Property { + description: string + type: string + items: Item + example: any + additionalProperties: AdditionalProperties +} + +interface Definition { + type: string + title: string + required: string[] + properties: {[property:string]:Property} + example: string + examples: string[] +} + +interface OpenApi { + paths: {[path:string]:{[verb:string]:VerbData}} + definitions: {[type:string]:Definition} +} + +interface Param { + description: string + type: string +} + +interface ParamsConfig { + params: {[param:string]:Param} +} + +interface ExamplesConfigExamples { + refs: {[id:string]:any} + inject: {[id:string]:{[property:string]:any}} +} + +interface ExamplesConfig { + examples: ExamplesConfigExamples +} + +let inputData = '' + +process.stdin.on('data', (chunk) => { + inputData += chunk.toString() +}) + +const usedExamples = new Set() +const unresolvedExampleReferences = new Set() + +function processDescription(description: string|null|undefined): string|null|undefined { + if (description !== null && description !== undefined) { + return description.split("\n").map(line => line.replace(/^(\s*)![\*\-]?/, '$1*')).join("\n") + } else { + return description + } +} + +process.stdin.on('end', () => { + try { + const paramsConfig = yaml.load(fs.readFileSync(API_PARAMS_CONFIG_FILE, 'utf8')) as ParamsConfig + const params = paramsConfig.params || {} + + const examplesConfig = yaml.load(fs.readFileSync(API_EXAMPLES_CONFIG_FILE, 'utf8')) as ExamplesConfig + const exampleRefs = examplesConfig.examples.refs + const exampleInjects = examplesConfig.examples.inject + + const data = yaml.load(inputData) as OpenApi + + for (const path in data.paths) { + const pathData = data.paths[path] + + for (const param in params) { + if (path.includes(`{${param}}`)) { + const paramsData = params[param] as Param + for (const verb in pathData) { + const verbData = pathData[verb] + verbData.parameters ??= [] + verbData.parameters.push({ + name: param, + required: true, + type: paramsData.type !== undefined ? 
paramsData.type : 'string', + in: 'path', + description: paramsData.description, + } as Parameter) + } + } + } + + // do some magic with the formatting of endpoint descriptions: + for (const verb in pathData) { + const verbData = pathData[verb] + verbData.description = processDescription(verbData.description) + } + } + + for (const def in data.definitions) { + const defData = data.definitions[def] + + if (def.startsWith('TypeOf')) { + const value = def.substring('TypeOf'.length) + defData.title = value + defData.example = value + } + + const injects = exampleInjects[def] || {} + if (defData.properties !== null && defData.properties !== undefined) { + for (const prop in defData.properties as any) { + const propData = defData.properties[prop] + + const inject = injects[prop] + if (inject !== null && inject !== undefined) { + propData.example = inject + } + + if (propData.example !== null && propData.example !== undefined) { + if (typeof propData.example === 'string' && (propData.example as string).startsWith('$')) { + const exampleId = propData.example.substring(1) + const value = exampleRefs[exampleId] + if (value === null || value === undefined) { + unresolvedExampleReferences.add(exampleId) + } else { + usedExamples.add(exampleId) + propData.example = value + } + } + } + + propData.description = processDescription(propData.description) + } + } else { + if (typeof(injects) === 'string') { + defData.example = injects + } else if (Array.isArray(injects)) { + defData.examples = injects + } + } + } + + process.stdout.write(yaml.dump(data)) + process.stdout.write("\n") + + if (unresolvedExampleReferences.size > 0) { + console.error(`\x1b[33;1m⚠️ WARNING: unresolved example references not contained in ${API_PARAMS_CONFIG_FILE}:\x1b[0m`) + unresolvedExampleReferences.forEach(item => { + console.error(` - ${item}`) + }) + console.error() + } + + const unusedExampleReferences = new Set(Object.keys(exampleRefs)) + usedExamples.forEach(item => { + unusedExampleReferences.delete(item) + }) + + if (unusedExampleReferences.size > 0) { + console.error(`\x1b[33;1m⚠️ WARNING: unused examples in ${API_EXAMPLES_CONFIG_FILE}:\x1b[0m`) + unusedExampleReferences.forEach(item => { + console.error(` - ${item}`) + }) + console.error() + } + + } catch (error) { + if (error instanceof Error) { + console.error(`Error occured while post-processing OpenAPI: ${error.message}`) + } else { + console.error("Unknown error occurred") + } + } +}) diff --git a/services/groupware/apidoc.yml b/services/groupware/apidoc.yml new file mode 100644 index 0000000000..0becebd89e --- /dev/null +++ b/services/groupware/apidoc.yml @@ -0,0 +1,79 @@ +servers: + - url: https://localhost:9200/ + description: Local Development Server +tags: + - name: bootstrap + x-displayName: Bootstrapping + description: Initialization APIs + - name: account + x-displayName: Accounts + description: APIs for accounts + - name: identity + x-displayName: Identities + description: APIs for identities + - name: mailbox + x-displayName: Mailboxes + description: APIs that pertain to mailboxes + - name: email + x-displayName: Emails + description: APIs about emails + - name: addressbook + x-displayName: Address Books + description: APIs about address books + - name: contact + x-displayName: Contacts + description: APIs about contacts + - name: calendar + x-displayName: Calendars + description: APIs about calendars + - name: event + x-displayName: Events + description: APIs about calendar events + - name: tasklist + x-displayName: TaskLists + description: APIs about 
task lists + - name: task + x-displayName: Tasks + description: APIs about tasks + - name: quota + x-displayName: Quota + description: APIs about quotas + - name: vacation + x-displayName: Vacation Responses + description: APIs about vacation responses +x-tagGroups: + - name: Bootstrapping + tags: + - bootstrap + - name: Accounts + tags: + - account + - name: Emails + tags: + - identity + - mailbox + - email + - vacation + - name: Contacts + tags: + - addressbook + - contact + - name: Events + tags: + - calendar + - event + - name: Tasks + tags: + - tasklist + - task + - name: Quotas + tags: + - quota +components: + securitySchemes: + api: + description: Authentication for API Calls + type: openIdConnect + openIdConnectUrl: https://keycloak.opencloud.test/realms/openCloud/.well-known/openid-configuration + security: + - api diff --git a/services/groupware/cmd/groupware/main.go b/services/groupware/cmd/groupware/main.go new file mode 100644 index 0000000000..ef1507eb49 --- /dev/null +++ b/services/groupware/cmd/groupware/main.go @@ -0,0 +1,19 @@ +package main + +import ( + "context" + "os" + "os/signal" + "syscall" + + "github.com/opencloud-eu/opencloud/services/groupware/pkg/command" + "github.com/opencloud-eu/opencloud/services/groupware/pkg/config/defaults" +) + +func main() { + cfg := defaults.DefaultConfig() + cfg.Context, _ = signal.NotifyContext(context.Background(), syscall.SIGINT, syscall.SIGTERM, syscall.SIGQUIT, syscall.SIGHUP) + if err := command.Execute(cfg); err != nil { + os.Exit(1) + } +} diff --git a/services/groupware/demo-principals.yaml b/services/groupware/demo-principals.yaml new file mode 100644 index 0000000000..9c2c2a2689 --- /dev/null +++ b/services/groupware/demo-principals.yaml @@ -0,0 +1,23 @@ +alan: + quota: 20GB +lynn: + quota: 5GB +mary: + quota: 500MB +programmers: + type: group + description: Programmers + emails: + - programmers@example.org + members: + - alan + - lynn +news: + type: list + description: "Get your daily science news" + members: + - alan + - lynn + emails: + - news@example.org + diff --git a/services/groupware/favicon.png b/services/groupware/favicon.png new file mode 100644 index 0000000000..b41cfa5c0a Binary files /dev/null and b/services/groupware/favicon.png differ diff --git a/services/groupware/package.json b/services/groupware/package.json new file mode 100644 index 0000000000..ddeb4cc960 --- /dev/null +++ b/services/groupware/package.json @@ -0,0 +1,15 @@ +{ + "dependencies": { + "@redocly/cli": "^2.4.0", + "@types/js-yaml": "^4.0.9", + "cheerio": "^1.1.2", + "js-yaml": "^4.1.0", + "ts-node": "^10.9.2", + "typescript": "^5.9.2" + }, + "packageManager": "pnpm@10.18.1+sha512.77a884a165cbba2d8d1c19e3b4880eee6d2fcabd0d879121e282196b80042351d5eb3ca0935fa599da1dc51265cc68816ad2bddd2a2de5ea9fdf92adbec7cd34", + "type": "module", + "devDependencies": { + "@types/node": "^24.3.1" + } +} diff --git a/services/groupware/pkg/command/root.go b/services/groupware/pkg/command/root.go new file mode 100644 index 0000000000..aa13cb6a5a --- /dev/null +++ b/services/groupware/pkg/command/root.go @@ -0,0 +1,27 @@ +package command + +import ( + "os" + + "github.com/opencloud-eu/opencloud/pkg/clihelper" + "github.com/opencloud-eu/opencloud/services/groupware/pkg/config" + "github.com/urfave/cli/v2" +) + +// GetCommands provides all commands for this service +func GetCommands(cfg *config.Config) cli.Commands { + return []*cli.Command{ + Server(cfg), + Version(cfg), + } +} + +func Execute(cfg *config.Config) error { + app := clihelper.DefaultApp(&cli.App{ + 
Name: "groupware", + Usage: "Groupware service for OpenCloud", + Commands: GetCommands(cfg), + }) + + return app.RunContext(cfg.Context, os.Args) +} diff --git a/services/groupware/pkg/command/server.go b/services/groupware/pkg/command/server.go new file mode 100644 index 0000000000..4af34fa62b --- /dev/null +++ b/services/groupware/pkg/command/server.go @@ -0,0 +1,88 @@ +package command + +import ( + "context" + "fmt" + + "github.com/oklog/run" + "github.com/opencloud-eu/opencloud/pkg/config/configlog" + "github.com/opencloud-eu/opencloud/services/groupware/pkg/config" + "github.com/opencloud-eu/opencloud/services/groupware/pkg/config/parser" + "github.com/opencloud-eu/opencloud/services/groupware/pkg/logging" + "github.com/opencloud-eu/opencloud/services/groupware/pkg/metrics" + "github.com/opencloud-eu/opencloud/services/groupware/pkg/server/debug" + "github.com/opencloud-eu/opencloud/services/groupware/pkg/server/http" + "github.com/urfave/cli/v2" +) + +// Server is the entrypoint for the server command. +func Server(cfg *config.Config) *cli.Command { + return &cli.Command{ + Name: "server", + Usage: fmt.Sprintf("start the %s service without runtime (unsupervised mode)", cfg.Service.Name), + Category: "server", + Before: func(_ *cli.Context) error { + return configlog.ReturnFatal(parser.ParseConfig(cfg)) + }, + Action: func(c *cli.Context) error { + logger := logging.Configure(cfg.Service.Name, cfg.Log) + + var ( + gr = run.Group{} + ctx, cancel = context.WithCancel(c.Context) + m = metrics.NewHttpMetrics() + ) + + defer cancel() + + server, err := debug.Server( + debug.Logger(logger), + debug.Config(cfg), + debug.Context(ctx), + ) + if err != nil { + logger.Info().Err(err).Str("transport", "debug").Msg("Failed to initialize server") + return err + } + + gr.Add(server.ListenAndServe, func(_ error) { + _ = server.Shutdown(ctx) + cancel() + }) + + httpServer, err := http.Server( + http.Logger(logger), + http.Context(ctx), + http.Config(cfg), + http.Metrics(m), + http.Namespace(cfg.HTTP.Namespace), + ) + if err != nil { + logger.Info(). + Err(err). + Str("transport", "http"). + Msg("Failed to initialize server") + + return err + } + + gr.Add(httpServer.Run, func(_ error) { + if err == nil { + logger.Info(). + Str("transport", "http"). + Str("server", cfg.Service.Name). + Msg("Shutting down server") + } else { + logger.Error().Err(err). + Str("transport", "http"). + Str("server", cfg.Service.Name). + Msg("Shutting down server") + } + + cancel() + }) + + return gr.Run() + }, + } +} diff --git a/services/groupware/pkg/command/version.go b/services/groupware/pkg/command/version.go new file mode 100644 index 0000000000..b335cb2f12 --- /dev/null +++ b/services/groupware/pkg/command/version.go @@ -0,0 +1,26 @@ +package command + +import ( + "fmt" + + "github.com/opencloud-eu/opencloud/pkg/version" + + "github.com/opencloud-eu/opencloud/services/groupware/pkg/config" + "github.com/urfave/cli/v2" +) + +// Version prints the service versions of all running instances. 
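+// It prints the version string and the compile time of the binary.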
+func Version(cfg *config.Config) *cli.Command { + return &cli.Command{ + Name: "version", + Usage: "print the version of this binary and the running service instances", + Category: "info", + Action: func(c *cli.Context) error { + fmt.Println("Version: " + version.GetString()) + fmt.Printf("Compiled: %s\n", version.Compiled()) + fmt.Println("") + + return nil + }, + } +} diff --git a/services/groupware/pkg/config/config.go b/services/groupware/pkg/config/config.go new file mode 100644 index 0000000000..e20b8ed6a2 --- /dev/null +++ b/services/groupware/pkg/config/config.go @@ -0,0 +1,49 @@ +package config + +import ( + "context" + "time" + + "github.com/opencloud-eu/opencloud/pkg/shared" +) + +// Config combines all available configuration parts. +type Config struct { + Commons *shared.Commons `yaml:"-"` // don't use this directly as configuration for a service + + Service Service `yaml:"-"` + + Log *Log `yaml:"log"` + Debug Debug `yaml:"debug"` + + HTTP HTTP `yaml:"http"` + + Mail Mail `yaml:"mail"` + + TokenManager *TokenManager `yaml:"token_manager"` + + Context context.Context `yaml:"-"` +} + +type MailMasterAuth struct { + Username string `yaml:"username" env:"GROUPWARE_JMAP_MASTER_USERNAME" desc:"The username to use for master authentication for JMAP operations." introductionVersion:"4.0.0"` + Password string `yaml:"password" env:"GROUPWARE_JMAP_MASTER_PASSWORD" desc:"The clear text password to use for master authentication for JMAP operations." introductionVersion:"4.0.0"` +} + +type MailSessionCache struct { + MaxCapacity int `yaml:"max_capacity" env:"GROUPWARE_SESSION_CACHE_MAX_CAPACITY" desc:"The maximum capacity of the JMAP session cache." introductionVersion:"4.0.0"` + Ttl time.Duration `yaml:"ttl" env:"GROUPWARE_SESSION_CACHE_TTL" desc:"The time-to-live of cached successfully obtained JMAP sessions." introductionVersion:"4.0.0"` + FailureTtl time.Duration `yaml:"failure_ttl" env:"GROUPWARE_SESSION_FAILURE_CACHE_TTL" desc:"The time-to-live of cached JMAP session retrieval failures." introductionVersion:"4.0.0"` +} + +type Mail struct { + Master MailMasterAuth `yaml:"master"` + BaseUrl string `yaml:"base_url" env:"GROUPWARE_JMAP_BASE_URL" desc:"The base fully-qualified URL to the JMAP server." introductionVersion:"4.0.0"` + Timeout time.Duration `yaml:"timeout" env:"GROUPWARE_JMAP_TIMEOUT" desc:"The timeout for JMAP HTTP operations." introductionVersion:"4.0.0"` + DefaultEmailLimit uint `yaml:"default_email_limit" env:"GROUPWARE_DEFAULT_EMAIL_LIMIT" desc:"The default email retrieval page size." introductionVersion:"4.0.0"` + MaxBodyValueBytes uint `yaml:"max_body_value_bytes" env:"GROUPWARE_MAX_BODY_VALUE_BYTES" desc:"The maximum size when retrieving email bodies from the JMAP server." introductionVersion:"4.0.0"` + DefaultContactLimit uint `yaml:"default_contact_limit" env:"GROUPWARE_DEFAULT_CONTACT_LIMIT" desc:"The default contacts retrieval page size." introductionVersion:"4.0.0"` + ResponseHeaderTimeout time.Duration `yaml:"response_header_timeout" env:"GROUPWARE_RESPONSE_HEADER_TIMEOUT" desc:"The timeout when waiting for JMAP response headers." introductionVersion:"4.0.0"` + PushHandshakeTimeout time.Duration `yaml:"push_handshake_timeout" env:"GROUPWARE_PUSH_HANDSHAKE_TIMEOUT" desc:"The timeout when performing Websocket handshakes with the JMAP server." 
introductionVersion:"4.0.0"` + SessionCache MailSessionCache `yaml:"session_cache"` +} diff --git a/services/groupware/pkg/config/debug.go b/services/groupware/pkg/config/debug.go new file mode 100644 index 0000000000..9215b21837 --- /dev/null +++ b/services/groupware/pkg/config/debug.go @@ -0,0 +1,9 @@ +package config + +// Debug defines the available debug configuration. +type Debug struct { + Addr string `yaml:"addr" env:"GROUPWARE_DEBUG_ADDR" desc:"Bind address of the debug server, where metrics, health, config and debug endpoints will be exposed." introductionVersion:"1.0.0"` + Token string `yaml:"token" env:"GROUPWARE_DEBUG_TOKEN" desc:"Token to secure the metrics endpoint." introductionVersion:"1.0.0"` + Pprof bool `yaml:"pprof" env:"GROUPWARE_DEBUG_PPROF" desc:"Enables pprof, which can be used for profiling." introductionVersion:"1.0.0"` + Zpages bool `yaml:"zpages" env:"GROUPWARE_DEBUG_ZPAGES" desc:"Enables zpages, which can be used for collecting and viewing in-memory traces." introductionVersion:"1.0.0"` +} diff --git a/services/groupware/pkg/config/defaults/defaultconfig.go b/services/groupware/pkg/config/defaults/defaultconfig.go new file mode 100644 index 0000000000..042ed81a5d --- /dev/null +++ b/services/groupware/pkg/config/defaults/defaultconfig.go @@ -0,0 +1,92 @@ +package defaults + +import ( + "strings" + "time" + + "github.com/opencloud-eu/opencloud/services/groupware/pkg/config" +) + +// FullDefaultConfig returns a fully initialized default configuration +func FullDefaultConfig() *config.Config { + cfg := DefaultConfig() + EnsureDefaults(cfg) + Sanitize(cfg) + return cfg +} + +// DefaultConfig returns a basic default configuration +func DefaultConfig() *config.Config { + return &config.Config{ + Debug: config.Debug{ + Addr: "127.0.0.1:9292", + Token: "", + Pprof: false, + Zpages: false, + }, + Mail: config.Mail{ + Master: config.MailMasterAuth{ + Username: "master", + Password: "admin", + }, + BaseUrl: "https://stalwart.opencloud.test", + Timeout: 30 * time.Second, + DefaultEmailLimit: uint(0), + MaxBodyValueBytes: uint(0), + DefaultContactLimit: uint(0), + ResponseHeaderTimeout: 10 * time.Second, + PushHandshakeTimeout: 10 * time.Second, + SessionCache: config.MailSessionCache{ + Ttl: 5 * time.Minute, + FailureTtl: 15 * time.Second, + MaxCapacity: 10_000, + }, + }, + HTTP: config.HTTP{ + Addr: "127.0.0.1:9276", + Root: "/groupware", + Namespace: "eu.opencloud.web", + CORS: config.CORS{ + AllowedOrigins: []string{"*"}, + AllowedMethods: []string{"GET", "POST", "PUT", "PATCH", "DELETE", "OPTIONS", "REPORT"}, + AllowedHeaders: []string{"Authorization", "Origin", "Content-Type", "Accept", "X-Requested-With", "X-Request-Id", "Trace-Id", "Cache-Control"}, + AllowCredentials: true, + }, + }, + Service: config.Service{ + Name: "groupware", + }, + } +} + +// EnsureDefaults adds default values to the configuration if they are not set yet +func EnsureDefaults(cfg *config.Config) { + // provide with defaults for shared logging, since we need a valid destination address for "envdecode". 
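+	// Precedence: an explicitly configured log section wins, otherwise the
+	// shared commons settings are copied over, and as a last resort an empty
+	// struct is used so that envdecode still has a target to decode into.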
+ if cfg.Log == nil && cfg.Commons != nil && cfg.Commons.Log != nil { + cfg.Log = &config.Log{ + Level: cfg.Commons.Log.Level, + Pretty: cfg.Commons.Log.Pretty, + Color: cfg.Commons.Log.Color, + File: cfg.Commons.Log.File, + } + } else if cfg.Log == nil { + cfg.Log = &config.Log{} + } + if cfg.TokenManager == nil && cfg.Commons != nil && cfg.Commons.TokenManager != nil { + cfg.TokenManager = &config.TokenManager{ + JWTSecret: cfg.Commons.TokenManager.JWTSecret, + } + } else if cfg.TokenManager == nil { + cfg.TokenManager = &config.TokenManager{} + } + if cfg.Commons != nil { + cfg.HTTP.TLS = cfg.Commons.HTTPServiceTLS + } +} + +// Sanitize sanitized the configuration +func Sanitize(cfg *config.Config) { + if cfg.HTTP.Root != "/" { + cfg.HTTP.Root = strings.TrimSuffix(cfg.HTTP.Root, "/") + } +} diff --git a/services/groupware/pkg/config/http.go b/services/groupware/pkg/config/http.go new file mode 100644 index 0000000000..2907f26601 --- /dev/null +++ b/services/groupware/pkg/config/http.go @@ -0,0 +1,20 @@ +package config + +import "github.com/opencloud-eu/opencloud/pkg/shared" + +// CORS defines the available cors configuration. +type CORS struct { + AllowedOrigins []string `yaml:"allow_origins" env:"OC_CORS_ALLOW_ORIGINS;GROUPWARE_CORS_ALLOW_ORIGINS" desc:"A list of allowed CORS origins. See following chapter for more details: *Access-Control-Allow-Origin* at https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Access-Control-Allow-Origin. See the Environment Variable Types description for more details." introductionVersion:"1.0.0"` + AllowedMethods []string `yaml:"allow_methods" env:"OC_CORS_ALLOW_METHODS;GROUPWARE_CORS_ALLOW_METHODS" desc:"A list of allowed CORS methods. See following chapter for more details: *Access-Control-Request-Method* at https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Access-Control-Request-Method. See the Environment Variable Types description for more details." introductionVersion:"1.0.0"` + AllowedHeaders []string `yaml:"allow_headers" env:"OC_CORS_ALLOW_HEADERS;GROUPWARE_CORS_ALLOW_HEADERS" desc:"A list of allowed CORS headers. See following chapter for more details: *Access-Control-Request-Headers* at https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Access-Control-Request-Headers. See the Environment Variable Types description for more details." introductionVersion:"1.0.0"` + AllowCredentials bool `yaml:"allow_credentials" env:"OC_CORS_ALLOW_CREDENTIALS;GROUPWARE_CORS_ALLOW_CREDENTIALS" desc:"Allow credentials for CORS.See following chapter for more details: *Access-Control-Allow-Credentials* at https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Access-Control-Allow-Credentials." introductionVersion:"1.0.0"` +} + +// HTTP defines the available http configuration. +type HTTP struct { + Addr string `yaml:"addr" env:"GROUPWARE_HTTP_ADDR" desc:"The bind address of the HTTP service." introductionVersion:"1.0.0"` + TLS shared.HTTPServiceTLS `yaml:"tls"` + Root string `yaml:"root" env:"GROUPWARE_HTTP_ROOT" desc:"Subdirectory that serves as the root for this HTTP service." introductionVersion:"1.0.0"` + Namespace string `yaml:"-"` + CORS CORS `yaml:"cors"` +} diff --git a/services/groupware/pkg/config/log.go b/services/groupware/pkg/config/log.go new file mode 100644 index 0000000000..c9464300ce --- /dev/null +++ b/services/groupware/pkg/config/log.go @@ -0,0 +1,9 @@ +package config + +// Log defines the available log configuration. 
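+//
+// Every field can be set either via the shared OC_LOG_* environment variables
+// or the service-specific GROUPWARE_LOG_* variables, for example
+// (illustrative values):
+//
+//	GROUPWARE_LOG_LEVEL=debug GROUPWARE_LOG_PRETTY=true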
+type Log struct { + Level string `mapstructure:"level" env:"OC_LOG_LEVEL;GROUPWARE_LOG_LEVEL" desc:"The log level. Valid values are: 'panic', 'fatal', 'error', 'warn', 'info', 'debug', 'trace'." introductionVersion:"1.0.0"` + Pretty bool `mapstructure:"pretty" env:"OC_LOG_PRETTY;GROUPWARE_LOG_PRETTY" desc:"Activates pretty log output." introductionVersion:"1.0.0"` + Color bool `mapstructure:"color" env:"OC_LOG_COLOR;GROUPWARE_LOG_COLOR" desc:"Activates colorized log output." introductionVersion:"1.0.0"` + File string `mapstructure:"file" env:"OC_LOG_FILE;GROUPWARE_LOG_FILE" desc:"The path to the log file. Activates logging to this file if set." introductionVersion:"1.0.0"` +} diff --git a/services/groupware/pkg/config/parser/parse.go b/services/groupware/pkg/config/parser/parse.go new file mode 100644 index 0000000000..61f1e0dd4f --- /dev/null +++ b/services/groupware/pkg/config/parser/parse.go @@ -0,0 +1,44 @@ +package parser + +import ( + "errors" + + occfg "github.com/opencloud-eu/opencloud/pkg/config" + "github.com/opencloud-eu/opencloud/pkg/shared" + "github.com/opencloud-eu/opencloud/services/groupware/pkg/config" + "github.com/opencloud-eu/opencloud/services/groupware/pkg/config/defaults" + + "github.com/opencloud-eu/opencloud/pkg/config/envdecode" +) + +// ParseConfig loads configuration from known paths. +func ParseConfig(cfg *config.Config) error { + err := occfg.BindSourcesToStructs(cfg.Service.Name, cfg) + if err != nil { + return err + } + + defaults.EnsureDefaults(cfg) + + // load all env variables relevant to the config in the current context. + if err := envdecode.Decode(cfg); err != nil { + // no environment variable set for this config is an expected "error" + if !errors.Is(err, envdecode.ErrNoTargetFieldsAreSet) { + return err + } + } + + // sanitize config + defaults.Sanitize(cfg) + + return Validate(cfg) +} + +// Validate can validate the configuration +func Validate(cfg *config.Config) error { + if cfg.TokenManager.JWTSecret == "" { + return shared.MissingJWTTokenError(cfg.Service.Name) + } + + return nil +} diff --git a/services/groupware/pkg/config/reva.go b/services/groupware/pkg/config/reva.go new file mode 100644 index 0000000000..a357bdf548 --- /dev/null +++ b/services/groupware/pkg/config/reva.go @@ -0,0 +1,6 @@ +package config + +// TokenManager is the config for using the reva token manager +type TokenManager struct { + JWTSecret string `yaml:"jwt_secret" env:"OC_JWT_SECRET;GROUPWARE_JWT_SECRET" desc:"The secret to mint and validate jwt tokens."` +} diff --git a/services/groupware/pkg/config/service.go b/services/groupware/pkg/config/service.go new file mode 100644 index 0000000000..d1eac383f0 --- /dev/null +++ b/services/groupware/pkg/config/service.go @@ -0,0 +1,6 @@ +package config + +// Service defines the available service configuration. 
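+//
+// The name is fixed to "groupware" by the default configuration and is not
+// read from YAML or the environment.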
+type Service struct { + Name string `yaml:"-"` +} diff --git a/services/groupware/pkg/groupware/groupware_api.go b/services/groupware/pkg/groupware/groupware_api.go new file mode 100644 index 0000000000..44ed201447 --- /dev/null +++ b/services/groupware/pkg/groupware/groupware_api.go @@ -0,0 +1,19 @@ +package groupware + +const ( + Version = "0.0.1" +) + +const ( + CapMail_1 = "mail:1" +) + +var Capabilities = []string{ + CapMail_1, +} + +const ( + RelationEntityEmail = "email" + RelationTypeSameThread = "same-thread" + RelationTypeSameSender = "same-sender" +) diff --git a/services/groupware/pkg/groupware/groupware_api_account.go b/services/groupware/pkg/groupware/groupware_api_account.go new file mode 100644 index 0000000000..52b12aa94d --- /dev/null +++ b/services/groupware/pkg/groupware/groupware_api_account.go @@ -0,0 +1,141 @@ +package groupware + +import ( + "net/http" + "slices" + "strings" + + "github.com/opencloud-eu/opencloud/pkg/jmap" + "github.com/opencloud-eu/opencloud/pkg/structs" +) + +// When the request succeeds. +// swagger:response GetAccountResponse200 +type SwaggerGetAccountResponse struct { + // in: body + Body struct { + *jmap.Account + } +} + +// swagger:route GET /groupware/accounts/{account} account account +// Get attributes of a given account. +// +// responses: +// +// 200: GetAccountResponse200 +// 400: ErrorResponse400 +// 404: ErrorResponse404 +// 500: ErrorResponse500 +func (g *Groupware) GetAccount(w http.ResponseWriter, r *http.Request) { + g.respond(w, r, func(req Request) Response { + accountId, account, err := req.GetAccountForMail() + if err != nil { + return errorResponse(single(accountId), err) + } + return etagResponse(single(accountId), account, req.session.State, AccountResponseObjectType, jmap.State(req.session.State), "") + }) +} + +// When the request succeeds. +// swagger:response GetAccountsResponse200 +type SwaggerGetAccountsResponse struct { + // in: body + Body map[string]jmap.Account +} + +// swagger:route GET /groupware/accounts account accounts +// Get the list of all of the user's accounts. 
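+//
+// The accounts are sorted by their identifier so that repeated calls return
+// them in a stable order.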
+// +// responses: +// +// 200: GetAccountsResponse200 +// 400: ErrorResponse400 +// 404: ErrorResponse404 +// 500: ErrorResponse500 +func (g *Groupware) GetAccounts(w http.ResponseWriter, r *http.Request) { + g.respond(w, r, func(req Request) Response { + list := make([]AccountWithId, len(req.session.Accounts)) + i := 0 + for accountId, account := range req.session.Accounts { + list[i] = AccountWithId{ + AccountId: accountId, + Account: account, + } + i++ + } + // sort on accountId to have a stable order that remains the same with every query + slices.SortFunc(list, func(a, b AccountWithId) int { return strings.Compare(a.AccountId, b.AccountId) }) + return etagResponse(structs.Map(list, func(a AccountWithId) string { return a.AccountId }), list, req.session.State, AccountResponseObjectType, jmap.State(req.session.State), "") + }) +} + +func (g *Groupware) GetAccountsWithTheirIdentities(w http.ResponseWriter, r *http.Request) { + g.respond(w, r, func(req Request) Response { + allAccountIds := req.AllAccountIds() + resp, sessionState, state, lang, err := g.jmap.GetIdentitiesForAllAccounts(allAccountIds, req.session, req.ctx, req.logger, req.language()) + if err != nil { + return req.errorResponseFromJmap(allAccountIds, err) + } + list := make([]AccountWithIdAndIdentities, len(req.session.Accounts)) + i := 0 + for accountId, account := range req.session.Accounts { + identities, ok := resp.Identities[accountId] + if !ok { + identities = []jmap.Identity{} + } + slices.SortFunc(identities, func(a, b jmap.Identity) int { return strings.Compare(a.Id, b.Id) }) + list[i] = AccountWithIdAndIdentities{ + AccountId: accountId, + Account: account, + Identities: identities, + } + i++ + } + // sort on accountId to have a stable order that remains the same with every query + slices.SortFunc(list, func(a, b AccountWithIdAndIdentities) int { return strings.Compare(a.AccountId, b.AccountId) }) + return etagResponse(structs.Map(list, func(a AccountWithIdAndIdentities) string { return a.AccountId }), list, sessionState, AccountResponseObjectType, state, lang) + }) +} + +type AccountWithId struct { + AccountId string `json:"accountId,omitempty"` + jmap.Account +} + +type AccountWithIdAndIdentities struct { + AccountId string `json:"accountId,omitempty"` + jmap.Account + Identities []jmap.Identity `json:"identities,omitempty"` +} + +type AccountBootstrapResponse struct { + // The API version. + Version string `json:"version"` + + // A list of capabilities of this API version. + Capabilities []string `json:"capabilities"` + + // API limits. + Limits IndexLimits `json:"limits"` + + // Accounts that are available to the user. + // + // The key of the mapis the identifier. + Accounts map[string]IndexAccount `json:"accounts"` + + // Primary accounts for usage types. + PrimaryAccounts IndexPrimaryAccounts `json:"primaryAccounts"` + + // Mailboxes. + Mailboxes map[string][]jmap.Mailbox `json:"mailboxes"` +} + +// When the request suceeds. 
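+// The body bundles the API version, capabilities, limits, accounts, primary
+// accounts and mailboxes needed to bootstrap a client in a single response.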
+// swagger:response GetAccountBootstrapResponse200 +type SwaggerAccountBootstrapResponse struct { + // in: body + Body struct { + *AccountBootstrapResponse + } +} diff --git a/services/groupware/pkg/groupware/groupware_api_blob.go b/services/groupware/pkg/groupware/groupware_api_blob.go new file mode 100644 index 0000000000..b733abf136 --- /dev/null +++ b/services/groupware/pkg/groupware/groupware_api_blob.go @@ -0,0 +1,133 @@ +package groupware + +import ( + "fmt" + "io" + "net/http" + "strconv" + + "github.com/go-chi/chi/v5" + "github.com/opencloud-eu/opencloud/pkg/log" +) + +const ( + DefaultBlobDownloadType = "application/octet-stream" +) + +func (g *Groupware) GetBlobMeta(w http.ResponseWriter, r *http.Request) { + g.respond(w, r, func(req Request) Response { + accountId, err := req.GetAccountIdForBlob() + if err != nil { + return errorResponse(single(accountId), err) + } + l := req.logger.With().Str(logAccountId, accountId) + + blobId := chi.URLParam(req.r, UriParamBlobId) + if blobId == "" { + return req.parameterErrorResponse(single(accountId), UriParamBlobId, fmt.Sprintf("Invalid value for path parameter '%v': empty", UriParamBlobId)) + } + l = l.Str(UriParamBlobId, blobId) + + logger := log.From(l) + + res, sessionState, state, lang, jerr := g.jmap.GetBlobMetadata(accountId, req.session, req.ctx, logger, req.language(), blobId) + if jerr != nil { + return req.errorResponseFromJmap(single(accountId), jerr) + } + blob := res + if blob == nil { + return notFoundResponse(single(accountId), sessionState) + } + return etagResponse(single(accountId), res, sessionState, BlobResponseObjectType, state, lang) + }) +} + +func (g *Groupware) UploadBlob(w http.ResponseWriter, r *http.Request) { + g.respond(w, r, func(req Request) Response { + contentType := r.Header.Get("Content-Type") + body := r.Body + if body != nil { + defer func(Body io.ReadCloser) { + err := Body.Close() + if err != nil { + req.logger.Error().Err(err).Msg("failed to close response body") + } + }(body) + } + + accountId, err := req.GetAccountIdForBlob() + if err != nil { + return errorResponse(single(accountId), err) + } + logger := log.From(req.logger.With().Str(logAccountId, accountId)) + + resp, lang, jerr := g.jmap.UploadBlobStream(accountId, req.session, req.ctx, logger, req.language(), contentType, body) + if jerr != nil { + return req.errorResponseFromJmap(single(accountId), jerr) + } + + return response(single(accountId), resp, req.session.State, lang) + }) +} + +func (g *Groupware) DownloadBlob(w http.ResponseWriter, r *http.Request) { + g.stream(w, r, func(req Request, w http.ResponseWriter) *Error { + blobId := chi.URLParam(req.r, UriParamBlobId) + name := chi.URLParam(req.r, UriParamBlobName) + q := req.r.URL.Query() + typ := q.Get(QueryParamBlobType) + + accountId, gwerr := req.GetAccountIdForBlob() + if gwerr != nil { + return gwerr + } + logger := log.From(req.logger.With().Str(logAccountId, accountId).Str(UriParamBlobId, blobId)) + + return req.serveBlob(blobId, name, typ, logger, accountId, w) + }) +} + +func (r *Request) serveBlob(blobId string, name string, typ string, logger *log.Logger, accountId string, w http.ResponseWriter) *Error { + if typ == "" { + typ = DefaultBlobDownloadType + } + blob, lang, jerr := r.g.jmap.DownloadBlobStream(accountId, blobId, name, typ, r.session, r.ctx, logger, r.language()) + if blob != nil && blob.Body != nil { + defer func(Body io.ReadCloser) { + err := Body.Close() + if err != nil { + logger.Error().Err(err).Msg("failed to close response body") + } + }(blob.Body) + 
} + if jerr != nil { + return r.apiErrorFromJmap(jerr) + } + if blob == nil { + w.WriteHeader(http.StatusNotFound) + return nil + } + + if blob.Type != "" { + w.Header().Add("Content-Type", blob.Type) + } + if blob.CacheControl != "" { + w.Header().Add("Cache-Control", blob.CacheControl) + } + if blob.ContentDisposition != "" { + w.Header().Add("Content-Disposition", blob.ContentDisposition) + } + if blob.Size >= 0 { + w.Header().Add("Content-Size", strconv.Itoa(blob.Size)) + } + if lang != "" { + w.Header().Add("Content-Language", string(lang)) + } + + _, err := io.Copy(w, blob.Body) + if err != nil { + return r.observedParameterError(ErrorStreamingResponse) + } + + return nil +} diff --git a/services/groupware/pkg/groupware/groupware_api_calendars.go b/services/groupware/pkg/groupware/groupware_api_calendars.go new file mode 100644 index 0000000000..8524066d26 --- /dev/null +++ b/services/groupware/pkg/groupware/groupware_api_calendars.go @@ -0,0 +1,235 @@ +package groupware + +import ( + "net/http" + "strings" + + "github.com/go-chi/chi/v5" + "github.com/opencloud-eu/opencloud/pkg/jmap" + "github.com/opencloud-eu/opencloud/pkg/log" +) + +// When the request succeeds. +// swagger:response GetCalendars200 +type SwaggerGetCalendars200 struct { + // in: body + Body []jmap.Calendar +} + +// swagger:route GET /groupware/accounts/{account}/calendars calendar calendars +// Get all calendars of an account. +// +// responses: +// +// 200: GetCalendars200 +// 400: ErrorResponse400 +// 404: ErrorResponse404 +// 500: ErrorResponse500 +func (g *Groupware) GetCalendars(w http.ResponseWriter, r *http.Request) { + g.respond(w, r, func(req Request) Response { + ok, accountId, resp := req.needCalendarWithAccount() + if !ok { + return resp + } + + calendars, sessionState, state, lang, jerr := g.jmap.GetCalendars(accountId, req.session, req.ctx, req.logger, req.language(), nil) + if jerr != nil { + return req.errorResponseFromJmap(single(accountId), jerr) + } + + return etagResponse(single(accountId), calendars, sessionState, CalendarResponseObjectType, state, lang) + }) +} + +// When the request succeeds. +// swagger:response GetCalendarById200 +type SwaggerGetCalendarById200 struct { + // in: body + Body struct { + *jmap.Calendar + } +} + +// swagger:route GET /groupware/accounts/{account}/calendars/{calendarid} calendar calendar_by_id +// Get a calendar of an account by its identifier. +// +// responses: +// +// 200: GetCalendarById200 +// 400: ErrorResponse400 +// 404: ErrorResponse404 +// 500: ErrorResponse500 +func (g *Groupware) GetCalendarById(w http.ResponseWriter, r *http.Request) { + g.respond(w, r, func(req Request) Response { + ok, accountId, resp := req.needCalendarWithAccount() + if !ok { + return resp + } + + l := req.logger.With() + + calendarId := chi.URLParam(r, UriParamCalendarId) + l = l.Str(UriParamCalendarId, log.SafeString(calendarId)) + + logger := log.From(l) + calendars, sessionState, state, lang, jerr := g.jmap.GetCalendars(accountId, req.session, req.ctx, logger, req.language(), []string{calendarId}) + if jerr != nil { + return req.errorResponseFromJmap(single(accountId), jerr) + } + + if len(calendars.NotFound) > 0 { + return notFoundResponse(single(accountId), sessionState) + } else { + return etagResponse(single(accountId), calendars.Calendars[0], sessionState, CalendarResponseObjectType, state, lang) + } + }) +} + +// When the request succeeds. 
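+// The events are sorted by their 'updated' property in descending order.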
+// swagger:response GetEventsInCalendar200 +type SwaggerGetEventsInCalendar200 struct { + // in: body + Body []jmap.CalendarEvent +} + +// swagger:route GET /groupware/accounts/{account}/calendars/{calendarid}/events event events_in_addressbook +// Get all the events in a calendar of an account by its identifier. +// +// responses: +// +// 200: GetEventsInCalendar200 +// 400: ErrorResponse400 +// 404: ErrorResponse404 +// 500: ErrorResponse500 +func (g *Groupware) GetEventsInCalendar(w http.ResponseWriter, r *http.Request) { + g.respond(w, r, func(req Request) Response { + ok, accountId, resp := req.needCalendarWithAccount() + if !ok { + return resp + } + + l := req.logger.With() + + calendarId := chi.URLParam(r, UriParamCalendarId) + l = l.Str(UriParamCalendarId, log.SafeString(calendarId)) + + offset, ok, err := req.parseUIntParam(QueryParamOffset, 0) + if err != nil { + return errorResponse(single(accountId), err) + } + if ok { + l = l.Uint(QueryParamOffset, offset) + } + + limit, ok, err := req.parseUIntParam(QueryParamLimit, g.defaults.contactLimit) + if err != nil { + return errorResponse(single(accountId), err) + } + if ok { + l = l.Uint(QueryParamLimit, limit) + } + + filter := jmap.CalendarEventFilterCondition{ + InCalendar: calendarId, + } + sortBy := []jmap.CalendarEventComparator{{Property: jmap.CalendarEventPropertyUpdated, IsAscending: false}} + + logger := log.From(l) + eventsByAccountId, sessionState, state, lang, jerr := g.jmap.QueryCalendarEvents(single(accountId), req.session, req.ctx, logger, req.language(), filter, sortBy, offset, limit) + if jerr != nil { + return req.errorResponseFromJmap(single(accountId), jerr) + } + + if events, ok := eventsByAccountId[accountId]; ok { + return etagResponse(single(accountId), events, sessionState, EventResponseObjectType, state, lang) + } else { + return notFoundResponse(single(accountId), sessionState) + } + }) +} + +func (g *Groupware) CreateCalendarEvent(w http.ResponseWriter, r *http.Request) { + g.respond(w, r, func(req Request) Response { + ok, accountId, resp := req.needCalendarWithAccount() + if !ok { + return resp + } + + l := req.logger.With() + + calendarId := chi.URLParam(r, UriParamCalendarId) + l = l.Str(UriParamCalendarId, log.SafeString(calendarId)) + + var create jmap.CalendarEvent + err := req.body(&create) + if err != nil { + return errorResponse(single(accountId), err) + } + + logger := log.From(l) + created, sessionState, state, lang, jerr := g.jmap.CreateCalendarEvent(accountId, req.session, req.ctx, logger, req.language(), create) + if jerr != nil { + return req.errorResponseFromJmap(single(accountId), jerr) + } + return etagResponse(single(accountId), created, sessionState, EventResponseObjectType, state, lang) + }) +} + +func (g *Groupware) DeleteCalendarEvent(w http.ResponseWriter, r *http.Request) { + g.respond(w, r, func(req Request) Response { + ok, accountId, resp := req.needCalendarWithAccount() + if !ok { + return resp + } + l := req.logger.With().Str(accountId, log.SafeString(accountId)) + + calendarId := chi.URLParam(r, UriParamCalendarId) + eventId := chi.URLParam(r, UriParamEventId) + l.Str(UriParamCalendarId, log.SafeString(calendarId)).Str(UriParamEventId, log.SafeString(eventId)) + + logger := log.From(l) + + deleted, sessionState, state, _, jerr := g.jmap.DeleteCalendarEvent(accountId, []string{eventId}, req.session, req.ctx, logger, req.language()) + if jerr != nil { + return req.errorResponseFromJmap(single(accountId), jerr) + } + + for _, e := range deleted { + desc := e.Description + if 
desc != "" { + return errorResponseWithSessionState(single(accountId), apiError( + req.errorId(), + ErrorFailedToDeleteContact, + withDetail(e.Description), + ), sessionState) + } else { + return errorResponseWithSessionState(single(accountId), apiError( + req.errorId(), + ErrorFailedToDeleteContact, + ), sessionState) + } + } + return noContentResponseWithEtag(single(accountId), sessionState, EventResponseObjectType, state) + }) +} + +func (g *Groupware) ParseIcalBlob(w http.ResponseWriter, r *http.Request) { + g.respond(w, r, func(req Request) Response { + accountId, err := req.GetAccountIdForBlob() + if err != nil { + return errorResponse(single(accountId), err) + } + + blobId := chi.URLParam(r, UriParamBlobId) + + blobIds := strings.Split(blobId, ",") + l := req.logger.With().Array(UriParamBlobId, log.SafeStringArray(blobIds)) + logger := log.From(l) + + resp, sessionState, state, lang, jerr := g.jmap.ParseICalendarBlob(accountId, req.session, req.ctx, logger, req.language(), blobIds) + if jerr != nil { + return req.errorResponseFromJmap(single(accountId), jerr) + } + return etagResponse(single(accountId), resp, sessionState, EventResponseObjectType, state, lang) + }) +} diff --git a/services/groupware/pkg/groupware/groupware_api_contacts.go b/services/groupware/pkg/groupware/groupware_api_contacts.go new file mode 100644 index 0000000000..9cebd7ed58 --- /dev/null +++ b/services/groupware/pkg/groupware/groupware_api_contacts.go @@ -0,0 +1,239 @@ +package groupware + +import ( + "net/http" + + "github.com/go-chi/chi/v5" + "github.com/opencloud-eu/opencloud/pkg/jmap" + "github.com/opencloud-eu/opencloud/pkg/jscontact" + "github.com/opencloud-eu/opencloud/pkg/log" +) + +// When the request succeeds. +// swagger:response GetAddressbooks200 +type SwaggerGetAddressbooks200 struct { + // in: body + Body []jmap.AddressBook +} + +// swagger:route GET /groupware/accounts/{account}/addressbooks addressbook addressbooks +// Get all addressbooks of an account. +// +// responses: +// +// 200: GetAddressbooks200 +// 400: ErrorResponse400 +// 404: ErrorResponse404 +// 500: ErrorResponse500 +func (g *Groupware) GetAddressbooks(w http.ResponseWriter, r *http.Request) { + g.respond(w, r, func(req Request) Response { + ok, accountId, resp := req.needContactWithAccount() + if !ok { + return resp + } + + addressbooks, sessionState, state, lang, jerr := g.jmap.GetAddressbooks(accountId, req.session, req.ctx, req.logger, req.language(), nil) + if jerr != nil { + return req.errorResponseFromJmap(single(accountId), jerr) + } + + return etagResponse(single(accountId), addressbooks, sessionState, AddressBookResponseObjectType, state, lang) + }) +} + +// When the request succeeds. +// swagger:response GetAddressbookById200 +type SwaggerGetAddressbookById200 struct { + // in: body + Body struct { + *jmap.AddressBook + } +} + +// swagger:route GET /groupware/accounts/{account}/addressbooks/{addressbookid} addressbook addressbook_by_id +// Get an addressbook of an account by its identifier. 
+// +// responses: +// +// 200: GetAddressbookById200 +// 400: ErrorResponse400 +// 404: ErrorResponse404 +// 500: ErrorResponse500 +func (g *Groupware) GetAddressbook(w http.ResponseWriter, r *http.Request) { + g.respond(w, r, func(req Request) Response { + ok, accountId, resp := req.needContactWithAccount() + if !ok { + return resp + } + + l := req.logger.With() + + addressBookId := chi.URLParam(r, UriParamAddressBookId) + l = l.Str(UriParamAddressBookId, log.SafeString(addressBookId)) + + logger := log.From(l) + addressbooks, sessionState, state, lang, jerr := g.jmap.GetAddressbooks(accountId, req.session, req.ctx, logger, req.language(), []string{addressBookId}) + if jerr != nil { + return req.errorResponseFromJmap(single(accountId), jerr) + } + + if len(addressbooks.NotFound) > 0 { + return notFoundResponse(single(accountId), sessionState) + } else { + return etagResponse(single(accountId), addressbooks, sessionState, AddressBookResponseObjectType, state, lang) + } + }) +} + +// When the request succeeds. +// swagger:response GetContactsInAddressbook200 +type SwaggerGetContactsInAddressbook200 struct { + // in: body + Body []jscontact.ContactCard +} + +// swagger:route GET /groupware/accounts/{account}/addressbooks/{addressbookid}/contacts contact contacts_in_addressbook +// Get all the contacts in an addressbook of an account by its identifier. +// +// responses: +// +// 200: GetContactsInAddressbook200 +// 400: ErrorResponse400 +// 404: ErrorResponse404 +// 500: ErrorResponse500 +func (g *Groupware) GetContactsInAddressbook(w http.ResponseWriter, r *http.Request) { + g.respond(w, r, func(req Request) Response { + ok, accountId, resp := req.needContactWithAccount() + if !ok { + return resp + } + + l := req.logger.With() + + addressBookId := chi.URLParam(r, UriParamAddressBookId) + l = l.Str(UriParamAddressBookId, log.SafeString(addressBookId)) + + offset, ok, err := req.parseUIntParam(QueryParamOffset, 0) + if err != nil { + return errorResponse(single(accountId), err) + } + if ok { + l = l.Uint(QueryParamOffset, offset) + } + + limit, ok, err := req.parseUIntParam(QueryParamLimit, g.defaults.contactLimit) + if err != nil { + return errorResponse(single(accountId), err) + } + if ok { + l = l.Uint(QueryParamLimit, limit) + } + + filter := jmap.ContactCardFilterCondition{ + InAddressBook: addressBookId, + } + sortBy := []jmap.ContactCardComparator{{Property: jscontact.ContactCardPropertyUpdated, IsAscending: false}} + + logger := log.From(l) + contactsByAccountId, sessionState, state, lang, jerr := g.jmap.QueryContactCards(single(accountId), req.session, req.ctx, logger, req.language(), filter, sortBy, offset, limit) + if jerr != nil { + return req.errorResponseFromJmap(single(accountId), jerr) + } + + if contacts, ok := contactsByAccountId[accountId]; ok { + return etagResponse(single(accountId), contacts, sessionState, ContactResponseObjectType, state, lang) + } else { + return etagNotFoundResponse(single(accountId), sessionState, ContactResponseObjectType, state, lang) + } + }) +} + +func (g *Groupware) GetContactById(w http.ResponseWriter, r *http.Request) { + g.respond(w, r, func(req Request) Response { + ok, accountId, resp := req.needContactWithAccount() + if !ok { + return resp + } + + l := req.logger.With() + + contactId := chi.URLParam(r, UriParamContactId) + l = l.Str(UriParamContactId, log.SafeString(contactId)) + + logger := log.From(l) + contactsById, sessionState, state, lang, jerr := g.jmap.GetContactCardsById(accountId, req.session, req.ctx, logger, req.language(), 
[]string{contactId}) + if jerr != nil { + return req.errorResponseFromJmap(single(accountId), jerr) + } + + if contact, ok := contactsById[contactId]; ok { + return etagResponse(single(accountId), contact, sessionState, ContactResponseObjectType, state, lang) + } else { + return etagNotFoundResponse(single(accountId), sessionState, ContactResponseObjectType, state, lang) + } + }) +} + +func (g *Groupware) CreateContact(w http.ResponseWriter, r *http.Request) { + g.respond(w, r, func(req Request) Response { + ok, accountId, resp := req.needContactWithAccount() + if !ok { + return resp + } + + l := req.logger.With() + + addressBookId := chi.URLParam(r, UriParamAddressBookId) + l = l.Str(UriParamAddressBookId, log.SafeString(addressBookId)) + + var create jscontact.ContactCard + err := req.body(&create) + if err != nil { + return errorResponse(single(accountId), err) + } + + logger := log.From(l) + created, sessionState, state, lang, jerr := g.jmap.CreateContactCard(accountId, req.session, req.ctx, logger, req.language(), create) + if jerr != nil { + return req.errorResponseFromJmap(single(accountId), jerr) + } + return etagResponse(single(accountId), created, sessionState, ContactResponseObjectType, state, lang) + }) +} + +func (g *Groupware) DeleteContact(w http.ResponseWriter, r *http.Request) { + g.respond(w, r, func(req Request) Response { + ok, accountId, resp := req.needContactWithAccount() + if !ok { + return resp + } + l := req.logger.With().Str(accountId, log.SafeString(accountId)) + + contactId := chi.URLParam(r, UriParamContactId) + l.Str(UriParamContactId, log.SafeString(contactId)) + + logger := log.From(l) + + deleted, sessionState, state, _, jerr := g.jmap.DeleteContactCard(accountId, []string{contactId}, req.session, req.ctx, logger, req.language()) + if jerr != nil { + return req.errorResponseFromJmap(single(accountId), jerr) + } + + for _, e := range deleted { + desc := e.Description + if desc != "" { + return errorResponseWithSessionState(single(accountId), apiError( + req.errorId(), + ErrorFailedToDeleteContact, + withDetail(e.Description), + ), sessionState) + } else { + return errorResponseWithSessionState(single(accountId), apiError( + req.errorId(), + ErrorFailedToDeleteContact, + ), sessionState) + } + } + return noContentResponseWithEtag(single(accountId), sessionState, ContactResponseObjectType, state) + }) +} diff --git a/services/groupware/pkg/groupware/groupware_api_emails.go b/services/groupware/pkg/groupware/groupware_api_emails.go new file mode 100644 index 0000000000..454831f9ff --- /dev/null +++ b/services/groupware/pkg/groupware/groupware_api_emails.go @@ -0,0 +1,1882 @@ +package groupware + +import ( + "context" + "fmt" + "io" + "mime" + "net/http" + "slices" + "strconv" + "strings" + "time" + + "github.com/go-chi/chi/v5" + "github.com/microcosm-cc/bluemonday" + "github.com/rs/zerolog" + + "github.com/opencloud-eu/opencloud/pkg/jmap" + "github.com/opencloud-eu/opencloud/pkg/log" + "github.com/opencloud-eu/opencloud/pkg/structs" + "github.com/opencloud-eu/opencloud/services/groupware/pkg/metrics" +) + +// When the request succeeds without a "since" query parameter. +// swagger:response GetAllEmailsInMailbox200 +type SwaggerGetAllEmailsInMailbox200 struct { + // in: body + Body struct { + *jmap.Emails + } +} + +// When the request succeeds with a "since" query parameter. 
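+// The body then describes what changed in the mailbox since the supplied
+// state instead of returning a plain list of emails.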
+// swagger:response GetAllEmailsInMailboxSince200 +type SwaggerGetAllEmailsInMailboxSince200 struct { + // in: body + Body struct { + *jmap.MailboxChanges + } +} + +// swagger:route GET /groupware/accounts/{account}/mailboxes/{mailbox}/emails email get_all_emails_in_mailbox +// Get all the emails in a mailbox. +// +// Retrieve the list of all the emails that are in a given mailbox. +// +// The mailbox must be specified by its id, as part of the request URL path. +// +// A limit and an offset may be specified using the query parameters 'limit' and 'offset', +// respectively. +// +// When the query parameter 'since' or the 'if-none-match' header is specified, then the +// request behaves differently, performing a changes query to determine what has changed in +// that mailbox since a given state identifier. +// +// responses: +// +// 200: GetAllEmailsInMailbox200 +// 200: GetAllEmailsInMailboxSince200 +// 400: ErrorResponse400 +// 404: ErrorResponse404 +// 500: ErrorResponse500 +func (g *Groupware) GetAllEmailsInMailbox(w http.ResponseWriter, r *http.Request) { + mailboxId := chi.URLParam(r, UriParamMailboxId) + since := r.Header.Get(HeaderSince) + + if since != "" { + // ... then it's a completely different operation + maxChanges := uint(0) + g.respond(w, r, func(req Request) Response { + accountId, err := req.GetAccountIdForMail() + if err != nil { + return errorResponse(single(accountId), err) + } + + if mailboxId == "" { + return req.parameterErrorResponse(single(accountId), UriParamMailboxId, fmt.Sprintf("Missing required mailbox ID path parameter '%v'", UriParamMailboxId)) + } + + logger := log.From(req.logger.With().Str(HeaderSince, log.SafeString(since)).Str(logAccountId, log.SafeString(accountId))) + + changes, sessionState, state, lang, jerr := g.jmap.GetMailboxChanges(accountId, req.session, req.ctx, logger, req.language(), mailboxId, since, true, g.config.maxBodyValueBytes, maxChanges) + if jerr != nil { + return req.errorResponseFromJmap(single(accountId), jerr) + } + + return etagResponse(single(accountId), changes, sessionState, EmailResponseObjectType, state, lang) + }) + } else { + g.respond(w, r, func(req Request) Response { + l := req.logger.With() + + accountId, err := req.GetAccountIdForMail() + if err != nil { + return errorResponse(single(accountId), err) + } + l = l.Str(logAccountId, accountId) + + if mailboxId == "" { + return req.parameterErrorResponse(single(accountId), UriParamMailboxId, fmt.Sprintf("Missing required mailbox ID path parameter '%v'", UriParamMailboxId)) + } + + offset, ok, err := req.parseIntParam(QueryParamOffset, 0) + if err != nil { + return errorResponse(single(accountId), err) + } + if ok { + l = l.Int(QueryParamOffset, offset) + } + + limit, ok, err := req.parseUIntParam(QueryParamLimit, g.defaults.emailLimit) + if err != nil { + return errorResponse(single(accountId), err) + } + if ok { + l = l.Uint(QueryParamLimit, limit) + } + + logger := log.From(l) + + collapseThreads := false + fetchBodies := false + withThreads := true + + emails, sessionState, state, lang, jerr := g.jmap.GetAllEmailsInMailbox(accountId, req.session, req.ctx, logger, req.language(), mailboxId, offset, limit, collapseThreads, fetchBodies, g.config.maxBodyValueBytes, withThreads) + if jerr != nil { + return req.errorResponseFromJmap(single(accountId), jerr) + } + + sanitized, err := req.sanitizeEmails(emails.Emails) + if err != nil { + return errorResponseWithSessionState(single(accountId), err, sessionState) + } + + safe := jmap.Emails{ + Emails: sanitized, + Total: 
emails.Total, + Limit: emails.Limit, + Offset: emails.Offset, + } + + return etagResponse(single(accountId), safe, sessionState, EmailResponseObjectType, state, lang) + }) + } +} + +func (g *Groupware) GetEmailsById(w http.ResponseWriter, r *http.Request) { + id := chi.URLParam(r, UriParamEmailId) + ids := strings.Split(id, ",") + + accept := r.Header.Get("Accept") + if accept == "message/rfc822" { + g.stream(w, r, func(req Request, w http.ResponseWriter) *Error { + if len(ids) != 1 { + return req.parameterError(UriParamEmailId, fmt.Sprintf("when the Accept header is set to '%s', the API only supports serving a single email id", accept)) + } + + accountId, err := req.GetAccountIdForMail() + if err != nil { + return err + } + + _, ok, err := req.parseBoolParam(QueryParamMarkAsSeen, false) + if err != nil { + return err + } + if ok { + return req.parameterError(QueryParamMarkAsSeen, fmt.Sprintf("when the Accept header is set to '%s', the API does not support setting %s", accept, QueryParamMarkAsSeen)) + } + + logger := log.From(req.logger.With().Str(logAccountId, log.SafeString(accountId)).Str("id", log.SafeString(id)).Str("accept", log.SafeString(accept))) + + blobId, _, _, _, jerr := g.jmap.GetEmailBlobId(accountId, req.session, req.ctx, logger, req.language(), id) + if jerr != nil { + return req.apiErrorFromJmap(req.observeJmapError(jerr)) + } + if blobId == "" { + return nil + } else { + name := blobId + ".eml" + typ := accept + accountId, gwerr := req.GetAccountIdForBlob() + if gwerr != nil { + return gwerr + } + return req.serveBlob(blobId, name, typ, logger, accountId, w) + } + }) + } else { + g.respond(w, r, func(req Request) Response { + accountId, err := req.GetAccountIdForMail() + if err != nil { + return errorResponse(single(accountId), err) + } + l := req.logger.With().Str(logAccountId, log.SafeString(accountId)) + + if len(ids) < 1 { + return req.parameterErrorResponse(single(accountId), UriParamEmailId, fmt.Sprintf("Invalid value for path parameter '%v': '%s': %s", UriParamEmailId, log.SafeString(id), "empty list of mail ids")) + } + + markAsSeen, ok, err := req.parseBoolParam(QueryParamMarkAsSeen, false) + if err != nil { + return errorResponse(single(accountId), err) + } + if ok { + l = l.Bool(QueryParamMarkAsSeen, markAsSeen) + } + + if len(ids) == 1 { + logger := log.From(l.Str("id", log.SafeString(id))) + + emails, _, sessionState, state, lang, jerr := g.jmap.GetEmails(accountId, req.session, req.ctx, logger, req.language(), ids, true, g.config.maxBodyValueBytes, markAsSeen, true) + if jerr != nil { + return req.errorResponseFromJmap(single(accountId), jerr) + } + if len(emails) < 1 { + return notFoundResponse(single(accountId), sessionState) + } else { + sanitized, err := req.sanitizeEmail(emails[0]) + if err != nil { + return errorResponseWithSessionState(single(accountId), err, sessionState) + } + return etagResponse(single(accountId), sanitized, sessionState, EmailResponseObjectType, state, lang) + } + } else { + logger := log.From(l.Array("ids", log.SafeStringArray(ids))) + + emails, _, sessionState, state, lang, jerr := g.jmap.GetEmails(accountId, req.session, req.ctx, logger, req.language(), ids, true, g.config.maxBodyValueBytes, markAsSeen, false) + if jerr != nil { + return req.errorResponseFromJmap(single(accountId), jerr) + } + if len(emails) < 1 { + return notFoundResponse(single(accountId), sessionState) + } else { + sanitized, err := req.sanitizeEmails(emails) + if err != nil { + return errorResponseWithSessionState(single(accountId), err, sessionState) + } 
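+ // Every returned email has had its HTML body parts filtered through the
+ // bluemonday UGC policy (see sanitizeEmails at the end of this file),
+ // provided sanitization is enabled in the service configuration.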
+ return etagResponse(single(accountId), sanitized, sessionState, EmailResponseObjectType, state, lang) + } + } + }) + } +} + +func (g *Groupware) GetEmailAttachments(w http.ResponseWriter, r *http.Request) { + id := chi.URLParam(r, UriParamEmailId) + + contextAppender := func(l zerolog.Context) zerolog.Context { return l } + q := r.URL.Query() + var attachmentSelector func(jmap.EmailBodyPart) bool = nil + if q.Has(QueryParamPartId) { + partId := q.Get(QueryParamPartId) + attachmentSelector = func(part jmap.EmailBodyPart) bool { return part.PartId == partId } + contextAppender = func(l zerolog.Context) zerolog.Context { return l.Str(QueryParamPartId, log.SafeString(partId)) } + } + if q.Has(QueryParamAttachmentName) { + name := q.Get(QueryParamAttachmentName) + attachmentSelector = func(part jmap.EmailBodyPart) bool { return part.Name == name } + contextAppender = func(l zerolog.Context) zerolog.Context { return l.Str(QueryParamAttachmentName, log.SafeString(name)) } + } + if q.Has(QueryParamAttachmentBlobId) { + blobId := q.Get(QueryParamAttachmentBlobId) + attachmentSelector = func(part jmap.EmailBodyPart) bool { return part.BlobId == blobId } + contextAppender = func(l zerolog.Context) zerolog.Context { + return l.Str(QueryParamAttachmentBlobId, log.SafeString(blobId)) + } + } + + if attachmentSelector == nil { + g.respond(w, r, func(req Request) Response { + accountId, err := req.GetAccountIdForMail() + if err != nil { + return errorResponse(single(accountId), err) + } + l := req.logger.With().Str(logAccountId, log.SafeString(accountId)) + logger := log.From(l) + emails, _, sessionState, state, lang, jerr := g.jmap.GetEmails(accountId, req.session, req.ctx, logger, req.language(), []string{id}, false, 0, false, false) + if jerr != nil { + return req.errorResponseFromJmap(single(accountId), jerr) + } + if len(emails) < 1 { + return notFoundResponse(single(accountId), sessionState) + } + email, err := req.sanitizeEmail(emails[0]) + if err != nil { + return errorResponseWithSessionState(single(accountId), err, sessionState) + } + return etagResponse(single(accountId), email.Attachments, sessionState, EmailResponseObjectType, state, lang) + }) + } else { + g.stream(w, r, func(req Request, w http.ResponseWriter) *Error { + mailAccountId, gwerr := req.GetAccountIdForMail() + if gwerr != nil { + return gwerr + } + blobAccountId, gwerr := req.GetAccountIdForBlob() + if gwerr != nil { + return gwerr + } + + l := req.logger.With().Str(logAccountId, log.SafeString(mailAccountId)).Str(logBlobAccountId, log.SafeString(blobAccountId)) + l = contextAppender(l) + logger := log.From(l) + + emails, _, _, _, lang, jerr := g.jmap.GetEmails(mailAccountId, req.session, req.ctx, logger, req.language(), []string{id}, false, 0, false, false) + if jerr != nil { + return req.apiErrorFromJmap(req.observeJmapError(jerr)) + } + if len(emails) < 1 { + return nil + } + + email, err := req.sanitizeEmail(emails[0]) + if err != nil { + return err + } + var attachment *jmap.EmailBodyPart = nil + for _, part := range email.Attachments { + if attachmentSelector(part) { + attachment = &part + break + } + } + if attachment == nil { + return nil + } + + blob, lang, jerr := g.jmap.DownloadBlobStream(blobAccountId, attachment.BlobId, attachment.Name, attachment.Type, req.session, req.ctx, logger, req.language()) + if blob != nil && blob.Body != nil { + defer func(Body io.ReadCloser) { + err := Body.Close() + if err != nil { + logger.Error().Err(err).Msg("failed to close response body") + } + }(blob.Body) + } + if jerr != nil { 
+ return req.apiErrorFromJmap(jerr) + } + if blob == nil { + w.WriteHeader(http.StatusNotFound) + return nil + } + + if blob.Type != "" { + w.Header().Add("Content-Type", blob.Type) + } + if blob.CacheControl != "" { + w.Header().Add("Cache-Control", blob.CacheControl) + } + if blob.ContentDisposition != "" { + w.Header().Add("Content-Disposition", blob.ContentDisposition) + } + if blob.Size >= 0 { + w.Header().Add("Content-Size", strconv.Itoa(blob.Size)) + } + if lang != "" { + w.Header().Add("Content-Language", string(lang)) + } + _, cerr := io.Copy(w, blob.Body) + if cerr != nil { + return req.observedParameterError(ErrorStreamingResponse) + } + + return nil + }) + } +} + +func (g *Groupware) getEmailsSince(w http.ResponseWriter, r *http.Request, since jmap.State) { + g.respond(w, r, func(req Request) Response { + l := req.logger.With().Str(QueryParamSince, log.SafeString(string(since))) + + accountId, err := req.GetAccountIdForMail() + if err != nil { + return errorResponse(single(accountId), err) + } + l = l.Str(logAccountId, log.SafeString(accountId)) + + maxChanges, ok, err := req.parseUIntParam(QueryParamMaxChanges, 0) + if err != nil { + return errorResponse(single(accountId), err) + } + if ok { + l = l.Uint(QueryParamMaxChanges, maxChanges) + } + + logger := log.From(l) + + changes, sessionState, state, lang, jerr := g.jmap.GetEmailsSince(accountId, req.session, req.ctx, logger, req.language(), since, true, g.config.maxBodyValueBytes, maxChanges) + if jerr != nil { + return req.errorResponseFromJmap(single(accountId), jerr) + } + + return etagResponse(single(accountId), changes, sessionState, EmailResponseObjectType, state, lang) + }) +} + +type EmailSearchSnippetsResults struct { + Results []Snippet `json:"results,omitempty"` + Total uint `json:"total,omitzero"` + Limit uint `json:"limit,omitzero"` + QueryState jmap.State `json:"queryState,omitempty"` +} + +type EmailWithSnippets struct { + AccountId string `json:"accountId,omitempty"` + jmap.Email + Snippets []SnippetWithoutEmailId `json:"snippets,omitempty"` +} + +type Snippet struct { + AccountId string `json:"accountId,omitempty"` + jmap.SearchSnippetWithMeta +} + +type SnippetWithoutEmailId struct { + Subject string `json:"subject,omitempty"` + Preview string `json:"preview,omitempty"` +} + +type EmailWithSnippetsSearchResults struct { + Results []EmailWithSnippets `json:"results"` + Total uint `json:"total,omitzero"` + Limit uint `json:"limit,omitzero"` + QueryState jmap.State `json:"queryState,omitempty"` +} + +type EmailSearchResults struct { + Results []jmap.Email `json:"results"` + Total uint `json:"total,omitzero"` + Limit uint `json:"limit,omitzero"` + QueryState jmap.State `json:"queryState,omitempty"` +} + +func (g *Groupware) buildFilter(req Request) (bool, jmap.EmailFilterElement, bool, int, uint, *log.Logger, *Error) { + q := req.r.URL.Query() + mailboxId := q.Get(QueryParamMailboxId) + notInMailboxIds := q[QueryParamNotInMailboxId] + text := q.Get(QueryParamSearchText) + from := q.Get(QueryParamSearchFrom) + to := q.Get(QueryParamSearchTo) + cc := q.Get(QueryParamSearchCc) + bcc := q.Get(QueryParamSearchBcc) + subject := q.Get(QueryParamSearchSubject) + body := q.Get(QueryParamSearchBody) + keywords := q[QueryParamSearchKeyword] + messageId := q.Get(QueryParamSearchMessageId) + + snippets := false + + l := req.logger.With() + + offset, ok, err := req.parseIntParam(QueryParamOffset, 0) + if err != nil { + return false, nil, snippets, 0, 0, nil, err + } + if ok { + l = l.Int(QueryParamOffset, offset) + } + + 
limit, ok, err := req.parseUIntParam(QueryParamLimit, g.defaults.emailLimit) + if err != nil { + return false, nil, snippets, 0, 0, nil, err + } + if ok { + l = l.Uint(QueryParamLimit, limit) + } + + before, ok, err := req.parseDateParam(QueryParamSearchBefore) + if err != nil { + return false, nil, snippets, 0, 0, nil, err + } + if ok { + l = l.Time(QueryParamSearchBefore, before) + } + + after, ok, err := req.parseDateParam(QueryParamSearchAfter) + if err != nil { + return false, nil, snippets, 0, 0, nil, err + } + if ok { + l = l.Time(QueryParamSearchAfter, after) + } + + if mailboxId != "" { + l = l.Str(QueryParamMailboxId, log.SafeString(mailboxId)) + } + if len(notInMailboxIds) > 0 { + l = l.Array(QueryParamNotInMailboxId, log.SafeStringArray(notInMailboxIds)) + } + if text != "" { + l = l.Str(QueryParamSearchText, log.SafeString(text)) + } + if from != "" { + l = l.Str(QueryParamSearchFrom, log.SafeString(from)) + } + if to != "" { + l = l.Str(QueryParamSearchTo, log.SafeString(to)) + } + if cc != "" { + l = l.Str(QueryParamSearchCc, log.SafeString(cc)) + } + if bcc != "" { + l = l.Str(QueryParamSearchBcc, log.SafeString(bcc)) + } + if subject != "" { + l = l.Str(QueryParamSearchSubject, log.SafeString(subject)) + } + if body != "" { + l = l.Str(QueryParamSearchBody, log.SafeString(body)) + } + if messageId != "" { + l = l.Str(QueryParamSearchMessageId, log.SafeString(messageId)) + } + + minSize, ok, err := req.parseIntParam(QueryParamSearchMinSize, 0) + if err != nil { + return false, nil, snippets, 0, 0, nil, err + } + if ok { + l = l.Int(QueryParamSearchMinSize, minSize) + } + + maxSize, ok, err := req.parseIntParam(QueryParamSearchMaxSize, 0) + if err != nil { + return false, nil, snippets, 0, 0, nil, err + } + if ok { + l = l.Int(QueryParamSearchMaxSize, maxSize) + } + + logger := log.From(l) + + var filter jmap.EmailFilterElement + + firstFilter := jmap.EmailFilterCondition{ + Text: text, + InMailbox: mailboxId, + InMailboxOtherThan: notInMailboxIds, + From: from, + To: to, + Cc: cc, + Bcc: bcc, + Subject: subject, + Body: body, + Before: before, + After: after, + MinSize: minSize, + MaxSize: maxSize, + Header: []string{}, + } + if messageId != "" { + // The array MUST contain either one or two elements. + // The first element is the name of the header field to match against. + // The second (optional) element is the text to look for in the header field value. + // If not supplied, the message matches simply if it has a header field of the given name. 
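+ // As an illustration (the value below is made up), a lookup for one
+ // specific message would pass both elements, e.g.
+ //   []string{"Message-ID", "<20240101.1234@example.org>"}
+ // whereas []string{"Message-ID"} alone would match any email that merely
+ // carries a Message-ID header field.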
+ firstFilter.Header = []string{"Message-ID", messageId} + } + filter = &firstFilter + + if text != "" || subject != "" || body != "" { + snippets = true + } + + if len(keywords) > 0 { + firstFilter.HasKeyword = keywords[0] + if len(keywords) > 1 { + firstFilter.HasKeyword = keywords[0] + filters := make([]jmap.EmailFilterElement, len(keywords)-1) + for i, keyword := range keywords[1:] { + filters[i] = jmap.EmailFilterCondition{HasKeyword: keyword} + } + filter = &jmap.EmailFilterOperator{ + Operator: jmap.And, + Conditions: filters, + } + } + } + + return true, filter, snippets, offset, limit, logger, nil +} + +func (g *Groupware) GetEmails(w http.ResponseWriter, r *http.Request) { + q := r.URL.Query() + since := q.Get(QueryParamSince) + if since == "" { + since = r.Header.Get(HeaderSince) + } + if since != "" { + // get email changes since a given state + g.getEmailsSince(w, r, jmap.State(since)) + } else { + // do a search + g.respond(w, r, func(req Request) Response { + accountId, err := req.GetAccountIdForMail() + if err != nil { + return errorResponse(single(accountId), err) + } + + ok, filter, makesSnippets, offset, limit, logger, err := g.buildFilter(req) + if !ok { + return errorResponse(single(accountId), err) + } + logger = log.From(req.logger.With().Str(logAccountId, log.SafeString(accountId))) + + if !filter.IsNotEmpty() { + filter = nil + } + + fetchBodies := false + + resultsByAccount, sessionState, state, lang, jerr := g.jmap.QueryEmailsWithSnippets(single(accountId), filter, req.session, req.ctx, logger, req.language(), offset, limit, fetchBodies, g.config.maxBodyValueBytes) + if jerr != nil { + return req.errorResponseFromJmap(single(accountId), jerr) + } + + if results, ok := resultsByAccount[accountId]; ok { + flattened := make([]EmailWithSnippets, len(results.Results)) + for i, result := range results.Results { + var snippets []SnippetWithoutEmailId + if makesSnippets { + snippets := make([]SnippetWithoutEmailId, len(result.Snippets)) + for j, snippet := range result.Snippets { + snippets[j] = SnippetWithoutEmailId{ + Subject: snippet.Subject, + Preview: snippet.Preview, + } + } + } else { + snippets = nil + } + sanitized, err := req.sanitizeEmail(result.Email) + if err != nil { + return errorResponseWithSessionState(single(accountId), err, sessionState) + } + flattened[i] = EmailWithSnippets{ + Email: sanitized, + Snippets: snippets, + } + } + + return etagResponse(single(accountId), EmailWithSnippetsSearchResults{ + Results: flattened, + Total: results.Total, + Limit: results.Limit, + QueryState: results.QueryState, + }, sessionState, EmailResponseObjectType, state, lang) + } else { + return notFoundResponse(single(accountId), sessionState) + } + }) + } +} + +func (g *Groupware) GetEmailsForAllAccounts(w http.ResponseWriter, r *http.Request) { + g.respond(w, r, func(req Request) Response { + allAccountIds := req.AllAccountIds() + + ok, filter, makesSnippets, offset, limit, logger, err := g.buildFilter(req) + if !ok { + return errorResponse(allAccountIds, err) + } + logger = log.From(req.logger.With().Array(logAccountId, log.SafeStringArray(allAccountIds))) + + if !filter.IsNotEmpty() { + filter = nil + } + + if makesSnippets { + resultsByAccountId, sessionState, state, lang, jerr := g.jmap.QueryEmailSnippets(allAccountIds, filter, req.session, req.ctx, logger, req.language(), offset, limit) + if jerr != nil { + return req.errorResponseFromJmap(allAccountIds, jerr) + } + + var totalOverAllAccounts uint = 0 + total := 0 + for _, results := range resultsByAccountId { + 
totalOverAllAccounts += results.Total + total += len(results.Snippets) + } + + flattened := make([]Snippet, total) + { + i := 0 + for accountId, results := range resultsByAccountId { + for _, result := range results.Snippets { + flattened[i] = Snippet{ + AccountId: accountId, + SearchSnippetWithMeta: result, + } + } + } + } + + slices.SortFunc(flattened, func(a, b Snippet) int { return a.ReceivedAt.Compare(b.ReceivedAt) }) + + // TODO offset and limit over the aggregated results by account + + return etagResponse(allAccountIds, EmailSearchSnippetsResults{ + Results: flattened, + Total: totalOverAllAccounts, + Limit: limit, + QueryState: state, + }, sessionState, EmailResponseObjectType, state, lang) + } else { + withThreads := true + + resultsByAccountId, sessionState, state, lang, jerr := g.jmap.QueryEmailSummaries(allAccountIds, req.session, req.ctx, logger, req.language(), filter, limit, withThreads) + if jerr != nil { + return req.errorResponseFromJmap(allAccountIds, jerr) + } + + var totalAcrossAllAccounts uint = 0 + total := 0 + for _, results := range resultsByAccountId { + totalAcrossAllAccounts += results.Total + total += len(results.Emails) + } + + flattened := make([]jmap.Email, total) + { + i := 0 + for accountId, results := range resultsByAccountId { + for _, result := range results.Emails { + result.AccountId = accountId + flattened[i] = result + i++ + } + } + } + + slices.SortFunc(flattened, func(a, b jmap.Email) int { return a.ReceivedAt.Compare(b.ReceivedAt) }) + + // TODO offset and limit over the aggregated results by account + + return etagResponse(allAccountIds, EmailSearchResults{ + Results: flattened, + Total: totalAcrossAllAccounts, + Limit: limit, + QueryState: state, + }, sessionState, EmailResponseObjectType, state, lang) + } + }) +} + +var draftEmailAutoMailboxRolePrecedence = []string{ + jmap.JmapMailboxRoleDrafts, // we want draft emails to be created in the Mailbox with the drafts role + jmap.JmapMailboxRoleInbox, // but if there is none, we will use the Mailbox with the inbox role instead +} + +func findDraftsMailboxId(j *jmap.Client, accountId string, req Request, logger *log.Logger) (string, Response) { + mailboxIdsPerAccountIds, _, _, _, jerr := j.SearchMailboxIdsPerRole(single(accountId), req.session, req.ctx, logger, req.language(), draftEmailAutoMailboxRolePrecedence) + if jerr != nil { + return "", req.errorResponseFromJmap(single(accountId), jerr) + } else { + for _, role := range draftEmailAutoMailboxRolePrecedence { + if mailboxId, ok := mailboxIdsPerAccountIds[accountId][role]; ok { + return mailboxId, Response{} + } + } + // couldn't find a Mailbox with the drafts role for that account, + // we have to return an error... ? 
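+ // Callers such as CreateEmail and ReplaceEmail rely on this lookup when the
+ // request body carries no mailboxIds, so without a drafts- or inbox-role
+ // mailbox the only remaining option is to report ErrorNoMailboxWithDraftRole.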
+ return "", errorResponse(single(accountId), apiError(req.errorId(), ErrorNoMailboxWithDraftRole)) + } +} + +var sentEmailAutoMailboxRolePrecedence = []string{ + jmap.JmapMailboxRoleSent, // we want sent emails to be created in the Mailbox with the sent role + jmap.JmapMailboxRoleInbox, // but if there is none, we will use the Mailbox with the inbox role instead +} + +var draftAndSentMailboxRoles = structs.Uniq(structs.Concat(draftEmailAutoMailboxRolePrecedence, sentEmailAutoMailboxRolePrecedence)) + +func findSentMailboxId(j *jmap.Client, accountId string, req Request, logger *log.Logger) (string, string, Response) { + mailboxIdsPerAccountIds, _, _, _, jerr := j.SearchMailboxIdsPerRole(single(accountId), req.session, req.ctx, logger, req.language(), draftAndSentMailboxRoles) + if jerr != nil { + return "", "", req.errorResponseFromJmap(single(accountId), jerr) + } else { + sentMailboxId := "" + for _, role := range sentEmailAutoMailboxRolePrecedence { + if mailboxId, ok := mailboxIdsPerAccountIds[accountId][role]; ok { + sentMailboxId = mailboxId + break + } + } + if sentMailboxId == "" { + return "", "", errorResponse(single(accountId), apiError(req.errorId(), ErrorNoMailboxWithSentRole)) + } + draftsMailboxId := "" + for _, role := range draftEmailAutoMailboxRolePrecedence { + if mailboxId, ok := mailboxIdsPerAccountIds[accountId][role]; ok { + draftsMailboxId = mailboxId + break + } + } + if draftsMailboxId == "" { + return "", "", errorResponse(single(accountId), apiError(req.errorId(), ErrorNoMailboxWithDraftRole)) + } + return draftsMailboxId, sentMailboxId, Response{} + } +} + +func (g *Groupware) CreateEmail(w http.ResponseWriter, r *http.Request) { + g.respond(w, r, func(req Request) Response { + logger := req.logger + + accountId, gwerr := req.GetAccountIdForMail() + if gwerr != nil { + return errorResponse(single(accountId), gwerr) + } + logger = log.From(logger.With().Str(logAccountId, log.SafeString(accountId))) + + var body jmap.EmailCreate + err := req.body(&body) + if err != nil { + return errorResponse(single(accountId), err) + } + + if len(body.MailboxIds) < 1 { + mailboxId, resp := findDraftsMailboxId(g.jmap, accountId, req, logger) + if mailboxId != "" { + body.MailboxIds[mailboxId] = true + } else { + return resp + } + } + + created, sessionState, state, lang, jerr := g.jmap.CreateEmail(accountId, body, "", req.session, req.ctx, logger, req.language()) + if jerr != nil { + return req.errorResponseFromJmap(single(accountId), jerr) + } + + return etagResponse(single(accountId), created, sessionState, EmailResponseObjectType, state, lang) + }) +} + +func (g *Groupware) ReplaceEmail(w http.ResponseWriter, r *http.Request) { + g.respond(w, r, func(req Request) Response { + logger := req.logger + + accountId, gwerr := req.GetAccountIdForMail() + if gwerr != nil { + return errorResponse(single(accountId), gwerr) + } + + replaceId := chi.URLParam(r, UriParamEmailId) + + logger = log.From(logger.With().Str(logAccountId, log.SafeString(accountId))) + + var body jmap.EmailCreate + err := req.body(&body) + if err != nil { + return errorResponse(single(accountId), err) + } + + if len(body.MailboxIds) < 1 { + mailboxId, resp := findDraftsMailboxId(g.jmap, accountId, req, logger) + if mailboxId != "" { + body.MailboxIds[mailboxId] = true + } else { + return resp + } + } + + created, sessionState, state, lang, jerr := g.jmap.CreateEmail(accountId, body, replaceId, req.session, req.ctx, logger, req.language()) + if jerr != nil { + return req.errorResponseFromJmap(single(accountId), 
jerr) + } + + return etagResponse(single(accountId), created, sessionState, EmailResponseObjectType, state, lang) + }) +} + +// swagger:parameters update_email +type SwaggerUpdateEmailBody struct { + // List of identifiers of emails to delete. + // in: body + // example: ["caen3iujoo8u", "aec8phaetaiz", "bohna0me"] + Body map[string]string +} + +func (g *Groupware) UpdateEmail(w http.ResponseWriter, r *http.Request) { + g.respond(w, r, func(req Request) Response { + emailId := chi.URLParam(r, UriParamEmailId) + + l := req.logger.With() + l.Str(UriParamEmailId, log.SafeString(emailId)) + + accountId, gwerr := req.GetAccountIdForMail() + if gwerr != nil { + return errorResponse(single(accountId), gwerr) + } + l.Str(logAccountId, accountId) + + logger := log.From(l) + + var body map[string]any + err := req.body(&body) + if err != nil { + return errorResponse(single(accountId), err) + } + + updates := map[string]jmap.EmailUpdate{ + emailId: body, + } + + result, sessionState, state, lang, jerr := g.jmap.UpdateEmails(accountId, updates, req.session, req.ctx, logger, req.language()) + if jerr != nil { + return req.errorResponseFromJmap(single(accountId), jerr) + } + + if result == nil { + return errorResponse(single(accountId), apiError(req.errorId(), ErrorApiInconsistency, withTitle("API Inconsistency: Missing Email Update Response", + "An internal API behaved unexpectedly: missing Email update response from JMAP endpoint"))) + } + updatedEmail, ok := result[emailId] + if !ok { + return errorResponse(single(accountId), apiError(req.errorId(), ErrorApiInconsistency, withTitle("API Inconsistency: Wrong Email Update Response ID", + "An internal API behaved unexpectedly: wrong Email update ID response from JMAP endpoint"))) + } + + return etagResponse(single(accountId), updatedEmail, sessionState, EmailResponseObjectType, state, lang) + }) +} + +type emailKeywordUpdates struct { + Add []string `json:"add,omitempty"` + Remove []string `json:"remove,omitempty"` +} + +func (e emailKeywordUpdates) IsEmpty() bool { + return len(e.Add) == 0 && len(e.Remove) == 0 +} + +func (g *Groupware) UpdateEmailKeywords(w http.ResponseWriter, r *http.Request) { + g.respond(w, r, func(req Request) Response { + emailId := chi.URLParam(r, UriParamEmailId) + + l := req.logger.With() + l.Str(UriParamEmailId, log.SafeString(emailId)) + + accountId, gwerr := req.GetAccountIdForMail() + if gwerr != nil { + return errorResponse(single(accountId), gwerr) + } + l.Str(logAccountId, accountId) + + logger := log.From(l) + + var body emailKeywordUpdates + err := req.body(&body) + if err != nil { + return errorResponse(single(accountId), err) + } + + if body.IsEmpty() { + return noContentResponse(single(accountId), req.session.State) + } + + patch := jmap.EmailUpdate{} + for _, keyword := range body.Add { + patch["keywords/"+keyword] = true + } + for _, keyword := range body.Remove { + patch["keywords/"+keyword] = nil + } + patches := map[string]jmap.EmailUpdate{ + emailId: patch, + } + + result, sessionState, state, lang, jerr := g.jmap.UpdateEmails(accountId, patches, req.session, req.ctx, logger, req.language()) + if jerr != nil { + return req.errorResponseFromJmap(single(accountId), jerr) + } + + if result == nil { + return errorResponse(single(accountId), apiError(req.errorId(), ErrorApiInconsistency, withTitle("API Inconsistency: Missing Email Update Response", + "An internal API behaved unexpectedly: missing Email update response from JMAP endpoint"))) + } + updatedEmail, ok := result[emailId] + if !ok { + return 
errorResponse(single(accountId), apiError(req.errorId(), ErrorApiInconsistency, withTitle("API Inconsistency: Wrong Email Update Response ID", + "An internal API behaved unexpectedly: wrong Email update ID response from JMAP endpoint"))) + } + + return etagResponse(single(accountId), updatedEmail, sessionState, EmailResponseObjectType, state, lang) + }) +} + +// swagger:route POST /groupware/accounts/{account}/emails/{emailid}/keywords email add_email_keywords +// Add keywords to an email by its unique identifier. +// +// responses: +// +// 204: Success204 +// 400: ErrorResponse400 +// 404: ErrorResponse404 +// 500: ErrorResponse500 +func (g *Groupware) AddEmailKeywords(w http.ResponseWriter, r *http.Request) { + g.respond(w, r, func(req Request) Response { + emailId := chi.URLParam(r, UriParamEmailId) + + l := req.logger.With() + l.Str(UriParamEmailId, log.SafeString(emailId)) + + accountId, gwerr := req.GetAccountIdForMail() + if gwerr != nil { + return errorResponse(single(accountId), gwerr) + } + l.Str(logAccountId, accountId) + + logger := log.From(l) + + var body []string + err := req.body(&body) + if err != nil { + return errorResponse(single(accountId), err) + } + + if len(body) < 1 { + return noContentResponse(single(accountId), req.session.State) + } + + patch := jmap.EmailUpdate{} + for _, keyword := range body { + patch["keywords/"+keyword] = true + } + patches := map[string]jmap.EmailUpdate{ + emailId: patch, + } + + result, sessionState, state, lang, jerr := g.jmap.UpdateEmails(accountId, patches, req.session, req.ctx, logger, req.language()) + if jerr != nil { + return req.errorResponseFromJmap(single(accountId), jerr) + } + + if result == nil { + return errorResponse(single(accountId), apiError(req.errorId(), ErrorApiInconsistency, withTitle("API Inconsistency: Missing Email Update Response", + "An internal API behaved unexpectedly: missing Email update response from JMAP endpoint"))) + } + updatedEmail, ok := result[emailId] + if !ok { + return errorResponse(single(accountId), apiError(req.errorId(), ErrorApiInconsistency, withTitle("API Inconsistency: Wrong Email Update Response ID", + "An internal API behaved unexpectedly: wrong Email update ID response from JMAP endpoint"))) + } + + if updatedEmail == nil { + return noContentResponseWithEtag(single(accountId), sessionState, EmailResponseObjectType, state) + } else { + return etagResponse(single(accountId), updatedEmail, sessionState, EmailResponseObjectType, state, lang) + } + }) +} + +// swagger:route DELETE /groupware/accounts/{account}/emails/{emailid}/keywords email remove_email_keywords +// Remove keywords of an email by its unique identifier. 
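+//
+// The keywords to remove are passed in the request body as a JSON array of
+// strings, mirroring the add_email_keywords route, for example:
+//
+//	["$seen", "$flagged"]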
+// +// responses: +// +// 204: Success204 +// 400: ErrorResponse400 +// 404: ErrorResponse404 +// 500: ErrorResponse500 +func (g *Groupware) RemoveEmailKeywords(w http.ResponseWriter, r *http.Request) { + g.respond(w, r, func(req Request) Response { + emailId := chi.URLParam(r, UriParamEmailId) + + l := req.logger.With() + l.Str(UriParamEmailId, log.SafeString(emailId)) + + accountId, gwerr := req.GetAccountIdForMail() + if gwerr != nil { + return errorResponse(single(accountId), gwerr) + } + l.Str(logAccountId, accountId) + + logger := log.From(l) + + var body []string + err := req.body(&body) + if err != nil { + return errorResponse(single(accountId), err) + } + + if len(body) < 1 { + return noContentResponse(single(accountId), req.session.State) + } + + patch := jmap.EmailUpdate{} + for _, keyword := range body { + patch["keywords/"+keyword] = nil + } + patches := map[string]jmap.EmailUpdate{ + emailId: patch, + } + + result, sessionState, state, lang, jerr := g.jmap.UpdateEmails(accountId, patches, req.session, req.ctx, logger, req.language()) + if jerr != nil { + return req.errorResponseFromJmap(single(accountId), jerr) + } + + if result == nil { + return errorResponse(single(accountId), apiError(req.errorId(), ErrorApiInconsistency, withTitle("API Inconsistency: Missing Email Update Response", + "An internal API behaved unexpectedly: missing Email update response from JMAP endpoint"))) + } + updatedEmail, ok := result[emailId] + if !ok { + return errorResponse(single(accountId), apiError(req.errorId(), ErrorApiInconsistency, withTitle("API Inconsistency: Wrong Email Update Response ID", + "An internal API behaved unexpectedly: wrong Email update ID response from JMAP endpoint"))) + } + + if updatedEmail == nil { + return noContentResponseWithEtag(single(accountId), sessionState, EmailResponseObjectType, state) + } else { + return etagResponse(single(accountId), updatedEmail, sessionState, EmailResponseObjectType, state, lang) + } + }) +} + +// swagger:route DELETE /groupware/accounts/{account}/emails/{emailid} email delete_email +// Delete an email by its unique identifier. +// +// responses: +// +// 204: Success204 +// 400: ErrorResponse400 +// 404: ErrorResponse404 +// 500: ErrorResponse500 +func (g *Groupware) DeleteEmail(w http.ResponseWriter, r *http.Request) { + g.respond(w, r, func(req Request) Response { + emailId := chi.URLParam(r, UriParamEmailId) + + l := req.logger.With() + l.Str(UriParamEmailId, emailId) + + accountId, gwerr := req.GetAccountIdForMail() + if gwerr != nil { + return errorResponse(single(accountId), gwerr) + } + l.Str(logAccountId, accountId) + + logger := log.From(l) + + resp, sessionState, state, _, jerr := g.jmap.DeleteEmails(accountId, []string{emailId}, req.session, req.ctx, logger, req.language()) + if jerr != nil { + return req.errorResponseFromJmap(single(accountId), jerr) + } + + for _, e := range resp { + desc := e.Description + if desc != "" { + return errorResponseWithSessionState(single(accountId), apiError( + req.errorId(), + ErrorFailedToDeleteEmail, + withDetail(e.Description), + ), sessionState) + } else { + return errorResponseWithSessionState(single(accountId), apiError( + req.errorId(), + ErrorFailedToDeleteEmail, + ), sessionState) + } + } + return noContentResponseWithEtag(single(accountId), sessionState, EmailResponseObjectType, state) + }) +} + +// swagger:parameters delete_emails +type SwaggerDeleteEmailsBody struct { + // List of identifiers of emails to delete. 
+ // in: body + // example: ["caen3iujoo8u", "aec8phaetaiz", "bohna0me"] + Body []string +} + +// swagger:route DELETE /groupware/accounts/{account}/emails email delete_emails +// Delete a set of emails by their unique identifiers. +// +// The identifiers of the emails to delete are specified as part of the request +// body, as an array of strings. +// +// responses: +// +// 204: Success204 +// 400: ErrorResponse400 +// 404: ErrorResponse404 +// 500: ErrorResponse500 +func (g *Groupware) DeleteEmails(w http.ResponseWriter, r *http.Request) { + g.respond(w, r, func(req Request) Response { + l := req.logger.With() + + accountId, gwerr := req.GetAccountIdForMail() + if gwerr != nil { + return errorResponse(single(accountId), gwerr) + } + l.Str(logAccountId, accountId) + + var emailIds []string + err := req.body(&emailIds) + if err != nil { + return errorResponse(single(accountId), err) + } + + l.Array("emailIds", log.SafeStringArray(emailIds)) + + logger := log.From(l) + + resp, sessionState, state, _, jerr := g.jmap.DeleteEmails(accountId, emailIds, req.session, req.ctx, logger, req.language()) + if jerr != nil { + return req.errorResponseFromJmap(single(accountId), jerr) + } + + if len(resp) > 0 { + meta := make(map[string]any, len(resp)) + for emailId, e := range resp { + meta[emailId] = e.Description + } + return errorResponseWithSessionState(single(accountId), apiError( + req.errorId(), + ErrorFailedToDeleteEmail, + withMeta(meta), + ), sessionState) + } + return noContentResponseWithEtag(single(accountId), sessionState, EmailResponseObjectType, state) + }) +} + +func (g *Groupware) SendEmail(w http.ResponseWriter, r *http.Request) { + g.respond(w, r, func(req Request) Response { + l := req.logger.With() + + accountId, gwerr := req.GetAccountIdForMail() + if gwerr != nil { + return errorResponse(single(accountId), gwerr) + } + l.Str(logAccountId, accountId) + + emailId := chi.URLParam(r, UriParamEmailId) + l.Str(UriParamEmailId, log.SafeString(emailId)) + + identityId, err := req.getMandatoryStringParam(QueryParamIdentityId) + if err != nil { + return errorResponse(single(accountId), err) + } + l.Str(QueryParamIdentityId, log.SafeString(identityId)) + + var move *jmap.MoveMail = nil + { + moveFromMailboxId, _ := req.getStringParam(QueryParamMoveFromMailboxId, "") + moveToMailboxId, _ := req.getStringParam(QueryParamMoveToMailboxId, "") + + if moveFromMailboxId == "" || moveToMailboxId == "" { + draftsMailboxId, sentMailboxId, resp := findSentMailboxId(g.jmap, accountId, req, req.logger) + if draftsMailboxId != "" && sentMailboxId != "" { + if moveFromMailboxId == "" { + moveFromMailboxId = draftsMailboxId + } + if moveToMailboxId == "" { + moveToMailboxId = sentMailboxId + } + } else { + return resp + } + } + + // TODO some parameter to prevent moving the sent email from one Mailbox to another? 
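+
+ // Note: the QueryParamMoveFromMailboxId / QueryParamMoveToMailboxId query
+ // parameters override the role lookup above; when they are omitted, the
+ // submitted draft is moved from the mailbox with the drafts role into the
+ // mailbox with the sent role as part of the submission.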
+ + move = &jmap.MoveMail{FromMailboxId: moveFromMailboxId, ToMailboxId: moveToMailboxId} + l.Str(QueryParamMoveFromMailboxId, log.SafeString(moveFromMailboxId)).Str(QueryParamMoveToMailboxId, log.SafeString(moveFromMailboxId)) + } + + logger := log.From(l) + + resp, sessionState, state, lang, jerr := g.jmap.SubmitEmail(accountId, identityId, emailId, move, req.session, req.ctx, logger, req.language()) + if jerr != nil { + return req.errorResponseFromJmap(single(accountId), jerr) + } + + return etagResponse(single(accountId), resp, sessionState, EmailResponseObjectType, state, lang) + }) +} + +type AboutEmailsEvent struct { + Id string `json:"id"` + Source string `json:"source"` + Emails []jmap.Email `json:"emails"` + Language jmap.Language `json:"lang"` +} + +type AboutEmailResponse struct { + Email jmap.Email `json:"email"` + RequestId string `json:"requestId"` + Language jmap.Language `json:"lang"` +} + +func relatedEmailsFilter(email jmap.Email, beacon time.Time, days uint) jmap.EmailFilterElement { + filters := []jmap.EmailFilterElement{} + for _, from := range email.From { + if from.Email != "" { + filters = append(filters, jmap.EmailFilterCondition{From: from.Email}) + } + } + for _, sender := range email.Sender { + if sender.Email != "" { + filters = append(filters, jmap.EmailFilterCondition{From: sender.Email}) + } + } + + timeFilter := jmap.EmailFilterCondition{ + Before: beacon.Add(time.Duration(days) * time.Hour * 24), + After: beacon.Add(time.Duration(-days) * time.Hour * 24), + } + + var filter jmap.EmailFilterElement + if len(filters) > 0 { + filter = jmap.EmailFilterOperator{ + Operator: jmap.And, + Conditions: []jmap.EmailFilterElement{ + timeFilter, + jmap.EmailFilterOperator{ + Operator: jmap.Or, + Conditions: filters, + }, + }, + } + } else { + filter = timeFilter + } + + return filter +} + +func (g *Groupware) RelatedToEmail(w http.ResponseWriter, r *http.Request) { + id := chi.URLParam(r, UriParamEmailId) + + g.respond(w, r, func(req Request) Response { + l := req.logger.With().Str(logEmailId, log.SafeString(id)) + + accountId, gwerr := req.GetAccountIdForMail() + if gwerr != nil { + return errorResponse(single(accountId), gwerr) + } + l = l.Str(logAccountId, log.SafeString(accountId)) + + limit, ok, err := req.parseUIntParam(QueryParamLimit, 10) // TODO configurable default limit + if err != nil { + return errorResponse(single(accountId), err) + } + if ok { + l = l.Uint("limit", limit) + } + + days, ok, err := req.parseUIntParam(QueryParamDays, 5) // TODO configurable default days + if err != nil { + return errorResponse(single(accountId), err) + } + if ok { + l = l.Uint("days", days) + } + + logger := log.From(l) + + reqId := req.GetRequestId() + getEmailsBefore := time.Now() + emails, _, sessionState, state, lang, jerr := g.jmap.GetEmails(accountId, req.session, req.ctx, logger, req.language(), []string{id}, true, g.config.maxBodyValueBytes, false, false) + getEmailsDuration := time.Since(getEmailsBefore) + if jerr != nil { + return req.errorResponseFromJmap(single(accountId), jerr) + } + + if len(emails) < 1 { + req.observe(g.metrics.EmailByIdDuration.WithLabelValues(req.session.JmapEndpoint, metrics.Values.Result.NotFound), getEmailsDuration.Seconds()) + logger.Trace().Msg("failed to find any emails matching id") // the id is already in the log field + return notFoundResponse(single(accountId), sessionState) + } else { + req.observe(g.metrics.EmailByIdDuration.WithLabelValues(req.session.JmapEndpoint, metrics.Values.Result.Found), getEmailsDuration.Seconds()) + } 
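+
+ // From here on the handler responds with the (sanitized) email itself and
+ // schedules background jobs for related emails (same sender, same thread);
+ // their results are delivered asynchronously via req.push, correlated with
+ // the request id returned in the response body.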
+ + email := emails[0] + + beacon := email.ReceivedAt // TODO configurable: either relative to when the email was received, or relative to now + //beacon := time.Now() + filter := relatedEmailsFilter(email, beacon, days) + + // bgctx, _ := context.WithTimeout(context.Background(), time.Duration(30)*time.Second) // TODO configurable + bgctx := context.Background() + + g.job(logger, RelationTypeSameSender, func(jobId uint64, l *log.Logger) { + before := time.Now() + resultsByAccountId, _, _, lang, jerr := g.jmap.QueryEmails(single(accountId), filter, req.session, bgctx, l, req.language(), 0, limit, false, g.config.maxBodyValueBytes) + if results, ok := resultsByAccountId[accountId]; ok { + duration := time.Since(before) + if jerr != nil { + req.observeJmapError(jerr) + l.Error().Err(jerr).Msgf("failed to query %v emails", RelationTypeSameSender) + } else { + req.observe(g.metrics.EmailSameSenderDuration.WithLabelValues(req.session.JmapEndpoint), duration.Seconds()) + related, err := req.sanitizeEmails(filterEmails(results.Emails, email)) + if err == nil { + l.Trace().Msgf("'%v' found %v other emails", RelationTypeSameSender, len(related)) + if len(related) > 0 { + req.push(RelationEntityEmail, AboutEmailsEvent{Id: reqId, Emails: related, Source: RelationTypeSameSender, Language: lang}) + } + } + } + } + }) + + g.job(logger, RelationTypeSameThread, func(jobId uint64, l *log.Logger) { + before := time.Now() + emails, _, _, _, jerr := g.jmap.EmailsInThread(accountId, email.ThreadId, req.session, bgctx, l, req.language(), false, g.config.maxBodyValueBytes) + duration := time.Since(before) + if jerr != nil { + req.observeJmapError(jerr) + l.Error().Err(jerr).Msgf("failed to list %v emails", RelationTypeSameThread) + } else { + req.observe(g.metrics.EmailSameThreadDuration.WithLabelValues(req.session.JmapEndpoint), duration.Seconds()) + related, err := req.sanitizeEmails(filterEmails(emails, email)) + if err == nil { + l.Trace().Msgf("'%v' found %v other emails", RelationTypeSameThread, len(related)) + if len(related) > 0 { + req.push(RelationEntityEmail, AboutEmailsEvent{Id: reqId, Emails: related, Source: RelationTypeSameThread, Language: lang}) + } + } + } + }) + + sanitized, err := req.sanitizeEmail(email) + if err != nil { + return errorResponseWithSessionState(single(accountId), err, sessionState) + } + return etagResponse(single(accountId), AboutEmailResponse{ + Email: sanitized, + RequestId: reqId, + }, sessionState, EmailResponseObjectType, state, lang) + }) +} + +type EmailSummary struct { + // The id of the account this Email summary pertains to. + // required: true + // example: $accountId + AccountId string `json:"accountId,omitempty"` + + // The id of the Email object. + // + // Note that this is the JMAP object id, NOT the Message-ID header field value of the message [RFC5322]. + // + // [RFC5322]: https://www.rfc-editor.org/rfc/rfc5322.html + // + // required: true + // example: $emailId + Id string `json:"id,omitempty"` + + // The id of the Thread to which this Email belongs. + // + // example: $threadId + ThreadId string `json:"threadId,omitempty"` + + // The number of emails in the thread, including this one. + ThreadSize int `json:"threadSize,omitzero"` + + // The set of Mailbox ids this Email belongs to. + // + // An Email in the mail store MUST belong to one or more Mailboxes at all times (until it is destroyed). + // The set is represented as an object, with each key being a Mailbox id. + // + // The value for each key in the object MUST be true. 
+ // + // example: $mailboxIds + MailboxIds map[string]bool `json:"mailboxIds,omitempty"` + + // A set of keywords that apply to the Email. + // + // The set is represented as an object, with the keys being the keywords. + // + // The value for each key in the object MUST be true. + // + // Keywords are shared with IMAP. + // + // The six system keywords from IMAP get special treatment. + // + // The following four keywords have their first character changed from \ in IMAP to $ in JMAP and have particular semantic meaning: + // + // - $draft: The Email is a draft the user is composing. + // - $seen: The Email has been read. + // - $flagged: The Email has been flagged for urgent/special attention. + // - $answered: The Email has been replied to. + // + // The IMAP \Recent keyword is not exposed via JMAP. The IMAP \Deleted keyword is also not present: IMAP uses a delete+expunge model, + // which JMAP does not. Any message with the \Deleted keyword MUST NOT be visible via JMAP (and so are not counted in the + // “totalEmails”, “unreadEmails”, “totalThreads”, and “unreadThreads” Mailbox properties). + // + // Users may add arbitrary keywords to an Email. + // For compatibility with IMAP, a keyword is a case-insensitive string of 1–255 characters in the ASCII subset + // %x21–%x7e (excludes control chars and space), and it MUST NOT include any of these characters: + // + // ( ) { ] % * " \ + // + // Because JSON is case sensitive, servers MUST return keywords in lowercase. + // + // The [IMAP and JMAP Keywords] registry as established in [RFC5788] assigns semantic meaning to some other + // keywords in common use. + // + // New keywords may be established here in the future. In particular, note: + // + // - $forwarded: The Email has been forwarded. + // - $phishing: The Email is highly likely to be phishing. + // Clients SHOULD warn users to take care when viewing this Email and disable links and attachments. + // - $junk: The Email is definitely spam. + // Clients SHOULD set this flag when users report spam to help train automated spam-detection systems. + // - $notjunk: The Email is definitely not spam. + // Clients SHOULD set this flag when users indicate an Email is legitimate, to help train automated spam-detection systems. + // + // [IMAP and JMAP Keywords]: https://www.iana.org/assignments/imap-jmap-keywords/ + // [RFC5788]: https://www.rfc-editor.org/rfc/rfc5788.html + // + // example: $emailKeywords + Keywords map[string]bool `json:"keywords,omitempty"` + + // The size, in octets, of the raw data for the message [RFC5322] + // (as referenced by the blobId, i.e., the number of octets in the file the user would download). + // + // [RFC5322]: https://www.rfc-editor.org/rfc/rfc5322.html + Size int `json:"size"` + + // The date the Email was received by the message store. + // + // This is the internal date in IMAP [RFC3501]. + // + // [RFC3501]: https://www.rfc-editor.org/rfc/rfc3501.html + // + // example: $emailReceivedAt + ReceivedAt time.Time `json:"receivedAt,omitzero"` + + // The value is identical to the value of header:Sender:asAddresses. + // example: $emailSenders + Sender []jmap.EmailAddress `json:"sender,omitempty"` + + // The value is identical to the value of header:From:asAddresses. + // example: $emailFroms + From []jmap.EmailAddress `json:"from,omitempty"` + + // The value is identical to the value of header:To:asAddresses. + // example: $emailTos + To []jmap.EmailAddress `json:"to,omitempty"` + + // The value is identical to the value of header:Cc:asAddresses. 
+ // example: $emailCCs + Cc []jmap.EmailAddress `json:"cc,omitempty"` + + // The value is identical to the value of header:Bcc:asAddresses. + // example: $emailBCCs + Bcc []jmap.EmailAddress `json:"bcc,omitempty"` + + // The value is identical to the value of header:Subject:asText. + // example: $emailSubject + Subject string `json:"subject,omitempty"` + + // The value is identical to the value of header:Date:asDate. + // example: $emailSentAt + SentAt time.Time `json:"sentAt,omitzero"` + + // This is true if there are one or more parts in the message that a client UI should offer as downloadable. + // + // A server SHOULD set hasAttachment to true if the attachments list contains at least one item that + // does not have Content-Disposition: inline. + // + // The server MAY ignore parts in this list that are processed automatically in some way or are referenced + // as embedded images in one of the text/html parts of the message. + // + // The server MAY set hasAttachment based on implementation-defined or site-configurable heuristics. + // example: true + HasAttachment bool `json:"hasAttachment,omitempty"` + + // A list, traversing depth-first, of all parts in bodyStructure. + // + // They must satisfy either of the following conditions: + // + // - not of type multipart/* and not included in textBody or htmlBody + // - of type image/*, audio/*, or video/* and not in both textBody and htmlBody + // + // None of these parts include subParts, including message/* types. + // + // Attached messages may be fetched using the Email/parse method and the blobId. + // + // Note that a text/html body part HTML may reference image parts in attachments by using cid: + // links to reference the Content-Id, as defined in [RFC2392], or by referencing the Content-Location. + // + // [RFC2392]: https://www.rfc-editor.org/rfc/rfc2392.html + // + // example: $emailAttachments + Attachments []jmap.EmailBodyPart `json:"attachments,omitempty"` + + // A plaintext fragment of the message body. + // + // This is intended to be shown as a preview line when listing messages in the mail store and may be truncated + // when shown. + // + // The server may choose which part of the message to include in the preview; skipping quoted sections and + // salutations and collapsing white space can result in a more useful preview. + // + // This MUST NOT be more than 256 characters in length. + // + // As this is derived from the message content by the server, and the algorithm for doing so could change over + // time, fetching this for an Email a second time MAY return a different result. + // However, the previous value is not considered incorrect, and the change SHOULD NOT cause the Email object + // to be considered as changed by the server. + // + // example: $emailPreview + Preview string `json:"preview,omitempty"` +} + +func summarizeEmail(accountId string, email jmap.Email) EmailSummary { + return EmailSummary{ + AccountId: accountId, + Id: email.Id, + ThreadId: email.ThreadId, + ThreadSize: email.ThreadSize, + MailboxIds: email.MailboxIds, + Keywords: email.Keywords, + Size: email.Size, + ReceivedAt: email.ReceivedAt, + Sender: email.Sender, + From: email.From, + To: email.To, + Cc: email.Cc, + Bcc: email.Bcc, + Subject: email.Subject, + SentAt: email.SentAt, + HasAttachment: email.HasAttachment, + Attachments: email.Attachments, + Preview: email.Preview, + } +} + +type emailWithAccountId struct { + accountId string + email jmap.Email +} + +// When the request succeeds. 
+// swagger:response GetLatestEmailsSummaryForAllAccounts200 +type SwaggerGetLatestEmailsSummaryForAllAccounts200 struct { + // in: body + Body []EmailSummary +} + +// swagger:parameters get_latest_emails_summary_for_all_accounts +type SwaggerGetLatestEmailsSummaryForAllAccountsParams struct { + // The maximum amount of email summaries to return. + // in: query + // example: 10 + // default: 10 + Limit uint `json:"limit"` + + // Whether to include emails that have already been seen (read) or not. + // in: query + // example: true + // default: false + Seen bool `json:"seen"` + + // Whether to include emails that have been flagged as junk or phishing. + // in: query + // example: false + // default: false + Undesirable bool `json:"undesirable"` +} + +type EmailSummaries struct { + Emails []EmailSummary `json:"emails,omitempty"` + Total uint `json:"total,omitzero"` + Limit uint `json:"limit,omitzero"` + Offset uint `json:"offset,omitzero"` + State jmap.State `json:"state,omitempty"` +} + +// swagger:route GET /groupware/accounts/all/emails/latest/summary email get_latest_emails_summary_for_all_accounts +// Get a summary of the latest emails across all the mailboxes, across all of a user's accounts. +// +// Retrieves summaries of the latest emails of a user, in all accounts, across all mailboxes. +// +// The number of total summaries to retrieve is specified using the query parameter `limit`. +// +// The following additional query parameters may be specified to further filter the emails to summarize: +// +// !- `seen`: when `true`, emails that have already been seen (read) will be included as well (default is to only include emails that have not been read yet) +// !- `undesirable`: when `true`, emails that are flagged as spam or phishing will also be summarized (default is to ignore those) +// +// responses: +// +// 200: GetLatestEmailsSummaryForAllAccounts200 +// 400: ErrorResponse400 +// 404: ErrorResponse404 +// 500: ErrorResponse500 +func (g *Groupware) GetLatestEmailsSummaryForAllAccounts(w http.ResponseWriter, r *http.Request) { + g.respond(w, r, func(req Request) Response { + l := req.logger.With() + + allAccountIds := req.AllAccountIds() + l.Array(logAccountId, log.SafeStringArray(allAccountIds)) + + limit, ok, err := req.parseUIntParam(QueryParamLimit, 10) // TODO from configuration + if err != nil { + return errorResponse(allAccountIds, err) + } + if ok { + l = l.Uint(QueryParamLimit, limit) + } + + offset, ok, err := req.parseUIntParam(QueryParamOffset, 0) + if err != nil { + return errorResponse(allAccountIds, err) + } + if offset > 0 { + return notImplementesResponse() + } + if ok { + l = l.Uint(QueryParamOffset, limit) + } + + seen, ok, err := req.parseBoolParam(QueryParamSeen, false) + if err != nil { + return errorResponse(allAccountIds, err) + } + if ok { + l = l.Bool(QueryParamSeen, seen) + } + + undesirable, ok, err := req.parseBoolParam(QueryParamUndesirable, false) + if err != nil { + return errorResponse(allAccountIds, err) + } + if ok { + l = l.Bool(QueryParamUndesirable, undesirable) + } + + var filter jmap.EmailFilterElement = nil // all emails, read and unread + { + notKeywords := []string{} + if !seen { + notKeywords = append(notKeywords, jmap.JmapKeywordSeen) + } + if undesirable { + notKeywords = append(notKeywords, jmap.JmapKeywordJunk, jmap.JmapKeywordPhishing) + } + filter = filterFromNotKeywords(notKeywords) + } + + logger := log.From(l) + + emailsSummariesByAccount, sessionState, state, lang, jerr := g.jmap.QueryEmailSummaries(allAccountIds, req.session, 
req.ctx, logger, req.language(), filter, limit, true) + if jerr != nil { + return req.errorResponseFromJmap(allAccountIds, jerr) + } + + // sort in memory to respect the overall limit + total := uint(0) + for _, emails := range emailsSummariesByAccount { + total += uint(max(len(emails.Emails), 0)) + } + all := make([]emailWithAccountId, total) + i := uint(0) + for accountId, emails := range emailsSummariesByAccount { + for _, email := range emails.Emails { + all[i] = emailWithAccountId{accountId: accountId, email: email} + i++ + } + } + + slices.SortFunc(all, func(a, b emailWithAccountId) int { return -(a.email.ReceivedAt.Compare(b.email.ReceivedAt)) }) + + summaries := make([]EmailSummary, min(limit, total)) + for i = 0; i < limit && i < total; i++ { + summaries[i] = summarizeEmail(all[i].accountId, all[i].email) + } + + return etagResponse(allAccountIds, EmailSummaries{ + Emails: summaries, + Total: total, + Limit: limit, + Offset: offset, + }, sessionState, EmailResponseObjectType, state, lang) + }) +} + +func filterEmails(all []jmap.Email, skip jmap.Email) []jmap.Email { + filtered := all[:0] + for _, email := range all { + if skip.Id != email.Id { + filtered = append(filtered, email) + } + } + return filtered +} + +func filterFromNotKeywords(keywords []string) jmap.EmailFilterElement { + switch len(keywords) { + case 0: + return nil + case 1: + return jmap.EmailFilterCondition{NotKeyword: keywords[0]} + default: + conditions := make([]jmap.EmailFilterElement, len(keywords)) + for i, keyword := range keywords { + conditions[i] = jmap.EmailFilterCondition{NotKeyword: keyword} + } + return jmap.EmailFilterOperator{Operator: jmap.And, Conditions: conditions} + } +} + +var sanitizationPolicy *bluemonday.Policy = bluemonday.UGCPolicy() + +var sanitizableMediaTypes = []string{ + "text/html", + "text/xhtml", +} + +func (req *Request) sanitizeEmail(source jmap.Email) (jmap.Email, *Error) { + if !req.g.config.sanitize { + return source, nil + } + memory := map[string]int{} + for _, ref := range []*[]jmap.EmailBodyPart{&source.HtmlBody, &source.TextBody} { + newBody := make([]jmap.EmailBodyPart, len(*ref)) + for i, p := range *ref { + t, _, err := mime.ParseMediaType(p.Type) + if err != nil { + msg := fmt.Sprintf("failed to parse the mime type '%s'", p.Type) + req.logger.Error().Str("type", log.SafeString(p.Type)).Msg(msg) + return source, req.apiError(&ErrorFailedToSanitizeEmail, withDetail(msg)) + } + if slices.Contains(sanitizableMediaTypes, t) { + if already, done := memory[p.PartId]; !done { + if part, ok := source.BodyValues[p.PartId]; ok { + safe := sanitizationPolicy.Sanitize(part.Value) + part.Value = safe + source.BodyValues[p.PartId] = part + newLen := len(safe) + memory[p.PartId] = newLen + p.Size = newLen + } + } else { + p.Size = already + } + } + newBody[i] = p + } + *ref = newBody + } + + // we could post-process attachments as well: + /* + for _, part := range source.Attachments { + if part.Type == "" { + part.Type = "application/octet-stream" + } + if part.Name == "" { + part.Name = "unknown" + } + } + */ + + return source, nil +} + +func (req *Request) sanitizeEmails(source []jmap.Email) ([]jmap.Email, *Error) { + if !req.g.config.sanitize { + return source, nil + } + result := make([]jmap.Email, len(source)) + for i, email := range source { + sanitized, gwerr := req.sanitizeEmail(email) + if gwerr != nil { + return nil, gwerr + } + result[i] = sanitized + } + return result, nil +} diff --git a/services/groupware/pkg/groupware/groupware_api_identity.go 
b/services/groupware/pkg/groupware/groupware_api_identity.go new file mode 100644 index 0000000000..74d884489b --- /dev/null +++ b/services/groupware/pkg/groupware/groupware_api_identity.go @@ -0,0 +1,136 @@ +package groupware + +import ( + "fmt" + "net/http" + "strings" + + "github.com/go-chi/chi/v5" + "github.com/opencloud-eu/opencloud/pkg/jmap" + "github.com/opencloud-eu/opencloud/pkg/log" + "github.com/opencloud-eu/opencloud/pkg/structs" +) + +// When the request suceeds. +// swagger:response GetIdentitiesResponse +type SwaggerGetIdentitiesResponse struct { + // in: body + Body []jmap.Identity +} + +// swagger:route GET /groupware/accounts/{account}/identities identity identities +// Get the list of identities that are associated with an account. +// +// responses: +// +// 200: GetIdentitiesResponse +// 400: ErrorResponse400 +// 404: ErrorResponse404 +// 500: ErrorResponse500 +func (g *Groupware) GetIdentities(w http.ResponseWriter, r *http.Request) { + g.respond(w, r, func(req Request) Response { + accountId, err := req.GetAccountIdForMail() + if err != nil { + return errorResponse(single(accountId), err) + } + logger := log.From(req.logger.With().Str(logAccountId, accountId)) + res, sessionState, state, lang, jerr := g.jmap.GetAllIdentities(accountId, req.session, req.ctx, logger, req.language()) + if jerr != nil { + return req.errorResponseFromJmap(single(accountId), jerr) + } + return etagResponse(single(accountId), res, sessionState, IdentityResponseObjectType, state, lang) + }) +} + +func (g *Groupware) GetIdentityById(w http.ResponseWriter, r *http.Request) { + g.respond(w, r, func(req Request) Response { + accountId, err := req.GetAccountIdForMail() + if err != nil { + return errorResponse(single(accountId), err) + } + id := chi.URLParam(r, UriParamIdentityId) + logger := log.From(req.logger.With().Str(logAccountId, accountId).Str(logIdentityId, id)) + res, sessionState, state, lang, jerr := g.jmap.GetIdentities(accountId, req.session, req.ctx, logger, req.language(), []string{id}) + if jerr != nil { + return req.errorResponseFromJmap(single(accountId), jerr) + } + if len(res) < 1 { + return notFoundResponse(single(accountId), sessionState) + } + return etagResponse(single(accountId), res[0], sessionState, IdentityResponseObjectType, state, lang) + }) +} + +func (g *Groupware) AddIdentity(w http.ResponseWriter, r *http.Request) { + g.respond(w, r, func(req Request) Response { + accountId, err := req.GetAccountIdForMail() + if err != nil { + return errorResponse(single(accountId), err) + } + logger := log.From(req.logger.With().Str(logAccountId, accountId)) + + var identity jmap.Identity + err = req.body(&identity) + if err != nil { + return errorResponse(single(accountId), err) + } + + created, sessionState, state, lang, jerr := g.jmap.CreateIdentity(accountId, req.session, req.ctx, logger, req.language(), identity) + if jerr != nil { + return req.errorResponseFromJmap(single(accountId), jerr) + } + return etagResponse(single(accountId), created, sessionState, IdentityResponseObjectType, state, lang) + }) +} + +func (g *Groupware) ModifyIdentity(w http.ResponseWriter, r *http.Request) { + g.respond(w, r, func(req Request) Response { + accountId, err := req.GetAccountIdForMail() + if err != nil { + return errorResponse(single(accountId), err) + } + logger := log.From(req.logger.With().Str(logAccountId, accountId)) + + var identity jmap.Identity + err = req.body(&identity) + if err != nil { + return errorResponse(single(accountId), err) + } + + updated, sessionState, state, lang, 
jerr := g.jmap.UpdateIdentity(accountId, req.session, req.ctx, logger, req.language(), identity) + if jerr != nil { + return req.errorResponseFromJmap(single(accountId), jerr) + } + return etagResponse(single(accountId), updated, sessionState, IdentityResponseObjectType, state, lang) + }) +} + +func (g *Groupware) DeleteIdentity(w http.ResponseWriter, r *http.Request) { + g.respond(w, r, func(req Request) Response { + accountId, err := req.GetAccountIdForMail() + if err != nil { + return errorResponse(single(accountId), err) + } + logger := log.From(req.logger.With().Str(logAccountId, accountId)) + + id := chi.URLParam(r, UriParamIdentityId) + ids := strings.Split(id, ",") + if len(ids) < 1 { + return req.parameterErrorResponse(single(accountId), UriParamEmailId, fmt.Sprintf("Invalid value for path parameter '%v': '%s': %s", UriParamIdentityId, log.SafeString(id), "empty list of identity ids")) + } + + deletion, sessionState, state, _, jerr := g.jmap.DeleteIdentity(accountId, req.session, req.ctx, logger, req.language(), ids) + if jerr != nil { + return req.errorResponseFromJmap(single(accountId), jerr) + } + + notDeletedIds := structs.Missing(ids, deletion) + if len(notDeletedIds) == 0 { + return noContentResponseWithEtag(single(accountId), sessionState, IdentityResponseObjectType, state) + } else { + logger.Error().Array("not-deleted", log.SafeStringArray(notDeletedIds)).Msgf("failed to delete %d identities", len(notDeletedIds)) + return errorResponseWithSessionState(single(accountId), req.apiError(&ErrorFailedToDeleteSomeIdentities, + withMeta(map[string]any{"ids": notDeletedIds})), sessionState) + } + }) +} diff --git a/services/groupware/pkg/groupware/groupware_api_index.go b/services/groupware/pkg/groupware/groupware_api_index.go new file mode 100644 index 0000000000..9d2e975479 --- /dev/null +++ b/services/groupware/pkg/groupware/groupware_api_index.go @@ -0,0 +1,249 @@ +package groupware + +import ( + "net/http" + "slices" + "strings" + + "github.com/opencloud-eu/opencloud/pkg/jmap" +) + +type IndexLimits struct { + // The maximum file size, in octets, that the server will accept for a single file upload (for any purpose). + MaxSizeUpload int `json:"maxSizeUpload"` + + // The maximum number of concurrent requests the server will accept to the upload endpoint. + MaxConcurrentUpload int `json:"maxConcurrentUpload"` + + // The maximum size, in octets, that the server will accept for a single request to the API endpoint. + MaxSizeRequest int `json:"maxSizeRequest"` + + // The maximum number of concurrent requests the server will accept to the API endpoint. + MaxConcurrentRequests int `json:"maxConcurrentRequests"` +} + +type IndexAccountMailCapabilities struct { + // The maximum depth of the Mailbox hierarchy (i.e., one more than the maximum number of ancestors + // a Mailbox may have), or null for no limit. + MaxMailboxDepth int `json:"maxMailboxDepth"` + + // The maximum length, in (UTF-8) octets, allowed for the name of a Mailbox. + // + // This MUST be at least 100, although it is recommended servers allow more. + MaxSizeMailboxName int `json:"maxSizeMailboxName"` + + // The maximum number of Mailboxes that can be can assigned to a single Email object. + // + // This MUST be an integer >= 1, or null for no limit (or rather, the limit is always the number of + // Mailboxes in the account). + MaxMailboxesPerEmail int `json:"maxMailboxesPerEmail"` + + // The maximum total size of attachments, in octets, allowed for a single Email object. 
+ // + // A server MAY still reject the import or creation of an Email with a lower attachment size total + // (for example, if the body includes several megabytes of text, causing the size of the encoded + // MIME structure to be over some server-defined limit). + // + // Note that this limit is for the sum of unencoded attachment sizes. Users are generally not + // knowledgeable about encoding overhead, etc., nor should they need to be, so marketing and help + // materials normally tell them the “max size attachments”. This is the unencoded size they see + // on their hard drive, so this capability matches that and allows the client to consistently + // enforce what the user understands as the limit. + MaxSizeAttachmentsPerEmail int `json:"maxSizeAttachmentsPerEmail"` + + // If true, the user may create a Mailbox in this account with a null parentId. + MayCreateTopLevelMailbox bool `json:"mayCreateTopLevelMailbox"` + + // The number in seconds of the maximum delay the server supports in sending. + // + // This is 0 if the server does not support delayed send. + MaxDelayedSend int `json:"maxDelayedSend"` +} + +type IndexAccountSieveCapabilities struct { + // The maximum length, in octets, allowed for the name of a SieveScript. + // + // For compatibility with ManageSieve, this MUST be at least 512 (up + // to 128 Unicode characters). + MaxSizeScriptName int `json:"maxSizeScriptName"` + + // The maximum size (in octets) of a Sieve script the server is willing + // to store for the user, or null for no limit. + MaxSizeScript int `json:"maxSizeScript"` + + // The maximum number of Sieve scripts the server is willing to store + // for the user, or null for no limit. + MaxNumberScripts int `json:"maxNumberScripts"` + + // The maximum number of Sieve "redirect" actions a script can perform + // during a single evaluation, or null for no limit. + // + // Note that this is different from the total number of "redirect" + // actions a script can contain. + MaxNumberRedirects int `json:"maxNumberRedirects"` +} + +// Capabilities of the Account. +type IndexAccountCapabilities struct { + Mail IndexAccountMailCapabilities `json:"mail"` + Sieve IndexAccountSieveCapabilities `json:"sieve"` +} + +type IndexAccount struct { + AccountId string `json:"accountId"` + + // A user-friendly string to show when presenting content from this Account, + // e.g., the email address representing the owner of the account. + Name string `json:"name"` + + // This is true if the Account belongs to the authenticated user rather than + // a group Account or a personal Account of another user that has been shared + // with them. + IsPersonal bool `json:"isPersonal"` + + // This is true if the entire Account is read-only. + IsReadOnly bool `json:"isReadOnly"` + + // Capabilities of the Account. + Capabilities IndexAccountCapabilities `json:"capabilities"` + + // The Identities associated with this Account. + Identities []jmap.Identity `json:"identities,omitempty"` + + // The quotas for this Account. + Quotas []jmap.Quota `json:"quotas,omitempty"` +} + +// Primary account identifiers per API usage type. +type IndexPrimaryAccounts struct { + Mail string `json:"mail"` + Submission string `json:"submission"` + Blob string `json:"blob"` + VacationResponse string `json:"vacationResponse"` + Sieve string `json:"sieve"` +} + +type IndexResponse struct { + // The API version. + Version string `json:"version"` + + // A list of capabilities of this API version. + Capabilities []string `json:"capabilities"` + + // API limits. 
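+	//
+	// These are copied from the JMAP core capability of the user's session (see buildIndexLimits
+	// below); a zero value means the mail server did not advertise the corresponding limit.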
+ Limits IndexLimits `json:"limits"` + + // Accounts that are available to the user. + // + // The key of the map is the Account identifier. + Accounts []IndexAccount `json:"accounts"` + + // Primary account identifiers per API usage type. + PrimaryAccounts IndexPrimaryAccounts `json:"primaryAccounts"` +} + +// When the request suceeds. +// swagger:response IndexResponse +type SwaggerIndexResponse struct { + // in: body + Body struct { + *IndexResponse + } +} + +// swagger:route GET /groupware bootstrap index +// Get initial bootstrapping information for a user. +// +// responses: +// +// 200: IndexResponse +func (g *Groupware) Index(w http.ResponseWriter, r *http.Request) { + g.respond(w, r, func(req Request) Response { + accountIds := req.AllAccountIds() + + boot, sessionState, state, lang, err := g.jmap.GetBootstrap(accountIds, req.session, req.ctx, req.logger, req.language()) + if err != nil { + return req.errorResponseFromJmap(accountIds, err) + } + + return etagResponse(accountIds, IndexResponse{ + Version: Version, + Capabilities: Capabilities, + Limits: buildIndexLimits(req.session), + Accounts: buildIndexAccounts(req.session, boot), + PrimaryAccounts: buildIndexPrimaryAccounts(req.session), + }, sessionState, IndexResponseObjectType, state, lang) + }) +} + +func buildIndexLimits(session *jmap.Session) IndexLimits { + result := IndexLimits{} + if core := session.Capabilities.Core; core != nil { + result.MaxSizeUpload = core.MaxSizeUpload + result.MaxConcurrentUpload = core.MaxConcurrentUpload + result.MaxSizeRequest = core.MaxSizeRequest + result.MaxConcurrentRequests = core.MaxConcurrentRequests + } + return result +} + +func buildIndexPrimaryAccounts(session *jmap.Session) IndexPrimaryAccounts { + return IndexPrimaryAccounts{ + Mail: session.PrimaryAccounts.Mail, + Submission: session.PrimaryAccounts.Submission, + Blob: session.PrimaryAccounts.Blob, + VacationResponse: session.PrimaryAccounts.VacationResponse, + Sieve: session.PrimaryAccounts.Sieve, + } +} + +func buildIndexAccounts(session *jmap.Session, boot map[string]jmap.AccountBootstrapResult) []IndexAccount { + accounts := make([]IndexAccount, len(session.Accounts)) + i := 0 + for accountId, account := range session.Accounts { + indexAccount := IndexAccount{ + AccountId: accountId, + Name: account.Name, + IsPersonal: account.IsPersonal, + IsReadOnly: account.IsReadOnly, + Capabilities: IndexAccountCapabilities{ + Mail: buildIndexAccountMailCapabilities(account), + Sieve: buildIndexAccountSieveCapabilities(account), + }, + } + if b, ok := boot[accountId]; ok { + indexAccount.Identities = b.Identities + indexAccount.Quotas = b.Quotas + } + accounts[i] = indexAccount + i++ + } + slices.SortFunc(accounts, func(a, b IndexAccount) int { return strings.Compare(a.AccountId, b.AccountId) }) + return accounts +} + +func buildIndexAccountMailCapabilities(account jmap.Account) IndexAccountMailCapabilities { + result := IndexAccountMailCapabilities{} + if mail := account.AccountCapabilities.Mail; mail != nil { + result.MaxMailboxDepth = mail.MaxMailboxDepth + result.MaxSizeMailboxName = mail.MaxSizeMailboxName + result.MaxMailboxesPerEmail = mail.MaxMailboxesPerEmail + result.MaxSizeAttachmentsPerEmail = mail.MaxSizeAttachmentsPerEmail + result.MayCreateTopLevelMailbox = mail.MayCreateTopLevelMailbox + } + if subm := account.AccountCapabilities.Submission; subm != nil { + result.MaxDelayedSend = subm.MaxDelayedSend + } + return result +} + +func buildIndexAccountSieveCapabilities(account jmap.Account) IndexAccountSieveCapabilities { + 
result := IndexAccountSieveCapabilities{} + if sieve := account.AccountCapabilities.Sieve; sieve != nil { + result.MaxSizeScriptName = sieve.MaxSizeScriptName + result.MaxSizeScript = sieve.MaxSizeScript + result.MaxNumberScripts = sieve.MaxNumberScripts + result.MaxNumberRedirects = sieve.MaxNumberRedirects + } + return result +} diff --git a/services/groupware/pkg/groupware/groupware_api_mailbox.go b/services/groupware/pkg/groupware/groupware_api_mailbox.go new file mode 100644 index 0000000000..f918681b26 --- /dev/null +++ b/services/groupware/pkg/groupware/groupware_api_mailbox.go @@ -0,0 +1,502 @@ +package groupware + +import ( + "net/http" + "slices" + "strings" + + "github.com/go-chi/chi/v5" + "github.com/rs/zerolog" + + "github.com/opencloud-eu/opencloud/pkg/jmap" + "github.com/opencloud-eu/opencloud/pkg/log" +) + +// When the request succeeds. +// swagger:response MailboxResponse200 +type SwaggerGetMailboxById200 struct { + // in: body + Body struct { + *jmap.Mailbox + } +} + +// swagger:route GET /groupware/accounts/{account}/mailboxes/{mailbox} mailbox mailboxes_by_id +// Get a specific mailbox by its identifier. +// +// A Mailbox represents a named set of Emails. +// This is the primary mechanism for organising Emails within an account. +// It is analogous to a folder or a label in other systems. +// +// responses: +// +// 200: MailboxResponse200 +// 400: ErrorResponse400 +// 404: ErrorResponse404 +// 500: ErrorResponse500 +func (g *Groupware) GetMailbox(w http.ResponseWriter, r *http.Request) { + mailboxId := chi.URLParam(r, UriParamMailboxId) + g.respond(w, r, func(req Request) Response { + accountId, err := req.GetAccountIdForMail() + if err != nil { + return errorResponse(single(accountId), err) + } + + mailboxes, sessionState, state, lang, jerr := g.jmap.GetMailbox(accountId, req.session, req.ctx, req.logger, req.language(), []string{mailboxId}) + if jerr != nil { + return req.errorResponseFromJmap(single(accountId), jerr) + } + + if len(mailboxes.Mailboxes) == 1 { + return etagResponse(single(accountId), mailboxes.Mailboxes[0], sessionState, MailboxResponseObjectType, state, lang) + } else { + return notFoundResponse(single(accountId), sessionState) + } + }) +} + +// swagger:parameters mailboxes +type SwaggerMailboxesParams struct { + // The name of the mailbox, with substring matching. + // in: query + Name string `json:"name,omitempty"` + // The role of the mailbox. + // in: query + Role string `json:"role,omitempty"` + // Whether the mailbox is subscribed by the user or not. + // When omitted, the subscribed and unsubscribed mailboxes are returned. + // in: query + Subscribed bool `json:"subscribed,omitempty"` +} + +// When the request succeeds. +// swagger:response MailboxesResponse200 +type SwaggerMailboxesResponse200 struct { + // in: body + Body []jmap.Mailbox +} + +// swagger:route GET /groupware/accounts/{account}/mailboxes mailbox mailboxes +// Get the list of all the mailboxes of an account, potentially filtering on the +// name and/or role of the mailbox. +// +// A Mailbox represents a named set of Emails. +// This is the primary mechanism for organising Emails within an account. +// It is analogous to a folder or a label in other systems. +// +// When none of the query parameters are specified, all the mailboxes are returned. 
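+//
+// Illustrative example (hypothetical account id and mailbox name): a request such as
+// `GET /groupware/accounts/a0/mailboxes?name=Invoices&subscribed=true` returns only the
+// subscribed mailboxes whose name matches "Invoices", while a request without any query
+// parameters returns every mailbox of the account.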
+// +// responses: +// +// 200: MailboxesResponse200 +// 400: ErrorResponse400 +// 500: ErrorResponse500 +func (g *Groupware) GetMailboxes(w http.ResponseWriter, r *http.Request) { + q := r.URL.Query() + var filter jmap.MailboxFilterCondition + + hasCriteria := false + name := q.Get(QueryParamMailboxSearchName) + if name != "" { + filter.Name = name + hasCriteria = true + } + role := q.Get(QueryParamMailboxSearchRole) + if role != "" { + filter.Role = role + hasCriteria = true + } + + g.respond(w, r, func(req Request) Response { + accountId, err := req.GetAccountIdForMail() + if err != nil { + return errorResponse(single(accountId), err) + } + + subscribed, set, err := req.parseBoolParam(QueryParamMailboxSearchSubscribed, false) + if err != nil { + return errorResponse(single(accountId), err) + } + if set { + filter.IsSubscribed = &subscribed + hasCriteria = true + } + + logger := log.From(req.logger.With().Str(logAccountId, accountId)) + + if hasCriteria { + mailboxesByAccountId, sessionState, state, lang, err := g.jmap.SearchMailboxes(single(accountId), req.session, req.ctx, logger, req.language(), filter) + if err != nil { + return req.errorResponseFromJmap(single(accountId), err) + } + + if mailboxes, ok := mailboxesByAccountId[accountId]; ok { + return etagResponse(single(accountId), sortMailboxSlice(mailboxes), sessionState, MailboxResponseObjectType, state, lang) + } else { + return notFoundResponse(single(accountId), sessionState) + } + } else { + mailboxesByAccountId, sessionState, state, lang, err := g.jmap.GetAllMailboxes(single(accountId), req.session, req.ctx, logger, req.language()) + if err != nil { + return req.errorResponseFromJmap(single(accountId), err) + } + if mailboxes, ok := mailboxesByAccountId[accountId]; ok { + return etagResponse(single(accountId), sortMailboxSlice(mailboxes), sessionState, MailboxResponseObjectType, state, lang) + } else { + return notFoundResponse(single(accountId), sessionState) + } + } + }) +} + +// When the request succeeds. +// swagger:response MailboxesForAllAccountsResponse200 +type SwaggerMailboxesForAllAccountsResponse200 struct { + // in: body + Body map[string][]jmap.Mailbox +} + +// swagger:route GET /groupware/accounts/all/mailboxes mailboxesforallaccounts mailbox +// Get the list of all the mailboxes of all accounts of a user, potentially filtering on the +// role of the mailboxes. 
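+//
+// The response body maps each account identifier to that account's mailboxes, for example
+// (illustrative account identifiers): {"account-a": [ ... ], "account-b": [ ... ]}.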
+// +// responses: +// +// 200: MailboxesForAllAccountsResponse200 +// 400: ErrorResponse400 +// 500: ErrorResponse500 +func (g *Groupware) GetMailboxesForAllAccounts(w http.ResponseWriter, r *http.Request) { + q := r.URL.Query() + var filter jmap.MailboxFilterCondition + + hasCriteria := false + role := q.Get(QueryParamMailboxSearchRole) + if role != "" { + filter.Role = role + hasCriteria = true + } + + g.respond(w, r, func(req Request) Response { + accountIds := req.AllAccountIds() + if len(accountIds) < 1 { + return noContentResponse(nil, "") + } + logger := log.From(req.logger.With().Array(logAccountId, log.SafeStringArray(accountIds))) + + subscribed, set, err := req.parseBoolParam(QueryParamMailboxSearchSubscribed, false) + if err != nil { + return errorResponse(accountIds, err) + } + if set { + filter.IsSubscribed = &subscribed + hasCriteria = true + } + + if hasCriteria { + mailboxesByAccountId, sessionState, state, lang, err := g.jmap.SearchMailboxes(accountIds, req.session, req.ctx, logger, req.language(), filter) + if err != nil { + return req.errorResponseFromJmap(accountIds, err) + } + return etagResponse(accountIds, sortMailboxesMap(mailboxesByAccountId), sessionState, MailboxResponseObjectType, state, lang) + } else { + mailboxesByAccountId, sessionState, state, lang, err := g.jmap.GetAllMailboxes(accountIds, req.session, req.ctx, logger, req.language()) + if err != nil { + return req.errorResponseFromJmap(accountIds, err) + } + return etagResponse(accountIds, sortMailboxesMap(mailboxesByAccountId), sessionState, MailboxResponseObjectType, state, lang) + } + }) +} + +func (g *Groupware) GetMailboxByRoleForAllAccounts(w http.ResponseWriter, r *http.Request) { + role := chi.URLParam(r, UriParamRole) + g.respond(w, r, func(req Request) Response { + accountIds := req.AllAccountIds() + if len(accountIds) < 1 { + return noContentResponse(nil, "") + } + logger := log.From(req.logger.With().Array(logAccountId, log.SafeStringArray(accountIds)).Str("role", role)) + + filter := jmap.MailboxFilterCondition{ + Role: role, + } + + mailboxesByAccountId, sessionState, state, lang, err := g.jmap.SearchMailboxes(accountIds, req.session, req.ctx, logger, req.language(), filter) + if err != nil { + return req.errorResponseFromJmap(accountIds, err) + } + return etagResponse(accountIds, sortMailboxesMap(mailboxesByAccountId), sessionState, MailboxResponseObjectType, state, lang) + }) +} + +// When the request succeeds. +// swagger:response MailboxChangesResponse200 +type SwaggerMailboxChangesResponse200 struct { + // in: body + Body *jmap.MailboxChanges +} + +// swagger:route GET /groupware/accounts/{account}/mailboxes/{mailbox}/changes mailbox mailboxchanges +// Get the changes that occured in a given mailbox since a certain state. 
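+//
+// The reference state is passed in the request header named by HeaderSince (typically a state
+// returned by an earlier mailbox request), and the number of reported changes can be capped
+// with the QueryParamMaxChanges query parameter.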
+// +// responses: +// +// 200: MailboxChangesResponse200 +// 400: ErrorResponse400 +// 500: ErrorResponse500 +func (g *Groupware) GetMailboxChanges(w http.ResponseWriter, r *http.Request) { + mailboxId := chi.URLParam(r, UriParamMailboxId) + sinceState := r.Header.Get(HeaderSince) + + g.respond(w, r, func(req Request) Response { + l := req.logger.With().Str(HeaderSince, sinceState) + + accountId, err := req.GetAccountIdForMail() + if err != nil { + return errorResponse(single(accountId), err) + } + l = l.Str(logAccountId, accountId) + + maxChanges, ok, err := req.parseUIntParam(QueryParamMaxChanges, 0) + if err != nil { + return errorResponse(single(accountId), err) + } + if ok { + l = l.Uint(QueryParamMaxChanges, maxChanges) + } + + logger := log.From(l) + + changes, sessionState, state, lang, jerr := g.jmap.GetMailboxChanges(accountId, req.session, req.ctx, logger, req.language(), mailboxId, sinceState, true, g.config.maxBodyValueBytes, maxChanges) + if jerr != nil { + return req.errorResponseFromJmap(single(accountId), jerr) + } + + return etagResponse(single(accountId), changes, sessionState, MailboxResponseObjectType, state, lang) + }) +} + +// When the request succeeds. +// swagger:response MailboxChangesForAllAccountsResponse200 +type SwaggerMailboxChangesForAllAccountsResponse200 struct { + // in: body + Body map[string]jmap.MailboxChanges +} + +// swagger:route GET /groupware/accounts/all/mailboxes/changes mailbox mailboxchangesforallaccounts +// Get the changes that occured in all the mailboxes of all accounts. +// +// responses: +// +// 200: MailboxChangesForAllAccountsResponse200 +// 400: ErrorResponse400 +// 500: ErrorResponse500 +func (g *Groupware) GetMailboxChangesForAllAccounts(w http.ResponseWriter, r *http.Request) { + g.respond(w, r, func(req Request) Response { + l := req.logger.With() + + allAccountIds := req.AllAccountIds() + l.Array(logAccountId, log.SafeStringArray(allAccountIds)) + + sinceStateMap, ok, err := req.parseMapParam(QueryParamSince) + if err != nil { + return errorResponse(allAccountIds, err) + } + if ok { + dict := zerolog.Dict() + for k, v := range sinceStateMap { + dict.Str(log.SafeString(k), log.SafeString(v)) + } + l = l.Dict(QueryParamSince, dict) + } + + maxChanges, ok, err := req.parseUIntParam(QueryParamMaxChanges, 0) + if err != nil { + return errorResponse(allAccountIds, err) + } + if ok { + l = l.Uint(QueryParamMaxChanges, maxChanges) + } + + logger := log.From(l) + + changesByAccountId, sessionState, state, lang, jerr := g.jmap.GetMailboxChangesForMultipleAccounts(allAccountIds, req.session, req.ctx, logger, req.language(), sinceStateMap, true, g.config.maxBodyValueBytes, maxChanges) + if jerr != nil { + return req.errorResponseFromJmap(allAccountIds, jerr) + } + + return etagResponse(allAccountIds, changesByAccountId, sessionState, MailboxResponseObjectType, state, lang) + }) +} + +func (g *Groupware) GetMailboxRoles(w http.ResponseWriter, r *http.Request) { + g.respond(w, r, func(req Request) Response { + l := req.logger.With() + allAccountIds := req.AllAccountIds() + l.Array(logAccountId, log.SafeStringArray(allAccountIds)) + logger := log.From(l) + + rolesByAccountId, sessionState, state, lang, jerr := g.jmap.GetMailboxRolesForMultipleAccounts(allAccountIds, req.session, req.ctx, logger, req.language()) + if jerr != nil { + return req.errorResponseFromJmap(allAccountIds, jerr) + } + + return etagResponse(allAccountIds, rolesByAccountId, sessionState, MailboxResponseObjectType, state, lang) + }) +} + +func (g *Groupware) UpdateMailbox(w 
http.ResponseWriter, r *http.Request) { + mailboxId := chi.URLParam(r, UriParamMailboxId) + + g.respond(w, r, func(req Request) Response { + l := req.logger.With().Str(UriParamMailboxId, log.SafeString(mailboxId)) + + accountId, err := req.GetAccountIdForMail() + if err != nil { + return errorResponse(single(accountId), err) + } + l = l.Str(logAccountId, accountId) + + var body jmap.MailboxChange + err = req.body(&body) + if err != nil { + return errorResponse(single(accountId), err) + } + logger := log.From(l) + + updated, sessionState, state, lang, jerr := g.jmap.UpdateMailbox(accountId, req.session, req.ctx, logger, req.language(), mailboxId, "", body) + if jerr != nil { + return req.errorResponseFromJmap(single(accountId), jerr) + } + + return etagResponse(single(accountId), updated, sessionState, MailboxResponseObjectType, state, lang) + }) +} + +func (g *Groupware) CreateMailbox(w http.ResponseWriter, r *http.Request) { + g.respond(w, r, func(req Request) Response { + l := req.logger.With() + accountId, err := req.GetAccountIdForMail() + if err != nil { + return errorResponse(single(accountId), err) + } + l = l.Str(logAccountId, accountId) + + var body jmap.MailboxChange + err = req.body(&body) + if err != nil { + return errorResponse(single(accountId), err) + } + logger := log.From(l) + + created, sessionState, state, lang, jerr := g.jmap.CreateMailbox(accountId, req.session, req.ctx, logger, req.language(), "", body) + if jerr != nil { + return req.errorResponseFromJmap(single(accountId), jerr) + } + + return etagResponse(single(accountId), created, sessionState, MailboxResponseObjectType, state, lang) + }) +} + +func (g *Groupware) DeleteMailbox(w http.ResponseWriter, r *http.Request) { + mailboxId := chi.URLParam(r, UriParamMailboxId) + mailboxIds := strings.Split(mailboxId, ",") + + g.respond(w, r, func(req Request) Response { + l := req.logger.With() + accountId, err := req.GetAccountIdForMail() + if err != nil { + return errorResponse(single(accountId), err) + } + l = l.Str(logAccountId, accountId) + + if len(mailboxIds) < 1 { + return noContentResponse(single(accountId), req.session.State) + } + + l = l.Array(UriParamMailboxId, log.SafeStringArray(mailboxIds)) + logger := log.From(l) + + deleted, sessionState, state, lang, jerr := g.jmap.DeleteMailboxes(accountId, req.session, req.ctx, logger, req.language(), "", mailboxIds) + if jerr != nil { + return req.errorResponseFromJmap(single(accountId), jerr) + } + + return etagResponse(single(accountId), deleted, sessionState, MailboxResponseObjectType, state, lang) + }) +} + +var mailboxRoleSortOrderScore = map[string]int{ + jmap.JmapMailboxRoleInbox: 100, + jmap.JmapMailboxRoleDrafts: 200, + jmap.JmapMailboxRoleSent: 300, + jmap.JmapMailboxRoleJunk: 400, + jmap.JmapMailboxRoleTrash: 500, +} + +func scoreMailbox(m jmap.Mailbox) int { + if score, ok := mailboxRoleSortOrderScore[m.Role]; ok { + return score + } + return 1000 +} + +func sortMailboxesMap[K comparable](mailboxesByAccountId map[K][]jmap.Mailbox) map[K][]jmap.Mailbox { + sortedByAccountId := make(map[K][]jmap.Mailbox, len(mailboxesByAccountId)) + for accountId, unsorted := range mailboxesByAccountId { + mailboxes := make([]jmap.Mailbox, len(unsorted)) + copy(mailboxes, unsorted) + slices.SortFunc(mailboxes, compareMailboxes) + sortedByAccountId[accountId] = mailboxes + } + return sortedByAccountId +} + +func sortMailboxSlice(s []jmap.Mailbox) []jmap.Mailbox { + r := make([]jmap.Mailbox, len(s)) + copy(r, s) + slices.SortFunc(r, compareMailboxes) + return r +} + +func 
compareMailboxes(a, b jmap.Mailbox) int { + // first, use the defined order: + // Defines the sort order of Mailboxes when presented in the client’s UI, so it is consistent between devices. + // Default value: 0 + // The number MUST be an integer in the range 0 <= sortOrder < 2^31. + // A Mailbox with a lower order should be displayed before a Mailbox with a higher order + // (that has the same parent) in any Mailbox listing in the client’s UI. + sa := 0 + if a.SortOrder != nil { + sa = *a.SortOrder + } + sb := 0 + if b.SortOrder != nil { + sb = *b.SortOrder + } + r := sa - sb + if r != 0 { + return r + } + + // the JMAP specification says this: + // > Mailboxes with equal order SHOULD be sorted in alphabetical order by name. + // > The sorting should take into account locale-specific character order convention. + // but we feel like users would rather expect standard folders to come first, + // in an order that is common across MUAs: + // - inbox + // - drafts + // - sent + // - junk + // - trash + // - *everything else* + sa = scoreMailbox(a) + sb = scoreMailbox(b) + r = sa - sb + if r != 0 { + return r + } + + // now we have "everything else", let's use alphabetical order here: + return strings.Compare(a.Name, b.Name) +} diff --git a/services/groupware/pkg/groupware/groupware_api_quota.go b/services/groupware/pkg/groupware/groupware_api_quota.go new file mode 100644 index 0000000000..a57072bce3 --- /dev/null +++ b/services/groupware/pkg/groupware/groupware_api_quota.go @@ -0,0 +1,86 @@ +package groupware + +import ( + "net/http" + + "github.com/opencloud-eu/opencloud/pkg/jmap" + "github.com/opencloud-eu/opencloud/pkg/log" +) + +// When the request succeeds. +// swagger:response GetQuotaResponse200 +type SwaggerGetQuotaResponse200 struct { + // in: body + Body []jmap.Quota +} + +// swagger:route GET /groupware/accounts/{account}/quota quota get_quota +// Get quota limits. +// +// responses: +// +// 200: GetQuotaResponse200 +// 400: ErrorResponse400 +// 500: ErrorResponse500 +func (g *Groupware) GetQuota(w http.ResponseWriter, r *http.Request) { + g.respond(w, r, func(req Request) Response { + accountId, err := req.GetAccountIdForQuota() + if err != nil { + return errorResponse(single(accountId), err) + } + logger := log.From(req.logger.With().Str(logAccountId, accountId)) + + res, sessionState, state, lang, jerr := g.jmap.GetQuotas(single(accountId), req.session, req.ctx, logger, req.language()) + if jerr != nil { + return req.errorResponseFromJmap(single(accountId), jerr) + } + for _, v := range res { + return etagResponse(single(accountId), v.List, sessionState, QuotaResponseObjectType, state, lang) + } + return notFoundResponse(single(accountId), sessionState) + }) +} + +type AccountQuota struct { + Quotas []jmap.Quota `json:"quotas,omitempty"` + State jmap.State `json:"state"` +} + +// When the request succeeds. +// swagger:response GetQuotaForAllAccountsResponse200 +type SwaggerGetQuotaForAllAccountsResponse200 struct { + // in: body + Body map[string]AccountQuota +} + +// swagger:route GET /groupware/accounts/all/quota quota get_quota_for_all_accounts +// Get quota limits for all accounts. 
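+//
+// The response body maps each account identifier to an AccountQuota object, for example
+// (illustrative values):
+//
+//	{"account-a": {"quotas": [ ... ], "state": "s-1"}, "account-b": {"state": "s-2"}}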
+// +// responses: +// +// 200: GetQuotaForAllAccountsResponse200 +// 400: ErrorResponse400 +// 500: ErrorResponse500 +func (g *Groupware) GetQuotaForAllAccounts(w http.ResponseWriter, r *http.Request) { + g.respond(w, r, func(req Request) Response { + accountIds := req.AllAccountIds() + if len(accountIds) < 1 { + return noContentResponse(accountIds, "") + } + logger := log.From(req.logger.With().Array(logAccountId, log.SafeStringArray(accountIds))) + + res, sessionState, state, lang, jerr := g.jmap.GetQuotas(accountIds, req.session, req.ctx, logger, req.language()) + if jerr != nil { + return req.errorResponseFromJmap(accountIds, jerr) + } + + result := make(map[string]AccountQuota, len(res)) + for accountId, accountQuotas := range res { + result[accountId] = AccountQuota{ + State: accountQuotas.State, + Quotas: accountQuotas.List, + } + } + return etagResponse(accountIds, result, sessionState, QuotaResponseObjectType, state, lang) + }) +} diff --git a/services/groupware/pkg/groupware/groupware_api_tasklists.go b/services/groupware/pkg/groupware/groupware_api_tasklists.go new file mode 100644 index 0000000000..221d6184a1 --- /dev/null +++ b/services/groupware/pkg/groupware/groupware_api_tasklists.go @@ -0,0 +1,107 @@ +package groupware + +import ( + "net/http" + + "github.com/go-chi/chi/v5" + "github.com/opencloud-eu/opencloud/pkg/jmap" +) + +// When the request succeeds. +// swagger:response GetTaskLists200 +type SwaggerGetTaskLists200 struct { + // in: body + Body []jmap.TaskList +} + +// swagger:route GET /groupware/accounts/{account}/tasklists tasklist tasklists +// Get all tasklists of an account. +// +// responses: +// +// 200: GetTaskLists200 +// 400: ErrorResponse400 +// 404: ErrorResponse404 +// 500: ErrorResponse500 +func (g *Groupware) GetTaskLists(w http.ResponseWriter, r *http.Request) { + g.respond(w, r, func(req Request) Response { + ok, accountId, resp := req.needTaskWithAccount() + if !ok { + return resp + } + var _ string = accountId + + return etagResponse(single(accountId), AllTaskLists, req.session.State, TaskListResponseObjectType, TaskListsState, "") + }) +} + +// When the request succeeds. +// swagger:response GetTaskListById200 +type SwaggerGetTaskListById200 struct { + // in: body + Body struct { + *jmap.TaskList + } +} + +// swagger:route GET /groupware/accounts/{account}/tasklists/{tasklistid} tasklist tasklist_by_id +// Get a tasklist by its identifier. +// +// responses: +// +// 200: GetTaskListById200 +// 400: ErrorResponse400 +// 404: ErrorResponse404 +// 500: ErrorResponse500 +func (g *Groupware) GetTaskListById(w http.ResponseWriter, r *http.Request) { + g.respond(w, r, func(req Request) Response { + ok, accountId, resp := req.needTaskWithAccount() + if !ok { + return resp + } + var _ string = accountId + + tasklistId := chi.URLParam(r, UriParamTaskListId) + // TODO replace with proper implementation + for _, tasklist := range AllTaskLists { + if tasklist.Id == tasklistId { + return response(single(accountId), tasklist, req.session.State, "") + } + } + return etagNotFoundResponse(single(accountId), req.session.State, TaskListResponseObjectType, TaskListsState, "") + }) +} + +// When the request succeeds. +// swagger:response GetTasksInTaskList200 +type SwaggerGetTasksInTaskList200 struct { + // in: body + Body []jmap.Task +} + +// swagger:route GET /groupware/accounts/{account}/tasklists/{tasklistid}/tasks task tasks_in_tasklist +// Get all the tasks in a tasklist of an account by its identifier. 
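+//
+// Note: tasks are currently served from the static placeholder data in TaskMapByTaskListId
+// (see the TODO in the handler); an unknown tasklist identifier yields a not-found response.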
+// +// responses: +// +// 200: GetTasksInTaskList200 +// 400: ErrorResponse400 +// 404: ErrorResponse404 +// 500: ErrorResponse500 +func (g *Groupware) GetTasksInTaskList(w http.ResponseWriter, r *http.Request) { + g.respond(w, r, func(req Request) Response { + ok, accountId, resp := req.needTaskWithAccount() + if !ok { + return resp + } + var _ string = accountId + + tasklistId := chi.URLParam(r, UriParamTaskListId) + // TODO replace with proper implementation + tasks, ok := TaskMapByTaskListId[tasklistId] + if !ok { + return notFoundResponse(single(accountId), req.session.State) + } + return etagResponse(single(accountId), tasks, req.session.State, TaskResponseObjectType, TaskState, "") + }) +} diff --git a/services/groupware/pkg/groupware/groupware_api_vacation.go b/services/groupware/pkg/groupware/groupware_api_vacation.go new file mode 100644 index 0000000000..1e4db2938b --- /dev/null +++ b/services/groupware/pkg/groupware/groupware_api_vacation.go @@ -0,0 +1,89 @@ +package groupware + +import ( + "net/http" + + "github.com/opencloud-eu/opencloud/pkg/jmap" + "github.com/opencloud-eu/opencloud/pkg/log" +) + +// When the request succeeds. +// swagger:response GetVacationResponse200 +type SwaggerGetVacationResponse200 struct { + // in: body + Body struct { + *jmap.VacationResponseGetResponse + } +} + +// swagger:route GET /groupware/accounts/{account}/vacation vacation getvacation +// Get vacation notice information. +// +// A vacation response sends an automatic reply when a message is delivered to the mail store, informing the original +// sender that their message may not be read for some time. +// +// The VacationResponse object represents the state of vacation-response-related settings for an account. +// +// responses: +// +// 200: GetVacationResponse200 +// 400: ErrorResponse400 +// 500: ErrorResponse500 +func (g *Groupware) GetVacation(w http.ResponseWriter, r *http.Request) { + g.respond(w, r, func(req Request) Response { + accountId, err := req.GetAccountIdForVacationResponse() + if err != nil { + return errorResponse(single(accountId), err) + } + logger := log.From(req.logger.With().Str(logAccountId, accountId)) + + res, sessionState, state, lang, jerr := g.jmap.GetVacationResponse(accountId, req.session, req.ctx, logger, req.language()) + if jerr != nil { + return req.errorResponseFromJmap(single(accountId), jerr) + } + return etagResponse(single(accountId), res, sessionState, VacationResponseResponseObjectType, state, lang) + }) +} + +// When the request succeeds. +// swagger:response SetVacationResponse200 +type SwaggerSetVacationResponse200 struct { + // in: body + Body struct { + *jmap.VacationResponse + } +} + +// swagger:route PUT /groupware/accounts/{account}/vacation vacation setvacation +// Set the vacation notice information. +// +// A vacation response sends an automatic reply when a message is delivered to the mail store, informing the original +// sender that their message may not be read for some time. 
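+//
+// The request body is decoded into a jmap.VacationResponsePayload. An illustrative payload,
+// assuming the usual RFC 8621 VacationResponse fields (the exact accepted shape is defined by
+// that Go type), could look like:
+//
+//	{"isEnabled": true, "fromDate": "2025-08-01T00:00:00Z", "subject": "Out of office"}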
+//
+// responses:
+//
+//	200: SetVacationResponse200
+//	400: ErrorResponse400
+//	500: ErrorResponse500
+func (g *Groupware) SetVacation(w http.ResponseWriter, r *http.Request) {
+	g.respond(w, r, func(req Request) Response {
+		accountId, err := req.GetAccountIdForVacationResponse()
+		if err != nil {
+			return errorResponse(single(accountId), err)
+		}
+		logger := log.From(req.logger.With().Str(logAccountId, accountId))
+
+		var body jmap.VacationResponsePayload
+		err = req.body(&body)
+		if err != nil {
+			return errorResponse(single(accountId), err)
+		}
+
+		res, sessionState, state, lang, jerr := g.jmap.SetVacationResponse(accountId, body, req.session, req.ctx, logger, req.language())
+		if jerr != nil {
+			return req.errorResponseFromJmap(single(accountId), jerr)
+		}
+
+		return etagResponse(single(accountId), res, sessionState, VacationResponseResponseObjectType, state, lang)
+	})
+}
diff --git a/services/groupware/pkg/groupware/groupware_dns.go b/services/groupware/pkg/groupware/groupware_dns.go
new file mode 100644
index 0000000000..15ef97d1e2
--- /dev/null
+++ b/services/groupware/pkg/groupware/groupware_dns.go
@@ -0,0 +1,167 @@
+package groupware
+
+import (
+	"errors"
+	"net"
+	"net/url"
+	"slices"
+	"strconv"
+	"strings"
+	"time"
+
+	"github.com/miekg/dns"
+)
+
+var (
+	errDnsNoServerToAnswer = errors.New("no name server to resolve") // TODO better error message
+)
+
+type DnsSessionUrlResolver struct {
+	defaultSessionUrlSupplier func(string) (*url.URL, *GroupwareError)
+	defaultDomain             string
+	domainGreenList           []string
+	domainRedList             []string
+	config                    *dns.ClientConfig
+	client                    *dns.Client
+}
+
+func NewDnsSessionUrlResolver(
+	defaultSessionUrlSupplier func(string) (*url.URL, *GroupwareError),
+	defaultDomain string,
+	config *dns.ClientConfig,
+	domainGreenList []string,
+	domainRedList []string,
+	dialTimeout time.Duration,
+	readTimeout time.Duration,
+) (DnsSessionUrlResolver, error) {
+	// TODO the whole udp or tcp dialer configuration, see https://github.com/miekg/exdns/blob/master/q/q.go
+
+	c := &dns.Client{
+		DialTimeout: dialTimeout,
+		ReadTimeout: readTimeout,
+	}
+
+	return DnsSessionUrlResolver{
+		defaultSessionUrlSupplier: defaultSessionUrlSupplier,
+		defaultDomain:             defaultDomain,
+		domainGreenList:           domainGreenList,
+		domainRedList:             domainRedList,
+		config:                    config,
+		client:                    c,
+	}, nil
+}
+
+func (d DnsSessionUrlResolver) isGreenListed(domain string) bool {
+	if d.domainGreenList == nil {
+		return true
+	}
+	// normalize the domain name by stripping a potential "." at the end
+	if strings.HasSuffix(domain, ".") {
+		domain = domain[0 : len(domain)-1]
+	}
+	return slices.Contains(d.domainGreenList, domain)
+}
+
+func (d DnsSessionUrlResolver) isRedListed(domain string) bool {
+	if d.domainRedList == nil {
+		return false
+	}
+	// normalize the domain name by stripping a potential "." at the end
+	if strings.HasSuffix(domain, ".") {
+		domain = domain[0 : len(domain)-1]
+	}
+	return slices.Contains(d.domainRedList, domain)
+}
+
+func (d DnsSessionUrlResolver) Resolve(username string) (*url.URL, *GroupwareError) {
+	// heuristic to detect whether the username is an email address
+	parts := strings.Split(username, "@")
+	domain := d.defaultDomain
+	if len(parts) <= 1 {
+		// it's not, but do we have a defaultDomain configured that we should use
+		// nevertheless then?
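+		// Illustrative example: for a plain username such as "alice" (no "@") the lookup domain
+		// falls back to the configured defaultDomain; if no defaultDomain is configured either,
+		// the static session URL supplier is used directly and no DNS lookup takes place.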
+ if d.defaultDomain == "" { + // we don't, then let's fall back to the static session URL instead + return d.defaultSessionUrlSupplier(username) + } + } else { + domain = parts[len(parts)-1] + if !d.isGreenListed(domain) { + return nil, &ErrorUsernameEmailDomainIsNotGreenlisted + } + if d.isRedListed(domain) { + return nil, &ErrorUsernameEmailDomainIsRedlisted + } + } + + // https://jmap.io/spec-core.html#service-autodiscovery + // + // A JMAP-supporting host for the domain example.com SHOULD publish a + // SRV record _jmap._tcp.example.com + // that gives a hostname and port (usually port 443). + // + // The JMAP Session resource is then https://${hostname}[:${port}]/.well-known/jmap + // (following any redirects). + + // we need a fully qualified domain name: must end with a dot + name := dns.Fqdn("_jmap._tcp." + domain) + + msg := &dns.Msg{ + MsgHdr: dns.MsgHdr{RecursionDesired: true}, + Question: make([]dns.Question, 1), + } + msg.SetQuestion(name, dns.TypeSRV) + + r, err := d.dnsQuery(d.client, msg) + if err != nil { + // TODO error + } + if r == nil || r.Rcode == dns.RcodeNameError { + // TODO domain not found + } + + for _, ans := range r.Answer { + switch t := ans.(type) { + case *dns.SRV: + scheme := "https" + host := t.Target // TODO need to check whether the hostname is indeed in t.Target? + port := t.Port + if (scheme == "https" && port != 443) || (scheme == "http" && port != 80) { + host = net.JoinHostPort(host, strconv.Itoa(int(port))) + } + + u := &url.URL{ + Scheme: scheme, + Host: host, + Path: "/.well-known/jmap", + } + + return u, nil + } + } + + return d.defaultSessionUrlSupplier(username) +} + +func (d DnsSessionUrlResolver) dnsQuery(c *dns.Client, msg *dns.Msg) (*dns.Msg, error) { + for _, server := range d.config.Servers { + address := "" + // if the server is IPv6, it is already expected to be wrapped in [brackets] when + // the configuration comes from /etc/resolv.conf and has been parsed using + // dns.ClientConfigFromFile, but let's check to make sure + if strings.HasPrefix(server, "[") && strings.HasSuffix(server, "]") { + address = server + ":" + d.config.Port + } else { + // this function will take care of properly wrapping in [brackets] if it's + // an IPv6 address string: + address = net.JoinHostPort(server, d.config.Port) + } + + r, _, err := c.Exchange(msg, address) + if err != nil { + return nil, err + } + if r == nil || r.Rcode == dns.RcodeNameError || r.Rcode == dns.RcodeSuccess { + return r, err + } + } + return nil, errDnsNoServerToAnswer +} diff --git a/services/groupware/pkg/groupware/groupware_docs.go b/services/groupware/pkg/groupware/groupware_docs.go new file mode 100644 index 0000000000..c4a759ae30 --- /dev/null +++ b/services/groupware/pkg/groupware/groupware_docs.go @@ -0,0 +1,48 @@ +// OpenCloud Groupware API +// +// Documentation for the OpenCloud Groupware API +// +// Schemes: https +// BasePath: / +// Version: 1.0.0 +// Host: +// +// Consumes: +// - application/json +// +// Produces: +// - application/json +// +// Security: +// - bearer +// +// swagger:meta +package groupware + +// When the request contains invalid parameters. +// swagger:response ErrorResponse400 +type SwaggerErrorResponse400 struct { + // in: body + Body struct { + *ErrorResponse + } +} + +// When the requested object does not exist. +// swagger:response ErrorResponse404 +type SwaggerErrorResponse404 struct { +} + +// When the server was unable to complete the request. 
+// swagger:response ErrorResponse500 +type SwaggerErrorResponse500 struct { + // in: body + Body struct { + *ErrorResponse + } +} + +// When the request succeeds. +// swagger:response Success204 +type SwaggerSuccess204 struct { +} diff --git a/services/groupware/pkg/groupware/groupware_error.go b/services/groupware/pkg/groupware/groupware_error.go new file mode 100644 index 0000000000..1c1d177abd --- /dev/null +++ b/services/groupware/pkg/groupware/groupware_error.go @@ -0,0 +1,648 @@ +package groupware + +import ( + "context" + "net/http" + "strconv" + + chimiddleware "github.com/go-chi/chi/v5/middleware" + "github.com/go-chi/render" + "github.com/google/uuid" + "github.com/opencloud-eu/opencloud/pkg/jmap" +) + +type Link struct { + // A string whose value is a URI-reference [RFC3986 Section 4.1] pointing to the link’s target. + // + // [RFC3986 Section 4.1]: https://datatracker.ietf.org/doc/html/rfc3986#section-4.1 + Href string `json:"href"` + // A string indicating the link’s relation type. The string MUST be a valid link relation type. + // required: false + Rel string `json:"rel,omitempty"` + // A string which serves as a label for the destination of a link such that it can be used as a human-readable identifier (e.g., a menu entry). + // required: false + Title string `json:"title,omitempty"` + // A string indicating the media type of the link’s target. + // required: false + Type string `json:"type,omitempty"` + // A meta object containing non-standard meta-information about the link. + // required: false + Meta map[string]any `json:"meta,omitempty"` +} + +type ErrorLinks struct { + // A link that leads to further details about this particular occurrence of the problem. + // When dereferenced, this URI SHOULD return a human-readable description of the error. + // This is either a string containing an URL, or a Link object. + About any `json:"about,omitempty"` + // A link that identifies the type of error that this particular error is an instance of. + // This URI SHOULD be dereferenceable to a human-readable explanation of the general error. + // This is either a string containing an URL, or a Link object. + Type any `json:"type"` +} + +type ErrorSource struct { + // A JSON Pointer [RFC6901] to the value in the request document that caused the error + // (e.g. "/data" for a primary data object, or "/data/attributes/title" for a specific attribute). + // This MUST point to a value in the request document that exists; if it doesn’t, the client SHOULD simply ignore the pointer. + // + // [RFC6901]: https://datatracker.ietf.org/doc/html/rfc6901 + Pointer string `json:"pointer,omitempty"` + // A string indicating which URI query parameter caused the error. + Parameter string `json:"parameter,omitempty"` + // A string indicating the name of a single request header which caused the error. + Header string `json:"header,omitempty"` +} + +// [Error] describes an error. +// +// [Error]: https://jsonapi.org/format/#error-objects +type Error struct { + // A unique identifier for this particular occurrence of the problem + Id string `json:"id"` + // Further detail links about the error. + // required: false + Links *ErrorLinks `json:"links,omitempty"` + // swagger:ignore + NumStatus int `json:"-"` + // The HTTP status code applicable to this problem, expressed as a string value. + Status string `json:"status"` + // An application-specific error code, expressed as a string value. 
+ Code string `json:"code"` + // A short, human-readable summary of the problem that SHOULD NOT change from occurrence to occurrence of the problem. + Title string `json:"title,omitempty"` + // A human-readable explanation specific to this occurrence of the problem. + Detail string `json:"detail,omitempty"` + // An object containing references to the primary source of the error. + Source *ErrorSource `json:"source,omitempty"` + // A meta object containing non-standard meta-information about the error. + Meta map[string]any `json:"meta,omitempty"` +} + +// swagger:response ErrorResponse +type ErrorResponse struct { + // List of error objects + Errors []Error `json:"errors"` +} + +var _ render.Renderer = ErrorResponse{} + +func (e ErrorResponse) Render(w http.ResponseWriter, r *http.Request) error { + w.Header().Add("Content-Type", ContentTypeJsonApi) + if len(e.Errors) > 0 { + render.Status(r, e.Errors[0].NumStatus) + } else { + render.Status(r, http.StatusInternalServerError) + } + return nil +} + +const ( + // The [JSON:API] Content Type for errors + // + // [JSON:API]: https://jsonapi.org/ + ContentTypeJsonApi = "application/vnd.api+json" +) + +type GroupwareError struct { + Status int + Code string + Title string + Detail string +} + +func groupwareErrorFromJmap(j jmap.Error) *GroupwareError { + if j == nil { + return nil + } + switch j.Code() { + case jmap.JmapErrorAuthenticationFailed: + return &ErrorForbidden + case jmap.JmapErrorInvalidHttpRequest: + return &ErrorInvalidBackendRequest + case jmap.JmapErrorServerResponse: + return &ErrorServerResponse + case jmap.JmapErrorReadingResponseBody: + return &ErrorReadingResponse + case jmap.JmapErrorDecodingResponseBody: + return &ErrorProcessingResponse + case jmap.JmapErrorEncodingRequestBody: + return &ErrorEncodingRequestBody + case jmap.JmapErrorCreatingRequest: + return &ErrorCreatingRequest + case jmap.JmapErrorSendingRequest: + return &ErrorSendingRequest + case jmap.JmapErrorInvalidSessionResponse: + return &ErrorInvalidSessionResponse + case jmap.JmapErrorInvalidJmapRequestPayload: + return &ErrorInvalidRequestPayload + case jmap.JmapErrorInvalidJmapResponsePayload: + return &ErrorInvalidResponsePayload + case jmap.JmapErrorUnspecifiedType, jmap.JmapErrorUnknownMethod, jmap.JmapErrorInvalidArguments, jmap.JmapErrorInvalidResultReference: + return &ErrorInvalidGroupwareRequest + case jmap.JmapErrorServerUnavailable: + return &ErrorServerUnavailable + case jmap.JmapErrorServerFail: + return &ErrorServerFailure + case jmap.JmapErrorForbidden: + return &ErrorForbiddenOperation + case jmap.JmapErrorAccountNotFound: + return &ErrorAccountNotFound + case jmap.JmapErrorAccountNotSupportedByMethod: + return &ErrorAccountNotSupportedByMethod + case jmap.JmapErrorAccountReadOnly: + return &ErrorAccountReadOnly + default: + return &ErrorGeneric + } +} + +const ( + ErrorCodeGeneric = "ERRGEN" + ErrorCodeInvalidAuthentication = "AUTINV" + ErrorCodeMissingAuthentication = "AUTMIS" + ErrorCodeForbiddenGeneric = "AUTFOR" + ErrorCodeInvalidBackendRequest = "INVREQ" + ErrorCodeServerResponse = "SRVRSP" + ErrorCodeStreamingResponse = "SRVRST" + ErrorCodeServerReadingResponse = "SRVRRE" + ErrorCodeServerDecodingResponseBody = "SRVDRB" + ErrorCodeEncodingRequestBody = "ENCREQ" + ErrorCodeCreatingRequest = "CREREQ" + ErrorCodeSendingRequest = "SNDREQ" + ErrorCodeInvalidSessionResponse = "INVSES" + ErrorCodeInvalidRequestPayload = "INVRQP" + ErrorCodeInvalidResponsePayload = "INVRSP" + ErrorCodeInvalidRequestParameter = "INVPAR" + 
ErrorCodeMissingMandatoryRequestParameter = "MISMPA" + ErrorCodeInvalidRequestBody = "INVBDY" + ErrorCodeNonExistingAccount = "INVACC" + ErrorCodeIndeterminateAccount = "INDACC" + ErrorCodeApiInconsistency = "APIINC" + ErrorCodeInvalidUserRequest = "INVURQ" + ErrorCodeUsernameEmailDomainNotGreenListed = "UEDGRE" + ErrorCodeUsernameEmailDomainRedListed = "UEDRED" + ErrorCodeInvalidGroupwareRequest = "GPRERR" + ErrorCodeServerUnavailable = "SRVUNA" + ErrorCodeServerFailure = "SRVFLR" + ErrorCodeForbiddenOperation = "FRBOPR" + ErrorCodeAccountNotFound = "ACCNFD" + ErrorCodeAccountNotSupportedByMethod = "ACCNSM" + ErrorCodeAccountReadOnly = "ACCRDO" + ErrorCodeMissingCalendarsSessionCapability = "MSCCAL" + ErrorCodeMissingCalendarsAccountCapability = "MACCAL" + ErrorCodeMissingContactsSessionCapability = "MSCCON" + ErrorCodeMissingContactsAccountCapability = "MACCON" + ErrorCodeMissingTasksSessionCapability = "MSCTSK" + ErrorCodeMissingTaskAccountCapability = "MACTSK" + ErrorCodeFailedToDeleteEmail = "DELEML" + ErrorCodeFailedToDeleteSomeIdentities = "DELSID" + ErrorCodeFailedToSanitizeEmail = "FSANEM" + ErrorCodeFailedToDeleteContact = "DELCNT" + ErrorCodeNoMailboxWithDraftRole = "NMBXDR" + ErrorCodeNoMailboxWithSentRole = "NMBXSE" +) + +var ( + ErrorGeneric = GroupwareError{ + Status: http.StatusInternalServerError, + Code: ErrorCodeGeneric, + Title: "Unspecific Error", + Detail: "Error without a specific description.", + } + ErrorInvalidAuthentication = GroupwareError{ + Status: http.StatusUnauthorized, + Code: ErrorCodeMissingAuthentication, + Title: "Invalid Authentication", + Detail: "Failed to determine the authentication credentials.", + } + ErrorMissingAuthentication = GroupwareError{ + Status: http.StatusUnauthorized, + Code: ErrorCodeMissingAuthentication, + Title: "Missing Authentication", + Detail: "No authentication credentials were provided.", + } + ErrorForbidden = GroupwareError{ + Status: http.StatusForbidden, + Code: ErrorCodeForbiddenGeneric, + Title: "Invalid Authentication", + Detail: "Authentication credentials were provided but are either invalid or not authorized to perform the request operation.", + } + ErrorInvalidBackendRequest = GroupwareError{ + Status: http.StatusInternalServerError, + Code: ErrorCodeInvalidBackendRequest, + Title: "Invalid Request", + Detail: "The request that was meant to be sent to the mail server is invalid, which might be caused by configuration issues.", + } + ErrorServerResponse = GroupwareError{ + Status: http.StatusServiceUnavailable, + Code: ErrorCodeServerResponse, + Title: "Server responds with an Error", + Detail: "The mail server responded with an error.", + } + ErrorReadingResponse = GroupwareError{ + Status: http.StatusInternalServerError, + Code: ErrorCodeServerResponse, + Title: "Server Response Body could not be decoded", + Detail: "The mail server response body could not be decoded.", + } + ErrorStreamingResponse = GroupwareError{ + Status: http.StatusInternalServerError, + Code: ErrorCodeStreamingResponse, + Title: "Server Response Body could not be streamed", + Detail: "The mail server response body could not be streamed.", + } + ErrorProcessingResponse = GroupwareError{ + Status: http.StatusInternalServerError, + Code: ErrorCodeServerResponse, + Title: "Server Response Body could not be decoded", + Detail: "The mail server response body could not be decoded.", + } + ErrorEncodingRequestBody = GroupwareError{ + Status: http.StatusInternalServerError, + Code: ErrorCodeEncodingRequestBody, + Title: "Failed to encode the 
Request Body", + Detail: "Failed to encode the body of the request to be sent to the mail server.", + } + ErrorCreatingRequest = GroupwareError{ + Status: http.StatusInternalServerError, + Code: ErrorCodeCreatingRequest, + Title: "Failed to create the Request", + Detail: "Failed to create the request to be sent to the mail server.", + } + ErrorSendingRequest = GroupwareError{ + Status: http.StatusInternalServerError, + Code: ErrorCodeSendingRequest, + Title: "Failed to send the Request", + Detail: "Failed to send the request to the mail server.", + } + ErrorInvalidSessionResponse = GroupwareError{ + Status: http.StatusInternalServerError, + Code: ErrorCodeInvalidSessionResponse, + Title: "Invalid JMAP Session Response", + Detail: "The JMAP session response that was provided by the mail server is invalid.", + } + ErrorInvalidRequestPayload = GroupwareError{ + Status: http.StatusInternalServerError, + Code: ErrorCodeInvalidRequestPayload, + Title: "Invalid Request Payload", + Detail: "The request to the mail server is invalid.", + } + ErrorInvalidResponsePayload = GroupwareError{ + Status: http.StatusInternalServerError, + Code: ErrorCodeInvalidResponsePayload, + Title: "Invalid Response Payload", + Detail: "The payload of the response received from the mail server is invalid.", + } + ErrorInvalidRequestParameter = GroupwareError{ + Status: http.StatusBadRequest, + Code: ErrorCodeInvalidRequestParameter, + Title: "Invalid Request Parameter", + Detail: "At least one of the parameters in the request is invalid.", + } + ErrorMissingMandatoryRequestParameter = GroupwareError{ + Status: http.StatusBadRequest, + Code: ErrorCodeMissingMandatoryRequestParameter, + Title: "Missing Mandatory Request Parameter", + Detail: "A mandatory request parameter is missing.", + } + ErrorInvalidRequestBody = GroupwareError{ + Status: http.StatusBadRequest, + Code: ErrorCodeInvalidRequestBody, + Title: "Invalid Request Body", + Detail: "The body of the request is invalid.", + } + ErrorInvalidUserRequest = GroupwareError{ + Status: http.StatusBadRequest, + Code: ErrorCodeInvalidUserRequest, + Title: "Invalid Request", + Detail: "The request is invalid.", + } + ErrorIndeterminateAccount = GroupwareError{ + Status: http.StatusBadRequest, + Code: ErrorCodeNonExistingAccount, + Title: "Invalid Account Parameter", + Detail: "The account the request is for does not exist.", + } + ErrorNonExistingAccount = GroupwareError{ + Status: http.StatusBadRequest, + Code: ErrorCodeIndeterminateAccount, + Title: "Failed to determine Account", + Detail: "The account the request is for could not be determined.", + } + ErrorApiInconsistency = GroupwareError{ + Status: http.StatusInternalServerError, + Code: ErrorCodeApiInconsistency, + Title: "API Inconsistency", + Detail: "Internal APIs returned unexpected data.", + } + ErrorUsernameEmailDomainIsNotGreenlisted = GroupwareError{ + Status: http.StatusUnauthorized, + Code: ErrorCodeUsernameEmailDomainNotGreenListed, + Title: "Domain is not greenlisted", + Detail: "The username email address domain is not greenlisted.", + } + ErrorUsernameEmailDomainIsRedlisted = GroupwareError{ + Status: http.StatusUnauthorized, + Code: ErrorCodeUsernameEmailDomainRedListed, + Title: "Domain is redlisted", + Detail: "The username email address domain is redlisted.", + } + ErrorInvalidGroupwareRequest = GroupwareError{ + Status: http.StatusInternalServerError, + Code: ErrorCodeInvalidGroupwareRequest, + Title: "Internal Request Error", + Detail: "The request constructed by the Groupware is regarded as 
invalid by the Mail server.", + } + ErrorServerUnavailable = GroupwareError{ + Status: http.StatusServiceUnavailable, + Code: ErrorCodeServerUnavailable, + Title: "Mail Server is unavailable", + Detail: "The Mail Server is currently unable to process the request.", + } + ErrorServerFailure = GroupwareError{ + Status: http.StatusInternalServerError, + Code: ErrorCodeServerFailure, + Title: "Mail Server is unable to process the Request", + Detail: "The Mail Server is unable to process the request.", + } + ErrorForbiddenOperation = GroupwareError{ + Status: http.StatusForbidden, + Code: ErrorCodeForbiddenOperation, + Title: "The Operation is forbidden by the Mail Server", + Detail: "The Mail Server refuses to perform the request.", + } + ErrorAccountNotFound = GroupwareError{ + Status: http.StatusNotFound, + Code: ErrorCodeAccountNotFound, + Title: "The referenced Account does not exist", + Detail: "The Account that was referenced in the request does not exist.", + } + ErrorAccountNotSupportedByMethod = GroupwareError{ + Status: http.StatusForbidden, + Code: ErrorCodeAccountNotSupportedByMethod, + Title: "The referenced Account does not support the requested method", + Detail: "The Account that was referenced in the request does not support the requested method or data type.", + } + ErrorAccountReadOnly = GroupwareError{ + Status: http.StatusForbidden, + Code: ErrorCodeAccountReadOnly, + Title: "The referenced Account is read-only", + Detail: "The Account that was referenced in the request only supports read-only operations.", + } + ErrorMissingCalendarsSessionCapability = GroupwareError{ + Status: http.StatusExpectationFailed, + Code: ErrorCodeMissingCalendarsSessionCapability, + Title: "Session is missing the calendar capability '" + jmap.JmapCalendars + "'", + Detail: "The JMAP Session of the user does not have the required capability '" + jmap.JmapCalendars + "'.", + } + ErrorMissingCalendarsAccountCapability = GroupwareError{ + Status: http.StatusExpectationFailed, + Code: ErrorCodeMissingCalendarsAccountCapability, + Title: "Account is missing the calendar capability '" + jmap.JmapCalendars + "'", + Detail: "The JMAP Account of the user does not have the required capability '" + jmap.JmapCalendars + "'.", + } + ErrorMissingContactsSessionCapability = GroupwareError{ + Status: http.StatusExpectationFailed, + Code: ErrorCodeMissingContactsSessionCapability, + Title: "Session is missing the contact capability '" + jmap.JmapContacts + "'", + Detail: "The JMAP Session of the user does not have the required capability '" + jmap.JmapContacts + "'.", + } + ErrorMissingContactsAccountCapability = GroupwareError{ + Status: http.StatusExpectationFailed, + Code: ErrorCodeMissingContactsAccountCapability, + Title: "Account is missing the contact capability '" + jmap.JmapContacts + "'", + Detail: "The JMAP Account of the user does not have the required capability '" + jmap.JmapContacts + "'.", + } + ErrorMissingTasksSessionCapability = GroupwareError{ + Status: http.StatusExpectationFailed, + Code: ErrorCodeMissingTasksSessionCapability, + Title: "Session is missing the task capability '" + jmap.JmapTasks + "'", + Detail: "The JMAP Session of the user does not have the required capability '" + jmap.JmapTasks + "'.", + } + ErrorMissingTasksAccountCapability = GroupwareError{ + Status: http.StatusExpectationFailed, + Code: ErrorCodeMissingTaskAccountCapability, + Title: "Account is missing the task capability '" + jmap.JmapTasks + "'", + Detail: "The JMAP Account of the user does not have the required capability '" +
jmap.JmapTasks + "'.", + } + ErrorFailedToDeleteEmail = GroupwareError{ + Status: http.StatusInternalServerError, + Code: ErrorCodeFailedToDeleteEmail, + Title: "Failed to delete emails", + Detail: "One or more emails could not be deleted.", + } + ErrorFailedToDeleteSomeIdentities = GroupwareError{ + Status: http.StatusInternalServerError, + Code: ErrorCodeFailedToDeleteSomeIdentities, + Title: "Failed to delete some Identities", + Detail: "Failed to delete some or all of the identities.", + } + ErrorFailedToSanitizeEmail = GroupwareError{ + Status: http.StatusInternalServerError, + Code: ErrorCodeFailedToSanitizeEmail, + Title: "Failed to sanitize an email", + Detail: "Email content sanitization failed.", + } + ErrorFailedToDeleteContact = GroupwareError{ + Status: http.StatusInternalServerError, + Code: ErrorCodeFailedToDeleteContact, + Title: "Failed to delete contacts", + Detail: "One or more contacts could not be deleted.", + } + ErrorNoMailboxWithDraftRole = GroupwareError{ + Status: http.StatusExpectationFailed, + Code: ErrorCodeNoMailboxWithDraftRole, + Title: "Failed to find a Mailbox with the drafts role", + Detail: "We could not find a Mailbox that has the drafts role to store a draft email in.", + } + ErrorNoMailboxWithSentRole = GroupwareError{ + Status: http.StatusExpectationFailed, + Code: ErrorCodeNoMailboxWithSentRole, + Title: "Failed to find a Mailbox with the sent role", + Detail: "We could not find a Mailbox that has the sent role to store a sent email in.", + } +) + +type ErrorOpt interface { + apply(error *Error) +} + +type ErrorLinksOpt struct { + links *ErrorLinks +} + +func (o ErrorLinksOpt) apply(error *Error) { + error.Links = o.links +} + +var _ = withLinks // unused for now, but will be +func withLinks(links *ErrorLinks) ErrorLinksOpt { + return ErrorLinksOpt{ + links: links, + } +} + +type SourceLinksOpt struct { + source *ErrorSource +} + +func (o SourceLinksOpt) apply(error *Error) { + error.Source = o.source +} + +func withSource(source *ErrorSource) SourceLinksOpt { + return SourceLinksOpt{ + source: source, + } +} + +type MetaLinksOpt struct { + meta map[string]any +} + +func (o MetaLinksOpt) apply(error *Error) { + error.Meta = o.meta +} + +var _ = withMeta // unused for now, but will be +func withMeta(meta map[string]any) MetaLinksOpt { + return MetaLinksOpt{ + meta: meta, + } +} + +type CodeOpt struct { + code string +} + +func (o CodeOpt) apply(error *Error) { + error.Code = o.code +} + +var _ = withCode // unused for now, but will be +func withCode(code string) CodeOpt { + return CodeOpt{ + code: code, + } +} + +type TitleOpt struct { + title string + detail string +} + +func (o TitleOpt) apply(error *Error) { + error.Title = o.title + error.Detail = o.detail +} + +var _ = withTitle // unused for now, but will be +func withTitle(title string, detail string) TitleOpt { + return TitleOpt{ + title: title, + detail: detail, + } +} + +type DetailOpt struct { + detail string +} + +func (o DetailOpt) apply(error *Error) { + error.Detail = o.detail +} + +func withDetail(detail string) DetailOpt { + return DetailOpt{ + detail: detail, + } +} + +/* +func errorResponse(id string, error GroupwareError, options ...ErrorOpt) ErrorResponse { + err := Error{ + Id: id, + NumStatus: error.Status, + Status: strconv.Itoa(error.Status), + Code: error.Code, + Title: error.Title, + Detail: error.Detail, + } + + for _, o := range options { + o.apply(&err) + } + + return ErrorResponse{ + Errors: []Error{err}, + } +} +*/ + +func errorId(r *http.Request, ctx context.Context) 
string { + requestId := chimiddleware.GetReqID(ctx) + if requestId == "" { + requestId = r.Header.Get("x-request-id") + } + localId := uuid.NewString() + if requestId != "" { + return requestId + "." + localId + } else { + return localId + } +} + +func (r Request) errorId() string { + return errorId(r.r, r.ctx) +} + +func apiError(id string, gwerr GroupwareError, options ...ErrorOpt) *Error { + err := &Error{ + Id: id, + NumStatus: gwerr.Status, + Status: strconv.Itoa(gwerr.Status), + Code: gwerr.Code, + Title: gwerr.Title, + Detail: gwerr.Detail, + } + + for _, o := range options { + o.apply(err) + } + + return err +} + +func (r Request) observedParameterError(gwerr GroupwareError, options ...ErrorOpt) *Error { + return r.observeParameterError(apiError(r.errorId(), gwerr, options...)) +} + +func (r Request) apiError(err *GroupwareError, options ...ErrorOpt) *Error { + if err == nil { + return nil + } + errorId := r.errorId() + return apiError(errorId, *err, options...) +} + +func (r Request) apiErrorFromJmap(err jmap.Error) *Error { + if err == nil { + return nil + } + gwe := groupwareErrorFromJmap(err) + if gwe == nil { + return nil + } + + errorId := r.errorId() + return apiError(errorId, *gwe) +} + +func errorResponses(errors ...Error) ErrorResponse { + return ErrorResponse{Errors: errors} +} + +func (r Request) errorResponseFromJmap(accountIds []string, err jmap.Error) Response { + return errorResponse(accountIds, r.apiErrorFromJmap(r.observeJmapError(err))) +} diff --git a/services/groupware/pkg/groupware/groupware_framework.go b/services/groupware/pkg/groupware/groupware_framework.go new file mode 100644 index 0000000000..aaf5cce3b7 --- /dev/null +++ b/services/groupware/pkg/groupware/groupware_framework.go @@ -0,0 +1,785 @@ +package groupware + +import ( + "context" + "crypto/tls" + "encoding/json" + "fmt" + "net/http" + "net/url" + "slices" + "strings" + "sync/atomic" + "time" + + "github.com/go-chi/chi/v5" + "github.com/go-chi/render" + "github.com/gorilla/websocket" + "github.com/miekg/dns" + "github.com/r3labs/sse/v2" + "github.com/rs/zerolog" + + "github.com/prometheus/client_golang/prometheus" + + cmap "github.com/orcaman/concurrent-map" + + "github.com/opencloud-eu/opencloud/pkg/jmap" + "github.com/opencloud-eu/opencloud/pkg/log" + + "github.com/opencloud-eu/opencloud/services/groupware/pkg/config" + "github.com/opencloud-eu/opencloud/services/groupware/pkg/metrics" +) + +// Logging property keys. +const ( + logUsername = "username" + logUserId = "user-id" + logSessionState = "session-state" + logAccountId = "account-id" + logBlobAccountId = "blob-account-id" + logErrorId = "error-id" + logErrorCode = "code" + logErrorStatus = "status" + logErrorSourceHeader = "source-header" + logErrorSourceParameter = "source-parameter" + logErrorSourcePointer = "source-pointer" + logIdentityId = "identity-id" + logEmailId = "email-id" + logJobDescription = "job" + logJobId = "job-id" + logStreamId = "stream-id" + logPath = "path" + logMethod = "method" +) + +// Minimalistic representation of a user, containing only the attributes that are +// necessary for the Groupware implementation. +type user interface { + GetUsername() string + GetId() string +} + +// Provides a User that is associated with a request. +type userProvider interface { + // Provide the user for JMAP operations. + GetUser(req *http.Request, ctx context.Context, logger *log.Logger) (user, error) +} + +// Background job that needs to be executed asynchronously by the Groupware. 
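The apiError helper above combines a GroupwareError catalogue entry with the functional options (withDetail, withSource, withMeta, ...) into the JSON:API-style Error object that is eventually rendered to the client. A minimal sketch of the intended call pattern inside this package follows; the parameter name "limit" and the detail text are invented examples, not taken from this patch.

func exampleParameterError(r Request) *Error {
	// r.errorId() yields "<request-id>.<local-uuid>" so the error shown to the
	// client can be correlated with the request logs.
	return apiError(
		r.errorId(),
		ErrorInvalidRequestParameter,
		withDetail("Invalid numeric value for query parameter 'limit'"),
		withSource(&ErrorSource{Parameter: "limit"}),
	)
}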
+type Job struct { + // An identifier for the job, to use in logs for correlation. + id uint64 + // A human readable description of the job, to use in logs. + description string + // The logger to use for the job. + logger *log.Logger + // The function that performs the job. + job func(uint64, *log.Logger) +} + +type groupwareConfig struct { + maxBodyValueBytes uint + sanitize bool +} + +type groupwareDefaults struct { + emailLimit uint + contactLimit uint +} + +type Groupware struct { + mux *chi.Mux + metrics *metrics.Metrics + sseServer *sse.Server + // A map of all the SSE streams that have been created, in order to be able to iterate over them as, + // unfortunately, the sse implementation does not provide such a function. + // Key: the stream ID, which is the username + // Value: the timestamp of the creation of the stream + streams cmap.ConcurrentMap + logger *log.Logger + defaults groupwareDefaults + config groupwareConfig + // Caches successful and failed Sessions by the username. + sessionCache sessionCache + jmap *jmap.Client + userProvider userProvider + // SSE events that need to be pushed to clients. + eventChannel chan Event + // Background jobs that need to be executed. + jobsChannel chan Job + // A threadsafe counter to generate the job IDs. + jobCounter atomic.Uint64 +} + +// An error during the Groupware initialization. +type GroupwareInitializationError struct { + Message string + Err error +} + +func (e GroupwareInitializationError) Error() string { + if e.Message != "" { + return fmt.Sprintf("failed to create Groupware: %s: %v", e.Message, e.Err.Error()) + } else { + return fmt.Sprintf("failed to create Groupware: %v", e.Err.Error()) + } +} +func (e GroupwareInitializationError) Unwrap() error { + return e.Err +} + +// SSE Event. +type Event struct { + // The type of event, will be sent as the "type" attribute. + Type string + // The ID of the stream to push the event to, typically the username. + Stream string + // The payload of the event, will be serialized as JSON. + Body any +} + +// A jmap.HttpJmapApiClientEventListener implementation that records those JMAP +// events as metric increments. 
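NewGroupware below wires the JMAP HTTP/WebSocket clients, the session cache, the SSE server and the worker pool, and reports failures as GroupwareInitializationError, which wraps the root cause. A hedged sketch of how a runner might construct and mount it; cfg, the registry choice, the "/groupware" mount path and the listen address are assumptions for illustration only, not taken from this patch.

func exampleStart(cfg *config.Config, logger *log.Logger) error {
	mux := chi.NewRouter()
	g, err := NewGroupware(cfg, logger, mux, prometheus.DefaultRegisterer)
	if err != nil {
		var initErr GroupwareInitializationError
		if errors.As(err, &initErr) { // needs the standard "errors" import
			// Message carries the groupware-specific context, Unwrap() the underlying cause
			logger.Error().Err(initErr.Unwrap()).Msg(initErr.Message)
		}
		return err
	}
	mux.Route("/groupware", g.Route)         // REST routes from groupware_route.go
	mux.NotFound(g.NotFound)                 // consistent 404 handling
	mux.MethodNotAllowed(g.MethodNotAllowed) // consistent handling of unsupported methods
	return http.ListenAndServe(":9180", g)   // example address; the OpenCloud runtime serves this in practice
}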
+type groupwareHttpJmapApiClientMetricsRecorder struct { + m *metrics.Metrics +} + +var _ jmap.HttpJmapApiClientEventListener = groupwareHttpJmapApiClientMetricsRecorder{} + +func (r groupwareHttpJmapApiClientMetricsRecorder) OnSuccessfulRequest(endpoint string, status int) { + r.m.SuccessfulRequestPerEndpointCounter.With(metrics.Endpoint(endpoint)).Inc() +} +func (r groupwareHttpJmapApiClientMetricsRecorder) OnFailedRequest(endpoint string, err error) { + r.m.FailedRequestPerEndpointCounter.With(metrics.Endpoint(endpoint)).Inc() +} +func (r groupwareHttpJmapApiClientMetricsRecorder) OnFailedRequestWithStatus(endpoint string, status int) { + r.m.FailedRequestStatusPerEndpointCounter.With(metrics.EndpointAndStatus(endpoint, status)).Inc() +} +func (r groupwareHttpJmapApiClientMetricsRecorder) OnResponseBodyReadingError(endpoint string, err error) { + r.m.ResponseBodyReadingErrorPerEndpointCounter.With(metrics.Endpoint(endpoint)).Inc() +} +func (r groupwareHttpJmapApiClientMetricsRecorder) OnResponseBodyUnmarshallingError(endpoint string, err error) { + r.m.ResponseBodyUnmarshallingErrorPerEndpointCounter.With(metrics.Endpoint(endpoint)).Inc() +} +func (r groupwareHttpJmapApiClientMetricsRecorder) OnSuccessfulWsRequest(endpoint string, status int) { + // TODO metrics for WSS +} +func (r groupwareHttpJmapApiClientMetricsRecorder) OnFailedWsHandshakeRequestWithStatus(endpoint string, status int) { + // TODO metrics for WSS +} + +func NewGroupware(config *config.Config, logger *log.Logger, mux *chi.Mux, prometheusRegistry prometheus.Registerer) (*Groupware, error) { + baseUrl, err := url.Parse(config.Mail.BaseUrl) + if err != nil { + logger.Error().Err(err).Msgf("failed to parse configured Mail.Baseurl '%v'", config.Mail.BaseUrl) + return nil, GroupwareInitializationError{Message: fmt.Sprintf("failed to parse configured Mail.BaseUrl '%s'", config.Mail.BaseUrl), Err: err} + } + + sessionUrl := baseUrl.JoinPath(".well-known", "jmap") + + masterUsername := config.Mail.Master.Username + if masterUsername == "" { + logger.Error().Msg("failed to parse empty Mail.Master.Username") + return nil, GroupwareInitializationError{Message: "Mail.Master.Username is empty"} + } + masterPassword := config.Mail.Master.Password + if masterPassword == "" { + logger.Error().Msg("failed to parse empty Mail.Master.Password") + return nil, GroupwareInitializationError{Message: "Mail.Master.Password is empty"} + } + + defaultEmailLimit := max(config.Mail.DefaultEmailLimit, 0) + maxBodyValueBytes := max(config.Mail.MaxBodyValueBytes, 0) + defaultContactLimit := max(config.Mail.DefaultContactLimit, 0) + responseHeaderTimeout := max(config.Mail.ResponseHeaderTimeout, 0) + sessionCacheMaxCapacity := uint64(max(config.Mail.SessionCache.MaxCapacity, 0)) + sessionCacheTtl := max(config.Mail.SessionCache.Ttl, 0) + sessionFailureCacheTtl := max(config.Mail.SessionCache.FailureTtl, 0) + wsHandshakeTimeout := config.Mail.PushHandshakeTimeout + + eventChannelSize := 100 // TODO make channel queue buffering size configurable + workerQueueSize := 100 // TODO configuration setting + workerPoolSize := 10 // TODO configuration setting + + keepStreamsAliveInterval := time.Duration(30) * time.Second // TODO configuration, make it 0 to disable keepalive + sseEventTtl := time.Duration(5) * time.Minute // TODO configuration setting + + useDnsForSessionResolution := false // TODO configuration setting, although still experimental, needs proper unit tests first + + insecureTls := true // TODO make configurable + + sanitize := true // TODO make 
configurable + + m := metrics.New(prometheusRegistry, logger) + + userProvider := newRevaContextUsernameProvider() + + jmapMetricsAdapter := groupwareHttpJmapApiClientMetricsRecorder{m: m} + + var jmapClient jmap.Client + { + var api *jmap.HttpJmapClient + { + // TODO add timeouts and other meaningful configuration settings for the HTTP client + var httpClient http.Client + { + httpTransport := http.DefaultTransport.(*http.Transport).Clone() + httpTransport.ResponseHeaderTimeout = responseHeaderTimeout + if insecureTls { + tlsConfig := &tls.Config{InsecureSkipVerify: true} + httpTransport.TLSClientConfig = tlsConfig + } + httpClient = *http.DefaultClient + httpClient.Transport = httpTransport + } + + api = jmap.NewHttpJmapClient( + &httpClient, + masterUsername, + masterPassword, + jmapMetricsAdapter, + ) + } + + var wsf *jmap.HttpWsClientFactory + { + wsDialer := &websocket.Dialer{ + HandshakeTimeout: wsHandshakeTimeout, + } + if insecureTls { + wsDialer.TLSClientConfig = &tls.Config{InsecureSkipVerify: true} + } + + wsf, err = jmap.NewHttpWsClientFactory(wsDialer, masterUsername, masterPassword, logger, jmapMetricsAdapter) + if err != nil { + logger.Error().Err(err).Msg("failed to create websocket client") + return nil, GroupwareInitializationError{Message: "failed to create websocket client", Err: err} + } + } + + // api implements all three interfaces: + jmapClient = jmap.NewClient(api, api, api, wsf) + } + + sessionCacheBuilder := newSessionCacheBuilder( + sessionUrl, + logger, + jmapClient.FetchSession, + prometheusRegistry, + m, + sessionCacheMaxCapacity, + sessionCacheTtl, + sessionFailureCacheTtl, + ) + if useDnsForSessionResolution { + conf, err := dns.ClientConfigFromFile("/etc/resolv.conf") + if err != nil { + return nil, GroupwareInitializationError{Message: "failed to parse DNS client configuration from /etc/resolv.conf", Err: err} + } + + var dnsDomainGreenList []string = nil // TODO domain greenlist from configuration + var dnsDomainRedList []string = nil // TODO domain redlist from configuration + dnsDialTimeout := time.Duration(2) * time.Second // TODO DNS server connection timeout configuration + dnsReadTimeout := time.Duration(2) * time.Second // TODO DNS server response reading timeout configuration + defaultDomain := "example.com" // TODO default domain when the username is not an email address configuration + + sessionCacheBuilder = sessionCacheBuilder.withDnsAutoDiscovery( + defaultDomain, + conf, + dnsDialTimeout, + dnsReadTimeout, + dnsDomainGreenList, + dnsDomainRedList, + ) + } + + sessionCache, err := sessionCacheBuilder.build() + + if err != nil { + // assuming that the error was logged in great detail upstream + return nil, GroupwareInitializationError{Message: "failed to initialize the session cache", Err: err} + } + jmapClient.AddSessionEventListener(sessionCache) + + // A channel to process SSE Events with a single worker. 
+ eventChannel := make(chan Event, eventChannelSize) + { + eventBufferSizeMetric, err := prometheus.NewConstMetric(m.EventBufferSizeDesc, prometheus.GaugeValue, float64(eventChannelSize)) + if err != nil { + logger.Warn().Err(err).Msgf("failed to create metric %v", m.EventBufferSizeDesc.String()) + } else { + prometheusRegistry.Register(metrics.ConstMetricCollector{Metric: eventBufferSizeMetric}) + } + prometheusRegistry.Register(prometheus.NewGaugeFunc(m.EventBufferQueuedOpts, func() float64 { + return float64(len(eventChannel)) + })) + } + + sseServer := sse.New() + sseServer.EventTTL = sseEventTtl + { + var sseSubscribers atomic.Int32 + sseServer.OnSubscribe = func(streamID string, sub *sse.Subscriber) { + sseSubscribers.Add(1) + } + sseServer.OnUnsubscribe = func(streamID string, sub *sse.Subscriber) { + sseSubscribers.Add(-1) + } + prometheusRegistry.Register(prometheus.NewGaugeFunc(m.SSESubscribersOpts, func() float64 { + return float64(sseSubscribers.Load()) + })) + } + + jobsChannel := make(chan Job, workerQueueSize) + { + totalWorkerBufferMetric, err := prometheus.NewConstMetric(m.WorkersBufferSizeDesc, prometheus.GaugeValue, float64(workerQueueSize)) + if err != nil { + logger.Warn().Err(err).Msgf("failed to create metric %v", m.WorkersBufferSizeDesc.String()) + } else { + prometheusRegistry.Register(metrics.ConstMetricCollector{Metric: totalWorkerBufferMetric}) + } + + prometheusRegistry.Register(prometheus.NewGaugeFunc(m.WorkersBufferQueuedOpts, func() float64 { + return float64(len(jobsChannel)) + })) + } + + var busyWorkers atomic.Int32 + { + totalWorkersMetric, err := prometheus.NewConstMetric(m.TotalWorkersDesc, prometheus.GaugeValue, float64(workerPoolSize)) + if err != nil { + logger.Warn().Err(err).Msgf("failed to create metric %v", m.TotalWorkersDesc.String()) + } else { + prometheusRegistry.Register(metrics.ConstMetricCollector{Metric: totalWorkersMetric}) + } + + prometheusRegistry.Register(prometheus.NewGaugeFunc(m.BusyWorkersOpts, func() float64 { + return float64(busyWorkers.Load()) + })) + } + + g := &Groupware{ + mux: mux, + metrics: m, + sseServer: sseServer, + streams: cmap.New(), + logger: logger, + sessionCache: sessionCache, + userProvider: userProvider, + jmap: &jmapClient, + defaults: groupwareDefaults{ + emailLimit: defaultEmailLimit, + contactLimit: defaultContactLimit, + }, + config: groupwareConfig{ + maxBodyValueBytes: maxBodyValueBytes, + sanitize: sanitize, + }, + eventChannel: eventChannel, + jobsChannel: jobsChannel, + jobCounter: atomic.Uint64{}, + } + + for w := 1; w <= workerPoolSize; w++ { + go g.worker(jobsChannel, &busyWorkers) + } + + if keepStreamsAliveInterval != 0 { + ticker := time.NewTicker(keepStreamsAliveInterval) + //defer ticker.Stop() + go func() { + for range ticker.C { + g.keepStreamsAlive() + } + }() + } + + go g.listenForEvents() + + return g, nil +} + +func (g *Groupware) worker(jobs <-chan Job, busy *atomic.Int32) { + for job := range jobs { + busy.Add(1) + before := time.Now() + logger := log.From(job.logger.With().Str(logJobDescription, job.description).Uint64(logJobId, job.id)) + job.job(job.id, logger) + if logger.Trace().Enabled() { + logger.Trace().Msgf("finished job %d [%s] in %v", job.id, job.description, time.Since(before)) + } + busy.Add(-1) + } +} + +func (g *Groupware) job(logger *log.Logger, description string, f func(uint64, *log.Logger)) uint64 { + id := g.jobCounter.Add(1) + before := time.Now() + g.jobsChannel <- Job{id: id, description: description, logger: logger, job: f} + g.logger.Trace().Msgf("pushed 
job %d [%s] in %v", id, description, time.Since(before)) // TODO remove + return id +} + +func (g *Groupware) listenForEvents() { + for ev := range g.eventChannel { + data, err := json.Marshal(ev.Body) + if err == nil { + published := g.sseServer.TryPublish(ev.Stream, &sse.Event{ + Event: []byte(ev.Type), + Data: data, + }) + if !published && g.logger.Debug().Enabled() { + g.logger.Debug().Str(logStreamId, log.SafeString(ev.Stream)).Msgf("dropped SSE event") // TODO more details + } + } else { + g.logger.Error().Err(err).Msgf("failed to serialize %T body to JSON", ev) + } + } +} + +func (g *Groupware) push(user user, typ string, body any) { + g.metrics.SSEEventsCounter.WithLabelValues(typ).Inc() + g.eventChannel <- Event{Type: typ, Stream: user.GetUsername(), Body: body} +} + +func (g *Groupware) ServeHTTP(w http.ResponseWriter, r *http.Request) { + g.mux.ServeHTTP(w, r) +} + +func (g *Groupware) addStream(stream string) bool { + return g.streams.SetIfAbsent(stream, time.Now()) +} + +func (g *Groupware) keepStreamsAlive() { + event := &sse.Event{Comment: []byte("keepalive")} + g.streams.IterCb(func(stream string, created any) { + g.sseServer.Publish(stream, event) + }) +} + +func (g *Groupware) ServeSSE(w http.ResponseWriter, r *http.Request) { + g.withSession(w, r, func(req Request) Response { + stream := req.GetUser().GetUsername() + + if g.addStream(stream) { + str := g.sseServer.CreateStream(stream) + if g.logger.Trace().Enabled() { + g.logger.Trace().Msgf("created stream '%v'", log.SafeString(str.ID)) + } + } + + q := r.URL.Query() + q.Set("stream", stream) + r.URL.RawQuery = q.Encode() + + g.sseServer.ServeHTTP(w, r) + return Response{} + }) +} + +// Provide a JMAP Session for the given user +func (g *Groupware) session(user user, logger *log.Logger) (jmap.Session, bool, *GroupwareError, time.Time) { + if user == nil { + logger.Warn().Msg("user is nil") + return jmap.Session{}, false, nil, time.Time{} + } + name := user.GetUsername() + if name == "" { + logger.Warn().Msg("user has an empty username") + return jmap.Session{}, false, nil, time.Time{} + } + + // first look into the session cache + s := g.sessionCache.Get(name) + if s != nil { + if s.Success() { + return s.Get(), true, nil, s.Until() + } else { + return jmap.Session{}, false, s.Error(), s.Until() + } + } + // not sure this should/could happen: + logger.Warn().Msg("session cache returned nil") + return jmap.Session{}, false, nil, time.Time{} +} + +func (g *Groupware) log(error *Error) { + var level *zerolog.Event + if error.NumStatus < 300 { + // shouldn't land here, but just in case: 1xx and 2xx are "OK" and should be logged as debug + level = g.logger.Debug() + } else if error.NumStatus == http.StatusUnauthorized || error.NumStatus == http.StatusForbidden { + // security related errors are logged as warnings + level = g.logger.Warn() + } else if error.NumStatus >= 500 { + // internal errors are potentially cause for concerned: bugs or third party systems malfunctioning, log as errors + level = g.logger.Error() + } else { + // everything else should be 4xx which indicates mistakes from the client, log as debug + level = g.logger.Debug() + } + if !level.Enabled() { + return + } + l := level.Str(logErrorCode, error.Code).Str(logErrorId, error.Id).Int(logErrorStatus, error.NumStatus) + if error.Source != nil { + if error.Source.Header != "" { + l.Str(logErrorSourceHeader, log.SafeString(error.Source.Header)) + } + if error.Source.Parameter != "" { + l.Str(logErrorSourceParameter, log.SafeString(error.Source.Parameter)) + } 
+ if error.Source.Pointer != "" { + l.Str(logErrorSourcePointer, log.SafeString(error.Source.Pointer)) + } + } + l.Msg(error.Title) +} + +func (g *Groupware) serveError(w http.ResponseWriter, r *http.Request, error *Error, retryAfter time.Time) { + if error == nil { + return + } + g.log(error) + w.Header().Add("Content-Type", ContentTypeJsonApi) + if !retryAfter.IsZero() { + // https://developer.mozilla.org/en-US/docs/Web/HTTP/Reference/Headers/Retry-After + // either as an absolute timestamp: + // w.Header().Add("Retry-After", retryAfter.UTC().Format(time.RFC1123)) + // or as a delay in seconds: + w.Header().Add("Retry-After", fmt.Sprintf("%.0f", time.Until(retryAfter).Seconds())) + } + render.Status(r, error.NumStatus) + w.WriteHeader(error.NumStatus) + render.Render(w, r, errorResponses(*error)) +} + +// Execute a closure with a JMAP Session. +// +// Returns +// - a Response object +// - if an error occurs, after which timestamp a retry is allowed +// - whether the request was sent to the server or not +func (g *Groupware) withSession(w http.ResponseWriter, r *http.Request, handler func(r Request) Response) (Response, time.Time, bool) { + ctx := r.Context() + sl := g.logger.SubloggerWithRequestID(ctx) + logger := &sl + + // retrieve the current user from the inbound request + var user user + { + var err error + user, err = g.userProvider.GetUser(r, ctx, logger) + if err != nil { + g.metrics.AuthenticationFailureCounter.Inc() + g.serveError(w, r, apiError(errorId(r, ctx), ErrorInvalidAuthentication), time.Time{}) + return Response{}, time.Time{}, false + } + if user == nil { + g.metrics.AuthenticationFailureCounter.Inc() + g.serveError(w, r, apiError(errorId(r, ctx), ErrorMissingAuthentication), time.Time{}) + return Response{}, time.Time{}, false + } + + logger = log.From(logger.With().Str(logUserId, log.SafeString(user.GetId()))) + } + + // retrieve a JMAP Session for that user + var session jmap.Session + { + s, ok, gwerr, retryAfter := g.session(user, logger) + if gwerr != nil { + g.metrics.SessionFailureCounter.Inc() + errorId := errorId(r, ctx) + logger.Error().Str("code", gwerr.Code).Str("error", gwerr.Title).Str("detail", gwerr.Detail).Str(logErrorId, errorId).Msg("failed to determine JMAP session") + g.serveError(w, r, apiError(errorId, *gwerr), retryAfter) + return Response{}, retryAfter, false + } + if ok { + session = s + } else { + // no session = authentication failed + g.metrics.SessionFailureCounter.Inc() + errorId := errorId(r, ctx) + logger.Error().Str(logErrorId, errorId).Msg("could not authenticate, failed to find Session") + gwerr = &ErrorInvalidAuthentication + g.serveError(w, r, apiError(errorId, *gwerr), retryAfter) + return Response{}, retryAfter, false + } + } + + decoratedLogger := decorateLogger(logger, session) + + // build the Request object + req := Request{ + g: g, + user: user, + r: r, + ctx: ctx, + logger: decoratedLogger, + session: &session, + } + + // perform the actual request using the closure that was passed in + response := handler(req) + + // return the result of that closure execution + return response, time.Time{}, true +} + +const ( + SessionStateResponseHeader = "Session-State" + StateResponseHeader = "State" + ObjectTypeResponseHeader = "Object-Type" + AccountIdResponseHeader = "Account-Id" + AccountIdsResponseHeader = "Account-Ids" +) + +// Send the Response object as an HTTP response. 
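serveError above emits Retry-After as a delay in seconds (derived from the session failure cache expiry) on 503 responses. A hypothetical client-side helper, shown only to illustrate the contract; it handles just the delta-seconds form this service writes, not the HTTP-date variant.

func retryDelay(resp *http.Response) time.Duration {
	// e.g. "Retry-After: 42" -> back off for 42 seconds before retrying
	if secs, err := strconv.Atoi(resp.Header.Get("Retry-After")); err == nil && secs > 0 {
		return time.Duration(secs) * time.Second
	}
	return 0
}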
+func (g *Groupware) sendResponse(w http.ResponseWriter, r *http.Request, response Response) { + if response.err != nil { + g.log(response.err) + w.Header().Add("Content-Type", ContentTypeJsonApi) + render.Status(r, response.err.NumStatus) + render.Render(w, r, errorResponses(*response.err)) + return + } + + if response.sessionState != "" { + w.Header().Add(SessionStateResponseHeader, string(response.sessionState)) + } + + if response.contentLanguage != "" { + w.Header().Add("Content-Language", string(response.contentLanguage)) + } + + notModified := false + { + etag := string(response.etag) + if etag != "" { + challenge := r.Header.Get("if-none-match") // https://developer.mozilla.org/en-US/docs/Web/HTTP/Reference/Headers/If-None-Match + quotedEtag := "\"" + etag + "\"" // https://developer.mozilla.org/en-US/docs/Web/HTTP/Reference/Headers/ETag#etag_value + notModified = challenge != "" && (challenge == etag || challenge == quotedEtag) // be a bit flexible/permissive here with the quoting + w.Header().Add("ETag", quotedEtag) + w.Header().Add(StateResponseHeader, etag) + } + } + { + ot := string(response.objectType) + if ot != "" { + w.Header().Add(ObjectTypeResponseHeader, ot) + } + } + switch len(response.accountIds) { + case 0: + break + case 1: + w.Header().Add(AccountIdResponseHeader, response.accountIds[0]) + default: + c := make([]string, len(response.accountIds)) + copy(c, response.accountIds) + slices.Sort(c) + value := strings.Join(c, ",") + w.Header().Add(AccountIdsResponseHeader, value) + } + + if notModified { + w.WriteHeader(http.StatusNotModified) + } else { + switch response.body { + case nil, "": + w.WriteHeader(response.status) + default: + render.Status(r, http.StatusOK) + render.JSON(w, r, response.body) + } + } +} + +func (g *Groupware) respond(w http.ResponseWriter, r *http.Request, handler func(r Request) Response) { + response, _, ok := g.withSession(w, r, handler) + if !ok { + return + } + g.sendResponse(w, r, response) +} + +func (g *Groupware) stream(w http.ResponseWriter, r *http.Request, handler func(r Request, w http.ResponseWriter) *Error) { + ctx := r.Context() + sl := g.logger.SubloggerWithRequestID(ctx) + logger := &sl + + user, err := g.userProvider.GetUser(r, ctx, logger) + if err != nil { + g.serveError(w, r, apiError(errorId(r, ctx), ErrorInvalidAuthentication), time.Time{}) + return + } + if user == nil { + g.serveError(w, r, apiError(errorId(r, ctx), ErrorMissingAuthentication), time.Time{}) + return + } + + logger = log.From(logger.With().Str(logUserId, log.SafeString(user.GetId()))) + + session, ok, gwerr, retryAfter := g.session(user, logger) + if gwerr != nil { + errorId := errorId(r, ctx) + logger.Error().Str("code", gwerr.Code).Str("error", gwerr.Title).Str("detail", gwerr.Detail).Str(logErrorId, errorId).Msg("failed to determine JMAP session") + g.serveError(w, r, apiError(errorId, *gwerr), retryAfter) + return + } + if !ok { + // no session = authentication failed + errorId := errorId(r, ctx) + logger.Error().Str(logErrorId, errorId).Msg("could not authenticate, failed to find Session") + gwerr = &ErrorInvalidAuthentication + g.serveError(w, r, apiError(errorId, *gwerr), retryAfter) + return + } + + decoratedLogger := decorateLogger(logger, session) + + req := Request{ + g: g, + user: user, + r: r, + ctx: ctx, + logger: decoratedLogger, + session: &session, + } + + apierr := handler(req, w) + if apierr != nil { + g.log(apierr) + w.Header().Add("Content-Type", ContentTypeJsonApi) + render.Status(r, apierr.NumStatus) + 
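sendResponse above treats Response.etag both as a quoted strong ETag (compared against If-None-Match with or without quotes) and as the State header, so a handler that exposes a JMAP state string gets HTTP 304 revalidation for free. A small sketch; the handler name, the hard-coded state and the payload are illustrative only.

func (g *Groupware) ExampleGetMailboxes(w http.ResponseWriter, r *http.Request) {
	g.respond(w, r, func(req Request) Response {
		accountId, apierr := req.GetAccountIdForMail()
		if apierr != nil {
			return errorResponse(single(accountId), apierr)
		}
		// jmapState would normally be the state string returned by Mailbox/get;
		// when it matches the client's If-None-Match, sendResponse answers 304.
		jmapState := jmap.State("0042")
		body := []string{"inbox", "sent"} // placeholder payload
		return etagResponse(single(accountId), body, req.session.State,
			MailboxResponseObjectType, jmapState, jmap.Language(req.language()))
	})
}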
w.WriteHeader(apierr.NumStatus) + render.Render(w, r, errorResponses(*apierr)) + } +} + +func (g *Groupware) NotFound(w http.ResponseWriter, r *http.Request) { + level := g.logger.Debug() + if level.Enabled() { + path := log.SafeString(r.URL.Path) + method := log.SafeString(r.Method) + level.Str(logPath, path).Str(logMethod, method).Int(logErrorStatus, http.StatusNotFound).Msgf("unmatched path: '%v'", path) + } + w.Header().Add("Unmatched-Path", r.URL.Path) // TODO possibly remove this in production for security reasons? + w.WriteHeader(http.StatusNotFound) +} + +func (g *Groupware) MethodNotAllowed(w http.ResponseWriter, r *http.Request) { + level := g.logger.Debug() + if level.Enabled() { + path := log.SafeString(r.URL.Path) + method := log.SafeString(r.Method) + level.Str(logPath, path).Str(logMethod, method).Int(logErrorStatus, http.StatusNotFound).Msgf("method not allowed: '%v'", method) + } + w.Header().Add("Unsupported-Method", r.Method) // TODO possibly remove this in production for security reasons? + w.WriteHeader(http.StatusNotFound) +} + +func single[S any](s S) []S { + return []S{s} +} diff --git a/services/groupware/pkg/groupware/groupware_mock_tasks.go b/services/groupware/pkg/groupware/groupware_mock_tasks.go new file mode 100644 index 0000000000..59d875a47b --- /dev/null +++ b/services/groupware/pkg/groupware/groupware_mock_tasks.go @@ -0,0 +1,218 @@ +package groupware + +import ( + "time" + + "github.com/opencloud-eu/opencloud/pkg/jmap" + "github.com/opencloud-eu/opencloud/pkg/jscalendar" +) + +var TL1 = jmap.TaskList{ + Id: "aemua9ai", + Role: jmap.TaskListRoleInbox, + Name: "Your Tasks", + Description: "Your default list of tasks", + Color: "purple", + KeywordColors: map[string]string{ + "todo": "blue", + "done": "green", + }, + CategoryColors: map[string]string{ + "work": "magenta", + }, + SortOrder: 1, + IsSubscribed: true, + TimeZone: "CEST", + WorkflowStatuses: []string{ + "new", "todo", "in-progress", "done", + }, + ShareWith: map[string]jmap.TaskRights{ + "eefeeb4p": { + MayReadItems: true, + MayWriteAll: false, + MayWriteOwn: true, + MayUpdatePrivate: false, + MayRSVP: false, + MayAdmin: false, + MayDelete: false, + }, + }, + MyRights: &jmap.TaskRights{ + MayReadItems: true, + MayWriteAll: true, + MayWriteOwn: true, + MayUpdatePrivate: true, + MayRSVP: true, + MayAdmin: false, + MayDelete: false, + }, + DefaultAlertsWithTime: map[string]jscalendar.Alert{ + "saenee7a": { + Type: jscalendar.AlertType, + Trigger: jscalendar.OffsetTrigger{ + Type: jscalendar.OffsetTriggerType, + Offset: "-PT10M", + RelativeTo: jscalendar.RelativeToStart, + }, + Action: jscalendar.AlertActionEmail, + }, + }, + DefaultAlertsWithoutTime: map[string]jscalendar.Alert{ + "xiipaew9": { + Type: jscalendar.AlertType, + Trigger: jscalendar.OffsetTrigger{ + Type: jscalendar.OffsetTriggerType, + Offset: "-PT12H", + RelativeTo: jscalendar.RelativeToStart, + }, + Action: jscalendar.AlertActionDisplay, + }, + }, +} + +var T1 = jmap.Task{ + Id: "laoj0ahk", + TaskListId: TL1.Id, + IsDraft: false, + UtcStart: jmap.UTCDate("2025-10-02T10:00:00Z"), + UtcDue: jmap.UTCDate("2025-10-12T18:00:00Z"), + SortOrder: 1, + WorkflowStatus: "new", + Task: jscalendar.Task{ + Type: jscalendar.TaskType, + Object: jscalendar.Object{ + CommonObject: jscalendar.CommonObject{ + Uid: "7da0d4a2-385c-430f-9022-61db302734d9", + ProdId: "Mock 0.0", + Created: "2025-10-01T17:31:49", + Updated: "2025-10-01T17:35:12", + Title: "Crossing the Ring", + Description: "We need to cross the Ring the protomolecule opened.", + 
DescriptionContentType: "text/plain", + Links: map[string]jscalendar.Link{ + "theisha5": { + Type: jscalendar.LinkType, + Href: "https://static.wikia.nocookie.net/expanse/images/e/ed/S03E09-SlowZone_01.jpg/revision/latest/scale-to-width-down/1000?cb=20180611184722", + ContentType: "image/jpeg", + Size: 109212, + Rel: jscalendar.RelIcon, + Display: "sol gate", + Title: "The Sol Ring Gate", + }, + }, + Locale: "en-GB", + Keywords: map[string]bool{ + "todo": true, + }, + Categories: map[string]bool{ + "work": true, + }, + Color: "yellow", + }, + Sequence: 1, + ShowWithoutTime: false, + Locations: map[string]jscalendar.Location{ + "ruoth5uu": { + Type: jscalendar.LocationType, + Name: "Sol Gate", + LocationTypes: map[jscalendar.LocationTypeOption]bool{ + jscalendar.LocationTypeOptionLandmarkAddress: true, + }, + Coordinates: "geo:40.4165583,-3.7063595", + Links: map[string]jscalendar.Link{ + "jeeshei5": { + Type: jscalendar.LinkType, + Href: "https://expanse.fandom.com/wiki/Sol_gate", + ContentType: "text/html", + Title: "The Sol Gate", + }, + }, + }, + }, + Priority: 1, + FreeBusyStatus: jscalendar.FreeBusyStatusBusy, + Privacy: jscalendar.PrivacySecret, + Alerts: map[string]jscalendar.Alert{ + "eiphuw4a": { + Type: jscalendar.AlertType, + Trigger: jscalendar.AbsoluteTrigger{ + Type: jscalendar.AbsoluteTriggerType, + When: mustParseTime("2025-12-01T10:11:12Z"), + }, + Action: jscalendar.AlertActionDisplay, + }, + }, + TimeZone: "UTC", + MayInviteSelf: true, + MayInviteOthers: true, + HideAttendees: true, + }, + Due: jscalendar.LocalDateTime("2025-12-01T10:11:12"), + Start: jscalendar.LocalDateTime("2025-10-01T08:00:00"), + EstimatedDuration: "PT8W", + PercentComplete: 5, + Progress: jscalendar.ProgressNeedsAction, + ProgressUpdated: mustParseTime("2025-10-01T08:12:39Z"), + }, + EstimatedWork: 4, + Impact: "block", + IsOrigin: true, + MayInviteSelf: true, + MayInviteOthers: true, + HideAttendees: false, + Checklists: map[string]jmap.Checklist{ + "sae9aimu": { + Type: jmap.ChecklistType, + Title: "Prerequisites", + CheckItems: []jmap.CheckItem{ + { + Type: jmap.CheckItemType, + Title: "Control Medina Station", + SortOrder: 1, + IsComplete: true, + Updated: jmap.UTCDate("2025-04-01T09:32:10Z"), + Assignee: &jmap.TaskPerson{ + Type: jmap.TaskPersonType, + Name: "Fred Johnson", + Uri: "mailto:johnson@opa.org", + PrincipalId: "nae5hu9t", + }, + Comments: map[string]jmap.Comment{ + "ooze1iet": { + Type: jmap.CommentType, + Message: "We first need to control Medina Station before we can get through the Sol Gate", + Created: jmap.UTCDate("2025-04-01T12:11:10Z"), + Updated: jmap.UTCDate("2025-04-01T12:29:19Z"), + Author: &jmap.TaskPerson{ + Type: jmap.TaskPersonType, + Name: "Anderson Dawes", + Uri: "mailto:adawes@opa.org", + PrincipalId: "eshi9oot", + }, + }, + }, + }, + }, + }, + }, +} + +var AllTaskLists = []jmap.TaskList{TL1} + +var TaskListsState = jmap.State("mock") + +var TaskMapByTaskListId = map[string][]jmap.Task{ + TL1.Id: { + T1, + }, +} + +var TaskState = jmap.State("mock") + +func mustParseTime(text string) time.Time { + t, err := time.Parse(time.RFC3339, text) + if err != nil { + panic(err) + } + return t +} diff --git a/services/groupware/pkg/groupware/groupware_request.go b/services/groupware/pkg/groupware/groupware_request.go new file mode 100644 index 0000000000..beb5d8b4ec --- /dev/null +++ b/services/groupware/pkg/groupware/groupware_request.go @@ -0,0 +1,461 @@ +package groupware + +import ( + "context" + "encoding/json" + "errors" + "fmt" + "io" + "net/http" + "slices" + 
"strconv" + "strings" + "time" + + "github.com/go-chi/chi/v5" + chimiddleware "github.com/go-chi/chi/v5/middleware" + + "github.com/prometheus/client_golang/prometheus" + + "github.com/opencloud-eu/opencloud/pkg/jmap" + "github.com/opencloud-eu/opencloud/pkg/log" + "github.com/opencloud-eu/opencloud/pkg/structs" + + "github.com/opencloud-eu/opencloud/services/groupware/pkg/metrics" + groupwaremiddleware "github.com/opencloud-eu/opencloud/services/groupware/pkg/middleware" +) + +const ( + // TODO remove this once Stalwart has actual support for Tasks, Calendars, Contacts and we don't need to mock it any more + IgnoreSessionCapabilityChecks = true +) + +// using a wrapper class for requests, to group multiple parameters, really to avoid crowding the +// API of handlers but also to make it easier to expand it in the future without having to modify +// the parameter list of every single handler function +type Request struct { + g *Groupware + user user + r *http.Request + ctx context.Context + logger *log.Logger + session *jmap.Session +} + +func isDefaultAccountid(accountId string) bool { + return slices.Contains(defaultAccountIds, accountId) +} + +func (r Request) push(typ string, event any) { + r.g.push(r.user, typ, event) +} + +func (r Request) GetUser() user { + return r.user +} + +func (r Request) GetRequestId() string { + return chimiddleware.GetReqID(r.ctx) +} + +func (r Request) GetTraceId() string { + return groupwaremiddleware.GetTraceID(r.ctx) +} + +var ( + errNoPrimaryAccountFallback = errors.New("no primary account fallback") + errNoPrimaryAccountForMail = errors.New("no primary account for mail") + errNoPrimaryAccountForBlob = errors.New("no primary account for blob") + errNoPrimaryAccountForVacationResponse = errors.New("no primary account for vacation response") + errNoPrimaryAccountForSubmission = errors.New("no primary account for submission") + errNoPrimaryAccountForTask = errors.New("no primary account for task") + errNoPrimaryAccountForCalendar = errors.New("no primary account for calendar") + errNoPrimaryAccountForContact = errors.New("no primary account for contact") + errNoPrimaryAccountForQuota = errors.New("no primary account for quota") + // errNoPrimaryAccountForSieve = errors.New("no primary account for sieve") + // errNoPrimaryAccountForWebsocket = errors.New("no primary account for websocket") +) + +func (r Request) AllAccountIds() []string { + // TODO potentially filter on "subscribed" accounts? 
+ return structs.Uniq(structs.Keys(r.session.Accounts)) +} + +func (r Request) GetAccountIdWithoutFallback() (string, *Error) { + accountId := chi.URLParam(r.r, UriParamAccountId) + if accountId == "" || isDefaultAccountid(accountId) { + r.logger.Error().Err(errNoPrimaryAccountFallback).Msg("failed to determine the accountId") + return "", apiError(r.errorId(), ErrorNonExistingAccount, + withDetail("Failed to determine the account to use"), + withSource(&ErrorSource{Parameter: UriParamAccountId}), + ) + } + return accountId, nil +} + +func (r Request) getAccountId(fallback string, err error) (string, *Error) { + accountId := chi.URLParam(r.r, UriParamAccountId) + if accountId == "" || isDefaultAccountid(accountId) { + accountId = fallback + } + if accountId == "" { + r.logger.Error().Err(err).Msg("failed to determine the accountId") + return "", apiError(r.errorId(), ErrorNonExistingAccount, + withDetail("Failed to determine the account to use"), + withSource(&ErrorSource{Parameter: UriParamAccountId}), + ) + } + return accountId, nil +} + +func (r Request) GetAccountIdForMail() (string, *Error) { + return r.getAccountId(r.session.PrimaryAccounts.Mail, errNoPrimaryAccountForMail) +} + +func (r Request) GetAccountIdForBlob() (string, *Error) { + return r.getAccountId(r.session.PrimaryAccounts.Blob, errNoPrimaryAccountForBlob) +} + +func (r Request) GetAccountIdForVacationResponse() (string, *Error) { + return r.getAccountId(r.session.PrimaryAccounts.VacationResponse, errNoPrimaryAccountForVacationResponse) +} + +func (r Request) GetAccountIdForQuota() (string, *Error) { + return r.getAccountId(r.session.PrimaryAccounts.Quota, errNoPrimaryAccountForQuota) +} + +func (r Request) GetAccountIdForSubmission() (string, *Error) { + return r.getAccountId(r.session.PrimaryAccounts.Blob, errNoPrimaryAccountForSubmission) +} + +func (r Request) GetAccountIdForTask() (string, *Error) { + // TODO we don't have these yet, not implemented in Stalwart + // return r.getAccountId(r.session.PrimaryAccounts.Task, errNoPrimaryAccountForTask) + return r.GetAccountIdForMail() +} + +func (r Request) GetAccountIdForCalendar() (string, *Error) { + // TODO we don't have these yet, not implemented in Stalwart + // return r.getAccountId(r.session.PrimaryAccounts.Calendar, errNoPrimaryAccountForCalendar) + return r.GetAccountIdForMail() +} + +func (r Request) GetAccountIdForContact() (string, *Error) { + // TODO we don't have these yet, not implemented in Stalwart + // return r.getAccountId(r.session.PrimaryAccounts.Contact, errNoPrimaryAccountForContact) + return r.GetAccountIdForMail() +} + +func (r Request) GetAccountForMail() (string, jmap.Account, *Error) { + accountId, err := r.GetAccountIdForMail() + if err != nil { + return "", jmap.Account{}, err + } + + account, ok := r.session.Accounts[accountId] + if !ok { + r.logger.Debug().Msgf("failed to find account '%v'", accountId) + // TODO metric for inexistent accounts + return accountId, jmap.Account{}, apiError(r.errorId(), ErrorNonExistingAccount, + withDetail(fmt.Sprintf("The account '%v' does not exist", log.SafeString(accountId))), + withSource(&ErrorSource{Parameter: UriParamAccountId}), + ) + } + return accountId, account, nil +} + +func (r Request) parameterError(param string, detail string) *Error { + return r.observedParameterError(ErrorInvalidRequestParameter, + withDetail(detail), + withSource(&ErrorSource{Parameter: param})) +} + +func (r Request) parameterErrorResponse(accountIds []string, param string, detail string) Response { + return 
errorResponse(accountIds, r.parameterError(param, detail)) +} + +func (r Request) getStringParam(param string, defaultValue string) (string, bool) { + q := r.r.URL.Query() + if !q.Has(param) { + return defaultValue, false + } + str := q.Get(param) + if str == "" { + return defaultValue, false + } + return str, true +} + +func (r Request) getMandatoryStringParam(param string) (string, *Error) { + str := "" + q := r.r.URL.Query() + if q.Has(param) { + str = q.Get(param) + } + if str == "" { + msg := fmt.Sprintf("Missing required value for query parameter '%v'", param) + return "", r.observedParameterError(ErrorMissingMandatoryRequestParameter, + withDetail(msg), + withSource(&ErrorSource{Parameter: param}), + ) + } + return str, nil +} + +func (r Request) parseIntParam(param string, defaultValue int) (int, bool, *Error) { + q := r.r.URL.Query() + if !q.Has(param) { + return defaultValue, false, nil + } + + str := q.Get(param) + if str == "" { + return defaultValue, false, nil + } + + value, err := strconv.ParseInt(str, 10, 0) + if err != nil { + // don't include the original error, as it leaks too much about our implementation, e.g.: + // strconv.ParseInt: parsing \"a\": invalid syntax + msg := fmt.Sprintf("Invalid numeric value for query parameter '%v': '%s'", param, log.SafeString(str)) + return defaultValue, true, r.observedParameterError(ErrorInvalidRequestParameter, + withDetail(msg), + withSource(&ErrorSource{Parameter: param}), + ) + } + return int(value), true, nil +} + +func (r Request) parseUIntParam(param string, defaultValue uint) (uint, bool, *Error) { + q := r.r.URL.Query() + if !q.Has(param) { + return defaultValue, false, nil + } + + str := q.Get(param) + if str == "" { + return defaultValue, false, nil + } + + value, err := strconv.ParseUint(str, 10, 0) + if err != nil { + // don't include the original error, as it leaks too much about our implementation, e.g.: + // strconv.ParseInt: parsing \"a\": invalid syntax + msg := fmt.Sprintf("Invalid numeric value for query parameter '%v': '%s'", param, log.SafeString(str)) + return defaultValue, true, r.observedParameterError(ErrorInvalidRequestParameter, + withDetail(msg), + withSource(&ErrorSource{Parameter: param}), + ) + } + return uint(value), true, nil +} + +func (r Request) parseDateParam(param string) (time.Time, bool, *Error) { + q := r.r.URL.Query() + if !q.Has(param) { + return time.Time{}, false, nil + } + + str := q.Get(param) + if str == "" { + return time.Time{}, false, nil + } + + t, err := time.Parse(time.RFC3339, str) + if err != nil { + msg := fmt.Sprintf("Invalid RFC3339 value for query parameter '%v': '%s': %s", param, log.SafeString(str), err.Error()) + return time.Time{}, true, r.observedParameterError(ErrorInvalidRequestParameter, + withDetail(msg), + withSource(&ErrorSource{Parameter: param}), + ) + } + return t, true, nil +} + +func (r Request) parseBoolParam(param string, defaultValue bool) (bool, bool, *Error) { + q := r.r.URL.Query() + if !q.Has(param) { + return defaultValue, false, nil + } + + str := q.Get(param) + if str == "" { + return defaultValue, false, nil + } + + b, err := strconv.ParseBool(str) + if err != nil { + msg := fmt.Sprintf("Invalid boolean value for query parameter '%v': '%s': %s", param, log.SafeString(str), err.Error()) + return defaultValue, true, r.observedParameterError(ErrorInvalidRequestParameter, + withDetail(msg), + withSource(&ErrorSource{Parameter: param}), + ) + } + return b, true, nil +} + +func (r Request) parseMapParam(param string) (map[string]string, bool, *Error) { 
+ q := r.r.URL.Query() + if !q.Has(param) { + return map[string]string{}, false, nil + } + + result := map[string]string{} + prefix := param + "." + for name, values := range q { + if strings.HasPrefix(name, prefix) { + if len(values) > 0 { + key := name[len(prefix):] // the key is everything after "<param>." + result[key] = values[0] + } + } + } + return result, true, nil +} + +func (r Request) body(target any) *Error { + body := r.r.Body + defer func(b io.ReadCloser) { + err := b.Close() + if err != nil { + r.logger.Error().Err(err).Msg("failed to close request body") + } + }(body) + + err := json.NewDecoder(body).Decode(target) + if err != nil { + r.logger.Warn().Msgf("failed to deserialize the request body: %s", err.Error()) + return r.observedParameterError(ErrorInvalidRequestBody, withSource(&ErrorSource{Pointer: "/"})) // we don't get any details here + } + return nil +} + +func (r Request) language() string { + return r.r.Header.Get("Accept-Language") +} + +func (r Request) observe(obs prometheus.Observer, value float64) { + metrics.WithExemplar(obs, value, r.GetRequestId(), r.GetTraceId()) +} + +func (r Request) observeParameterError(err *Error) *Error { + if err != nil { + r.g.metrics.ParameterErrorCounter.WithLabelValues(err.Code).Inc() + } + return err +} + +func (r Request) observeJmapError(jerr jmap.Error) jmap.Error { + if jerr != nil { + r.g.metrics.JmapErrorCounter.WithLabelValues(r.session.JmapEndpoint, strconv.Itoa(jerr.Code())).Inc() + } + return jerr +} + +func (r Request) needTask(accountId string) (bool, Response) { + if !IgnoreSessionCapabilityChecks { + if r.session.Capabilities.Tasks == nil { + return false, errorResponseWithSessionState(single(accountId), r.apiError(&ErrorMissingTasksSessionCapability), r.session.State) + } + } + return true, Response{} +} + +func (r Request) needTaskForAccount(accountId string) (bool, Response) { + if ok, resp := r.needTask(accountId); !ok { + return ok, resp + } + account, ok := r.session.Accounts[accountId] + if !ok { + return false, errorResponseWithSessionState(single(accountId), r.apiError(&ErrorAccountNotFound), r.session.State) + } + if !IgnoreSessionCapabilityChecks { + if account.AccountCapabilities.Tasks == nil { + return false, errorResponseWithSessionState(single(accountId), r.apiError(&ErrorMissingTasksAccountCapability), r.session.State) + } + } + return true, Response{} +} + +func (r Request) needTaskWithAccount() (bool, string, Response) { + accountId, err := r.GetAccountIdForTask() + if err != nil { + return false, "", errorResponse(single(accountId), err) + } + if !IgnoreSessionCapabilityChecks { + if ok, resp := r.needTaskForAccount(accountId); !ok { + return false, accountId, resp + } + } + return true, accountId, Response{} +} + +func (r Request) needCalendar(accountId string) (bool, Response) { + if !IgnoreSessionCapabilityChecks { + if r.session.Capabilities.Calendars == nil { + return false, errorResponseWithSessionState(single(accountId), r.apiError(&ErrorMissingCalendarsSessionCapability), r.session.State) + } + } + return true, Response{} +} + +func (r Request) needCalendarForAccount(accountId string) (bool, Response) { + if ok, resp := r.needCalendar(accountId); !ok { + return ok, resp + } + account, ok := r.session.Accounts[accountId] + if !ok { + return false, errorResponseWithSessionState(single(accountId), r.apiError(&ErrorAccountNotFound), r.session.State) + } + if !IgnoreSessionCapabilityChecks { + if account.AccountCapabilities.Calendars == nil { + return false, errorResponseWithSessionState(single(accountId),
r.apiError(&ErrorMissingCalendarsAccountCapability), r.session.State) + } + } + return true, Response{} +} + +func (r Request) needCalendarWithAccount() (bool, string, Response) { + accountId, err := r.GetAccountIdForCalendar() + if err != nil { + return false, "", errorResponse(single(accountId), err) + } + if !IgnoreSessionCapabilityChecks { + if ok, resp := r.needCalendarForAccount(accountId); !ok { + return false, accountId, resp + } + } + return true, accountId, Response{} +} + +func (r Request) needContact(accountId string) (bool, Response) { + if r.session.Capabilities.Contacts == nil { + return false, errorResponseWithSessionState(single(accountId), r.apiError(&ErrorMissingContactsSessionCapability), r.session.State) + } + return true, Response{} +} + +func (r Request) needContactForAccount(accountId string) (bool, Response) { + if ok, resp := r.needContact(accountId); !ok { + return ok, resp + } + account, ok := r.session.Accounts[accountId] + if !ok { + return false, errorResponseWithSessionState(single(accountId), r.apiError(&ErrorAccountNotFound), r.session.State) + } + if account.AccountCapabilities.Contacts == nil { + return false, errorResponseWithSessionState(single(accountId), r.apiError(&ErrorMissingContactsAccountCapability), r.session.State) + } + return true, Response{} +} + +func (r Request) needContactWithAccount() (bool, string, Response) { + accountId, err := r.GetAccountIdForContact() + if err != nil { + return false, "", errorResponse(single(accountId), err) + } + if ok, resp := r.needContactForAccount(accountId); !ok { + return false, accountId, resp + } + return true, accountId, Response{} +} diff --git a/services/groupware/pkg/groupware/groupware_response.go b/services/groupware/pkg/groupware/groupware_response.go new file mode 100644 index 0000000000..b82fb63d82 --- /dev/null +++ b/services/groupware/pkg/groupware/groupware_response.go @@ -0,0 +1,172 @@ +package groupware + +import ( + "net/http" + + "github.com/opencloud-eu/opencloud/pkg/jmap" +) + +type ResponseObjectType string + +const ( + IndexResponseObjectType = ResponseObjectType("index") + AccountResponseObjectType = ResponseObjectType("account") + IdentityResponseObjectType = ResponseObjectType("identity") + BlobResponseObjectType = ResponseObjectType("blob") + CalendarResponseObjectType = ResponseObjectType("calendar") + EventResponseObjectType = ResponseObjectType("event") + AddressBookResponseObjectType = ResponseObjectType("addressbook") + ContactResponseObjectType = ResponseObjectType("contact") + EmailResponseObjectType = ResponseObjectType("email") + MailboxResponseObjectType = ResponseObjectType("mailbox") + QuotaResponseObjectType = ResponseObjectType("quota") + TaskListResponseObjectType = ResponseObjectType("tasklist") + TaskResponseObjectType = ResponseObjectType("task") + VacationResponseResponseObjectType = ResponseObjectType("vacationresponse") +) + +type Response struct { + body any + status int + err *Error + etag jmap.State + objectType ResponseObjectType + accountIds []string + sessionState jmap.SessionState + contentLanguage jmap.Language +} + +func errorResponse(accountIds []string, err *Error) Response { + return Response{ + accountIds: accountIds, + body: nil, + err: err, + etag: "", + sessionState: "", + } +} + +func errorResponseWithSessionState(accountIds []string, err *Error, sessionState jmap.SessionState) Response { + return Response{ + accountIds: accountIds, + body: nil, + err: err, + etag: "", + sessionState: sessionState, + } +} + +func response(accountIds []string, 
body any, sessionState jmap.SessionState, contentLanguage jmap.Language) Response { + return Response{ + accountIds: accountIds, + body: body, + err: nil, + etag: jmap.State(sessionState), + sessionState: sessionState, + contentLanguage: contentLanguage, + } +} + +func etagResponse(accountIds []string, body any, sessionState jmap.SessionState, objectType ResponseObjectType, etag jmap.State, contentLanguage jmap.Language) Response { + return Response{ + accountIds: accountIds, + body: body, + err: nil, + etag: etag, + objectType: objectType, + sessionState: sessionState, + contentLanguage: contentLanguage, + } +} + +/* +func etagOnlyResponse(body any, etag jmap.State, objectType ResponseObjectType, contentLanguage jmap.Language) Response { + return Response{ + body: body, + err: nil, + etag: etag, + objectType: objectType, + sessionState: "", + contentLanguage: contentLanguage, + } +} +*/ + +func noContentResponse(accountIds []string, sessionState jmap.SessionState) Response { + return Response{ + accountIds: accountIds, + body: nil, + status: http.StatusNoContent, + err: nil, + etag: jmap.State(sessionState), + sessionState: sessionState, + } +} + +func noContentResponseWithEtag(accountIds []string, sessionState jmap.SessionState, objectType ResponseObjectType, etag jmap.State) Response { + return Response{ + accountIds: accountIds, + body: nil, + status: http.StatusNoContent, + err: nil, + etag: etag, + objectType: objectType, + sessionState: sessionState, + } +} + +/* +func acceptedResponse(sessionState jmap.SessionState) Response { + return Response{ + body: nil, + status: http.StatusAccepted, + err: nil, + etag: jmap.State(sessionState), + sessionState: sessionState, + } +} +*/ + +/* +func timeoutResponse(sessionState jmap.SessionState) Response { + return Response{ + body: nil, + status: http.StatusRequestTimeout, + err: nil, + etag: "", + sessionState: sessionState, + } +} +*/ + +func notFoundResponse(accountIds []string, sessionState jmap.SessionState) Response { + return Response{ + accountIds: accountIds, + body: nil, + status: http.StatusNotFound, + err: nil, + etag: "", + sessionState: sessionState, + } +} + +func etagNotFoundResponse(accountIds []string, sessionState jmap.SessionState, objectType ResponseObjectType, etag jmap.State, contentLanguage jmap.Language) Response { + return Response{ + accountIds: accountIds, + body: nil, + status: http.StatusNotFound, + err: nil, + etag: etag, + objectType: objectType, + sessionState: sessionState, + contentLanguage: contentLanguage, + } +} + +func notImplementesResponse() Response { + return Response{ + body: nil, + status: http.StatusNotImplemented, + err: nil, + } +} diff --git a/services/groupware/pkg/groupware/groupware_reva.go b/services/groupware/pkg/groupware/groupware_reva.go new file mode 100644 index 0000000000..4e2704f4ce --- /dev/null +++ b/services/groupware/pkg/groupware/groupware_reva.go @@ -0,0 +1,52 @@ +package groupware + +import ( + "context" + "errors" + "net/http" + + userv1beta1 "github.com/cs3org/go-cs3apis/cs3/identity/user/v1beta1" + "github.com/opencloud-eu/opencloud/pkg/log" + revactx "github.com/opencloud-eu/reva/v2/pkg/ctx" +) + +// UsernameProvider implementation that uses Reva's enrichment of the Context +// to retrieve the current username. 
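+//
+// A minimal usage sketch (illustrative only; it assumes the request context
+// was already enriched by the Auth middleware and that req and logger are in
+// scope):
+//
+//	provider := newRevaContextUsernameProvider()
+//	u, err := provider.GetUser(req, req.Context(), logger)
+//	if err == nil {
+//		_ = u.GetUsername() // also available: u.GetId()
+//	}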
+type revaContextUserProvider struct { +} + +var _ userProvider = revaContextUserProvider{} + +func newRevaContextUsernameProvider() userProvider { + return revaContextUserProvider{} +} + +// var errUserNotInContext = fmt.Errorf("user not in context") + +var ( + errUserNotInRevaContext = errors.New("failed to find user in reva context") +) + +func (r revaContextUserProvider) GetUser(req *http.Request, ctx context.Context, logger *log.Logger) (user, error) { + u, ok := revactx.ContextGetUser(ctx) + if !ok { + err := errUserNotInRevaContext + logger.Error().Err(err).Ctx(ctx).Msgf("could not get user: user not in reva context: %v", ctx) + return nil, err + } + return revaUser{user: u}, nil +} + +type revaUser struct { + user *userv1beta1.User +} + +func (r revaUser) GetUsername() string { + return r.user.GetUsername() +} + +func (r revaUser) GetId() string { + return r.user.GetId().GetOpaqueId() +} + +var _ user = revaUser{} diff --git a/services/groupware/pkg/groupware/groupware_route.go b/services/groupware/pkg/groupware/groupware_route.go new file mode 100644 index 0000000000..9dcf755851 --- /dev/null +++ b/services/groupware/pkg/groupware/groupware_route.go @@ -0,0 +1,181 @@ +package groupware + +import ( + "net/http" + + "github.com/go-chi/chi/v5" +) + +var ( + defaultAccountIds = []string{"_", "*"} +) + +const ( + UriParamAccountId = "accountid" + UriParamMailboxId = "mailboxid" + UriParamEmailId = "emailid" + UriParamIdentityId = "identityid" + UriParamBlobId = "blobid" + UriParamBlobName = "blobname" + UriParamStreamId = "stream" + UriParamRole = "role" + UriParamAddressBookId = "addressbookid" + UriParamCalendarId = "calendarid" + UriParamTaskListId = "tasklistid" + UriParamContactId = "contactid" + UriParamEventId = "eventid" + QueryParamMailboxSearchName = "name" + QueryParamMailboxSearchRole = "role" + QueryParamMailboxSearchSubscribed = "subscribed" + QueryParamBlobType = "type" + QueryParamSince = "since" + QueryParamMaxChanges = "maxchanges" + QueryParamMailboxId = "mailbox" + QueryParamIdentityId = "identity" + QueryParamMoveFromMailboxId = "move-from" + QueryParamMoveToMailboxId = "move-to" + QueryParamNotInMailboxId = "notmailbox" + QueryParamSearchText = "text" + QueryParamSearchFrom = "from" + QueryParamSearchTo = "to" + QueryParamSearchCc = "cc" + QueryParamSearchBcc = "bcc" + QueryParamSearchSubject = "subject" + QueryParamSearchBody = "body" + QueryParamSearchBefore = "before" + QueryParamSearchAfter = "after" + QueryParamSearchMinSize = "minsize" + QueryParamSearchMaxSize = "maxsize" + QueryParamSearchKeyword = "keyword" + QueryParamSearchMessageId = "messageId" + QueryParamOffset = "offset" + QueryParamLimit = "limit" + QueryParamDays = "days" + QueryParamPartId = "partId" + QueryParamAttachmentName = "name" + QueryParamAttachmentBlobId = "blobId" + QueryParamSeen = "seen" + QueryParamUndesirable = "undesirable" + QueryParamMarkAsSeen = "markAsSeen" + HeaderSince = "if-none-match" +) + +func (g *Groupware) Route(r chi.Router) { + r.Get("/", g.Index) + r.Route("/accounts", func(r chi.Router) { + r.Get("/", g.GetAccountsWithTheirIdentities) + r.Route("/all", func(r chi.Router) { + r.Get("/", g.GetAccounts) + r.Route("/mailboxes", func(r chi.Router) { + r.Get("/", g.GetMailboxesForAllAccounts) // ?role= + r.Get("/changes", g.GetMailboxChangesForAllAccounts) + r.Get("/roles", g.GetMailboxRoles) // ?role= + r.Get("/roles/{role}", g.GetMailboxByRoleForAllAccounts) // ?role= + }) + r.Route("/emails", func(r chi.Router) { + r.Get("/", g.GetEmailsForAllAccounts) + 
r.Get("/latest/summary", g.GetLatestEmailsSummaryForAllAccounts) // ?limit=10&seen=true&undesirable=true + }) + r.Route("/quota", func(r chi.Router) { + r.Get("/", g.GetQuotaForAllAccounts) + }) + }) + r.Route("/{accountid}", func(r chi.Router) { + r.Get("/", g.GetAccount) + r.Route("/identities", func(r chi.Router) { + r.Get("/", g.GetIdentities) + r.Post("/", g.AddIdentity) + r.Route("/{identityid}", func(r chi.Router) { + r.Get("/", g.GetIdentityById) + r.Patch("/", g.ModifyIdentity) + r.Delete("/", g.DeleteIdentity) + }) + }) + r.Get("/vacation", g.GetVacation) + r.Put("/vacation", g.SetVacation) + r.Get("/quota", g.GetQuota) + r.Route("/mailboxes", func(r chi.Router) { + r.Get("/", g.GetMailboxes) // ?name=&role=&subcribed= + r.Post("/", g.CreateMailbox) + r.Route("/{mailboxid}", func(r chi.Router) { + r.Get("/", g.GetMailbox) + r.Get("/emails", g.GetAllEmailsInMailbox) + r.Get("/changes", g.GetMailboxChanges) + r.Patch("/", g.UpdateMailbox) + r.Delete("/", g.DeleteMailbox) + }) + }) + r.Route("/emails", func(r chi.Router) { + r.Get("/", g.GetEmails) // ?fetchemails=true&fetchbodies=true&text=&subject=&body=&keyword=&keyword=&... + r.Post("/", g.CreateEmail) + r.Delete("/", g.DeleteEmails) + r.Route("/{emailid}", func(r chi.Router) { + r.Get("/", g.GetEmailsById) // Accept:message/rfc822 + r.Put("/", g.ReplaceEmail) + r.Post("/", g.SendEmail) + r.Patch("/", g.UpdateEmail) + r.Delete("/", g.DeleteEmail) + Report(r, "/", g.RelatedToEmail) + r.Route("/related", func(r chi.Router) { + r.Get("/", g.RelatedToEmail) + }) + r.Route("/keywords", func(r chi.Router) { + r.Patch("/", g.UpdateEmailKeywords) + r.Post("/", g.AddEmailKeywords) + r.Delete("/", g.RemoveEmailKeywords) + }) + r.Route("/attachments", func(r chi.Router) { + r.Get("/", g.GetEmailAttachments) // ?partId=&name=?&blobId=? 
+ }) + }) + }) + r.Route("/blobs", func(r chi.Router) { + r.Get("/{blobid}", g.GetBlobMeta) + r.Get("/{blobid}/{blobname}", g.DownloadBlob) // ?type= + r.Post("/", g.UploadBlob) + }) + r.Route("/ical", func(r chi.Router) { + r.Get("/{blobid}", g.ParseIcalBlob) + }) + r.Route("/addressbooks", func(r chi.Router) { + r.Get("/", g.GetAddressbooks) + r.Route("/{addressbookid}", func(r chi.Router) { + r.Get("/", g.GetAddressbook) + r.Get("/contacts", g.GetContactsInAddressbook) + }) + }) + r.Route("/contacts", func(r chi.Router) { + r.Post("/", g.CreateContact) + r.Delete("/{contactid}", g.DeleteContact) + r.Get("/{contactid}", g.GetContactById) + }) + r.Route("/calendars", func(r chi.Router) { + r.Get("/", g.GetCalendars) + r.Route("/{calendarid}", func(r chi.Router) { + r.Get("/", g.GetCalendarById) + r.Get("/events", g.GetEventsInCalendar) + }) + }) + r.Route("/events", func(r chi.Router) { + r.Post("/", g.CreateCalendarEvent) + r.Delete("/{eventid}", g.DeleteCalendarEvent) + }) + r.Route("/tasklists", func(r chi.Router) { + r.Get("/", g.GetTaskLists) + r.Route("/{tasklistid}", func(r chi.Router) { + r.Get("/", g.GetTaskListById) + r.Get("/tasks", g.GetTasksInTaskList) + }) + }) + }) + }) + + r.HandleFunc("/events/{stream}", g.ServeSSE) + + r.NotFound(g.NotFound) + r.MethodNotAllowed(g.MethodNotAllowed) +} + +func Report(r chi.Router, pattern string, h http.HandlerFunc) { + r.MethodFunc("REPORT", pattern, h) +} diff --git a/services/groupware/pkg/groupware/groupware_session.go b/services/groupware/pkg/groupware/groupware_session.go new file mode 100644 index 0000000000..9b1f0eab9b --- /dev/null +++ b/services/groupware/pkg/groupware/groupware_session.go @@ -0,0 +1,324 @@ +package groupware + +import ( + "context" + "fmt" + "net/url" + "time" + + "github.com/jellydator/ttlcache/v3" + "github.com/miekg/dns" + "github.com/prometheus/client_golang/prometheus" + + "github.com/opencloud-eu/opencloud/pkg/jmap" + "github.com/opencloud-eu/opencloud/pkg/log" + "github.com/opencloud-eu/opencloud/services/groupware/pkg/metrics" +) + +// An alias for the internal session cache key, which might become something composed in the future. +type sessionCacheKey string + +func toSessionCacheKey(username string) sessionCacheKey { + return sessionCacheKey(username) +} + +func (k sessionCacheKey) username() string { + return string(k) +} + +// Interface for cached sessions in the session cache. +// The purpose here is mainly to be able to also persist failed +// attempts to retrieve a session. +type cachedSession interface { + // Whether the Session retrieval was successful or not. + Success() bool + // When Success() returns true, one may use this method to retrieve the actual JMAP Session. + Get() jmap.Session + // When Success() returns false, one may use this method to retrieve the error that caused the failure. + Error() *GroupwareError + // The timestamp of when this cached session information was obtained, regardless of success or failure. + Since() time.Time + // The timestamp of when this cached session information will be invalidated, regardless of success or failure. + Until() time.Time +} + +// An implementation of a cachedSession that succeeded. +type succeededSession struct { + // Timestamp of when this succeededSession was created. + since time.Time + // Until when the session will be cached + until time.Time + // The JMAP Session itself. 
+ session jmap.Session +} + +var _ cachedSession = succeededSession{} + +func (s succeededSession) Success() bool { + return true +} +func (s succeededSession) Get() jmap.Session { + return s.session +} +func (s succeededSession) Error() *GroupwareError { + return nil +} +func (s succeededSession) Since() time.Time { + return s.since +} +func (s succeededSession) Until() time.Time { + return s.until +} + +// An implementation of a cachedSession that failed. +type failedSession struct { + // Timestamp of when this failedSession was created. + since time.Time + // Until when the failure will be cached, without re-attempting to retrieve the Session. + until time.Time + // The error that caused the Session acquisition to fail. + err *GroupwareError +} + +var _ cachedSession = failedSession{} + +func (s failedSession) Success() bool { + return false +} +func (s failedSession) Get() jmap.Session { + panic(fmt.Sprintf("never call %T.Get()", failedSession{})) +} +func (s failedSession) Error() *GroupwareError { + return s.err +} +func (s failedSession) Since() time.Time { + return s.since +} +func (s failedSession) Until() time.Time { + return s.until +} + +// Implements the ttlcache.Loader interface, by loading JMAP Sessions for users +// using the jmap.Client. +type sessionCacheLoader struct { + logger *log.Logger + // A minimalistic contract for supplying the JMAP Session URL for a given username. + sessionUrlProvider func(username string) (*url.URL, *GroupwareError) + // A minimalistic contract for supplying JMAP Sessions using various input parameters. + sessionSupplier func(sessionUrl *url.URL, username string, logger *log.Logger) (jmap.Session, jmap.Error) + errorTtl time.Duration +} + +var _ ttlcache.Loader[sessionCacheKey, cachedSession] = &sessionCacheLoader{} + +func (l *sessionCacheLoader) Load(c *ttlcache.Cache[sessionCacheKey, cachedSession], key sessionCacheKey) *ttlcache.Item[sessionCacheKey, cachedSession] { + username := key.username() + sessionUrl, gwerr := l.sessionUrlProvider(username) + if gwerr != nil { + l.logger.Warn().Str("username", username).Str("code", gwerr.Code).Msgf("failed to determine session URL for '%v'", key) + now := time.Now() + until := now.Add(l.errorTtl) + return c.Set(key, failedSession{since: now, until: until, err: gwerr}, l.errorTtl) + } + session, jerr := l.sessionSupplier(sessionUrl, username, l.logger) + if jerr != nil { + l.logger.Warn().Str("username", username).Err(jerr).Msgf("failed to create session for '%v'", key) + now := time.Now() + until := now.Add(l.errorTtl) + return c.Set(key, failedSession{since: now, until: until, err: groupwareErrorFromJmap(jerr)}, l.errorTtl) + } else { + l.logger.Debug().Str("username", username).Msgf("successfully created session for '%v'", key) + now := time.Now() + until := now.Add(ttlcache.DefaultTTL) + return c.Set(key, succeededSession{since: now, until: until, session: session}, ttlcache.DefaultTTL) // use the TTL configured on the Cache + } +} + +type sessionCache interface { + Get(username string) cachedSession + jmap.SessionEventListener +} + +type ttlcacheSessionCache struct { + sessionCache *ttlcache.Cache[sessionCacheKey, cachedSession] + outdatedSessionCounter prometheus.Counter + logger *log.Logger +} + +var _ sessionCache = &ttlcacheSessionCache{} +var _ jmap.SessionEventListener = &ttlcacheSessionCache{} + +func (c *ttlcacheSessionCache) Get(username string) cachedSession { + item := c.sessionCache.Get(toSessionCacheKey(username)) + if item != nil { + return item.Value() + } else { + return nil + } 
+} + +type sessionCacheBuilder struct { + logger *log.Logger + sessionSupplier func(sessionUrl *url.URL, username string, logger *log.Logger) (jmap.Session, jmap.Error) + defaultUrlResolver func(string) (*url.URL, *GroupwareError) + sessionUrlResolverFactory func() (func(string) (*url.URL, *GroupwareError), *GroupwareInitializationError) + prometheusRegistry prometheus.Registerer + m *metrics.Metrics + sessionCacheMaxCapacity uint64 + sessionCacheTtl time.Duration + sessionFailureCacheTtl time.Duration +} + +func newSessionCacheBuilder( + sessionUrl *url.URL, + logger *log.Logger, + sessionSupplier func(sessionUrl *url.URL, username string, logger *log.Logger) (jmap.Session, jmap.Error), + prometheusRegistry prometheus.Registerer, + m *metrics.Metrics, + sessionCacheMaxCapacity uint64, + sessionCacheTtl time.Duration, + sessionFailureCacheTtl time.Duration, +) *sessionCacheBuilder { + defaultUrlResolver := func(_ string) (*url.URL, *GroupwareError) { + return sessionUrl, nil + } + + return &sessionCacheBuilder{ + logger: logger, + sessionSupplier: sessionSupplier, + defaultUrlResolver: defaultUrlResolver, + sessionUrlResolverFactory: func() (func(string) (*url.URL, *GroupwareError), *GroupwareInitializationError) { + return defaultUrlResolver, nil + }, + prometheusRegistry: prometheusRegistry, + m: m, + sessionCacheMaxCapacity: sessionCacheMaxCapacity, + sessionCacheTtl: sessionCacheTtl, + sessionFailureCacheTtl: sessionFailureCacheTtl, + } +} + +func (b *sessionCacheBuilder) withDnsAutoDiscovery( + defaultSessionDomain string, + config *dns.ClientConfig, + dnsDialTimeout time.Duration, + dnsReadTimeout time.Duration, + domainGreenList []string, + domainRedList []string, +) *sessionCacheBuilder { + dnsSessionUrlResolverFactory := func() (func(string) (*url.URL, *GroupwareError), *GroupwareInitializationError) { + d, err := NewDnsSessionUrlResolver( + b.defaultUrlResolver, + defaultSessionDomain, + config, + domainGreenList, + domainRedList, + dnsDialTimeout, + dnsReadTimeout, + ) + if err != nil { + return nil, &GroupwareInitializationError{Message: "failed to instantiate the DNS session URL resolver", Err: err} + } else { + return d.Resolve, nil + } + } + b.sessionUrlResolverFactory = dnsSessionUrlResolverFactory + return b +} + +func (b sessionCacheBuilder) build() (sessionCache, error) { + var cache *ttlcache.Cache[sessionCacheKey, cachedSession] + + sessionUrlResolver, err := b.sessionUrlResolverFactory() + if err != nil { + return nil, err + } + + sessionLoader := &sessionCacheLoader{ + logger: b.logger, + sessionSupplier: b.sessionSupplier, + errorTtl: b.sessionFailureCacheTtl, + sessionUrlProvider: sessionUrlResolver, + } + + cache = ttlcache.New( + ttlcache.WithCapacity[sessionCacheKey, cachedSession](b.sessionCacheMaxCapacity), + ttlcache.WithTTL[sessionCacheKey, cachedSession](b.sessionCacheTtl), + ttlcache.WithDisableTouchOnHit[sessionCacheKey, cachedSession](), + ttlcache.WithLoader(sessionLoader), + ) + + b.prometheusRegistry.Register(sessionCacheMetricsCollector{desc: b.m.SessionCacheDesc, supply: cache.Metrics}) + + cache.OnEviction(func(c context.Context, r ttlcache.EvictionReason, item *ttlcache.Item[sessionCacheKey, cachedSession]) { + if b.logger.Trace().Enabled() { + reason := "" + switch r { + case ttlcache.EvictionReasonDeleted: + reason = "deleted" + case ttlcache.EvictionReasonCapacityReached: + reason = "capacity reached" + case ttlcache.EvictionReasonExpired: + reason = fmt.Sprintf("expired after %v", item.TTL()) + case ttlcache.EvictionReasonMaxCostExceeded: + 
reason = "max cost exceeded" + } + if reason == "" { + reason = fmt.Sprintf("unknown (%v)", r) + } + spentInCache := time.Since(item.Value().Since()) + tipe := "successful" + if !item.Value().Success() { + tipe = "failed" + } + b.logger.Trace().Msgf("%s session cache eviction of user '%v' after %v: %v", tipe, item.Key(), spentInCache, reason) + } + }) + + s := &ttlcacheSessionCache{ + sessionCache: cache, + logger: b.logger, + outdatedSessionCounter: b.m.OutdatedSessionsCounter, + } + + go cache.Start() + + return s, nil +} + +func (c ttlcacheSessionCache) OnSessionOutdated(session *jmap.Session, newSessionState jmap.SessionState) { + // it's enough to remove the session from the cache, as it will be fetched on-demand + // the next time an operation is performed on behalf of the user + c.sessionCache.Delete(toSessionCacheKey(session.Username)) + if c.outdatedSessionCounter != nil { + c.outdatedSessionCounter.Inc() + } + + c.logger.Trace().Msgf("removed outdated session for user '%v': state %v -> %v", session.Username, session.State, newSessionState) +} + +// A Prometheus Collector for the Session cache metrics. +type sessionCacheMetricsCollector struct { + desc *prometheus.Desc + supply func() ttlcache.Metrics +} + +func (s sessionCacheMetricsCollector) Describe(ch chan<- *prometheus.Desc) { + ch <- s.desc +} +func (s sessionCacheMetricsCollector) Collect(ch chan<- prometheus.Metric) { + m := s.supply() + ch <- prometheus.MustNewConstMetric(s.desc, prometheus.GaugeValue, float64(m.Evictions), metrics.Values.SessionCache.Evictions) + ch <- prometheus.MustNewConstMetric(s.desc, prometheus.GaugeValue, float64(m.Insertions), metrics.Values.SessionCache.Insertions) + ch <- prometheus.MustNewConstMetric(s.desc, prometheus.GaugeValue, float64(m.Hits), metrics.Values.SessionCache.Hits) + ch <- prometheus.MustNewConstMetric(s.desc, prometheus.GaugeValue, float64(m.Misses), metrics.Values.SessionCache.Misses) +} + +var _ prometheus.Collector = sessionCacheMetricsCollector{} + +// Create a new log.Logger that is decorated with fields containing information about the Session. +func decorateLogger(l *log.Logger, session jmap.Session) *log.Logger { + return log.From(l.With(). + Str(logUsername, log.SafeString(session.Username)). + Str(logSessionState, log.SafeString(string(session.State)))) +} diff --git a/services/groupware/pkg/groupware/groupware_test.go b/services/groupware/pkg/groupware/groupware_test.go new file mode 100644 index 0000000000..6125e9b664 --- /dev/null +++ b/services/groupware/pkg/groupware/groupware_test.go @@ -0,0 +1,64 @@ +package groupware + +import ( + "slices" + "testing" + + "github.com/opencloud-eu/opencloud/pkg/jmap" + "github.com/opencloud-eu/opencloud/pkg/structs" + "github.com/stretchr/testify/require" +) + +func TestSanitizeEmail(t *testing.T) { + email := jmap.Email{ + Subject: "test", + BodyValues: map[string]jmap.EmailBodyValue{ + "koze92I1": { + Value: `Cyberdyne`, + }, + "zee7urae": { + Value: `Hello. Click here for AI slop.`, + }, + }, + HtmlBody: []jmap.EmailBodyPart{ + { + PartId: "koze92I1", + Type: "text/html", + Size: 71, + }, + { + PartId: "zee7urae", + Type: "text/html", + Size: 81, + }, + }, + } + + g := &Groupware{config: groupwareConfig{sanitize: true}} + req := Request{g: g} + + safe, err := req.sanitizeEmail(email) + + require := require.New(t) + require.Nil(err) + require.Equal(`Cyberdyne`, safe.BodyValues["koze92I1"].Value) + require.Equal(63, safe.HtmlBody[0].Size) + require.Equal(`Hello. 
Click here for AI slop.`, safe.BodyValues["zee7urae"].Value) + require.Equal(30, safe.HtmlBody[1].Size) +} + +func TestSortMailboxes(t *testing.T) { + o := -10 + mailboxes := []jmap.Mailbox{ + {Id: "a", Name: "Other"}, + {Id: "b", Role: jmap.JmapMailboxRoleSent, Name: "Sent"}, + {Id: "c", Name: "Zebras"}, + {Id: "d", Role: jmap.JmapMailboxRoleInbox, Name: "Inbox"}, + {Id: "e", Name: "Appraisal"}, + {Id: "f", Name: "Zealots", SortOrder: &o}, + } + slices.SortFunc(mailboxes, compareMailboxes) + names := structs.Map(mailboxes, func(m jmap.Mailbox) string { return m.Name }) + require := require.New(t) + require.Equal([]string{"Zealots", "Inbox", "Sent", "Appraisal", "Other", "Zebras"}, names) +} diff --git a/services/groupware/pkg/logging/logging.go b/services/groupware/pkg/logging/logging.go new file mode 100644 index 0000000000..c8756ebcc3 --- /dev/null +++ b/services/groupware/pkg/logging/logging.go @@ -0,0 +1,17 @@ +package logging + +import ( + "github.com/opencloud-eu/opencloud/pkg/log" + "github.com/opencloud-eu/opencloud/services/groupware/pkg/config" +) + +// Configure initializes a service-specific logger instance. +func Configure(name string, cfg *config.Log) log.Logger { + return log.NewLogger( + log.Name(name), + log.Level(cfg.Level), + log.Pretty(cfg.Pretty), + log.Color(cfg.Color), + log.File(cfg.File), + ) +} diff --git a/services/groupware/pkg/metrics/http_metrics.go b/services/groupware/pkg/metrics/http_metrics.go new file mode 100644 index 0000000000..3cd08592e7 --- /dev/null +++ b/services/groupware/pkg/metrics/http_metrics.go @@ -0,0 +1,11 @@ +package metrics + +type HttpMetrics struct { +} + +// New initializes the available metrics. +func NewHttpMetrics() *HttpMetrics { + m := &HttpMetrics{} + + return m +} diff --git a/services/groupware/pkg/metrics/metrics.go b/services/groupware/pkg/metrics/metrics.go new file mode 100644 index 0000000000..02ac85f4fd --- /dev/null +++ b/services/groupware/pkg/metrics/metrics.go @@ -0,0 +1,343 @@ +package metrics + +import ( + "reflect" + "strconv" + + "github.com/opencloud-eu/opencloud/pkg/log" + "github.com/prometheus/client_golang/prometheus" +) + +const ( + // Namespace defines the namespace for the defines metrics. + Namespace = "opencloud" + + // Subsystem defines the subsystem for the defines metrics. + Subsystem = "groupware" +) + +// Metrics defines the available metrics of this service. 
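+//
+// Fields that implement prometheus.Collector are registered automatically by
+// registerAll (called from New); the *prometheus.Desc and GaugeOpts fields
+// only describe metrics whose collectors are created and registered elsewhere,
+// e.g. the session cache collector.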
+type Metrics struct { + SessionCacheDesc *prometheus.Desc + EventBufferSizeDesc *prometheus.Desc + EventBufferQueuedOpts prometheus.GaugeOpts + SSESubscribersOpts prometheus.GaugeOpts + WorkersBufferSizeDesc *prometheus.Desc + WorkersBufferQueuedOpts prometheus.GaugeOpts + TotalWorkersDesc *prometheus.Desc + BusyWorkersOpts prometheus.GaugeOpts + + JmapErrorCounter *prometheus.CounterVec + ParameterErrorCounter *prometheus.CounterVec + AuthenticationFailureCounter prometheus.Counter + SessionFailureCounter prometheus.Counter + SSEEventsCounter *prometheus.CounterVec + OutdatedSessionsCounter prometheus.Counter + + SuccessfulRequestPerEndpointCounter *prometheus.CounterVec + FailedRequestPerEndpointCounter *prometheus.CounterVec + FailedRequestStatusPerEndpointCounter *prometheus.CounterVec + ResponseBodyReadingErrorPerEndpointCounter *prometheus.CounterVec + ResponseBodyUnmarshallingErrorPerEndpointCounter *prometheus.CounterVec + + EmailByIdDuration *prometheus.HistogramVec + EmailSameSenderDuration *prometheus.HistogramVec + EmailSameThreadDuration *prometheus.HistogramVec +} + +var Labels = struct { + Endpoint string + Result string + SessionCacheType string + RequestId string + TraceId string + SSEType string + ErrorCode string + HttpStatusCode string +}{ + Endpoint: "endpoint", + Result: "result", + SessionCacheType: "type", + RequestId: "requestID", + TraceId: "traceID", + SSEType: "type", + ErrorCode: "code", + HttpStatusCode: "statusCode", +} + +var Values = struct { + Result struct { + Found string + NotFound string + Success string + Failure string + } + SessionCache struct { + Insertions string + Hits string + Misses string + Evictions string + } +}{ + Result: struct { + Found string + NotFound string + Success string + Failure string + }{ + Found: "found", + NotFound: "not-found", + Success: "success", + Failure: "failure", + }, + SessionCache: struct { + Insertions string + Hits string + Misses string + Evictions string + }{ + Insertions: "insertions", + Hits: "hits", + Misses: "misses", + Evictions: "evictions", + }, +} + +// New initializes the available metrics. 
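+//
+// A construction sketch (illustrative; the label value and variable names are
+// made up, the registerer wiring mirrors the HTTP service setup):
+//
+//	reg := NewLoggingPrometheusRegisterer(prometheus.DefaultRegisterer, logger)
+//	m := New(reg, logger)
+//	m.SSEEventsCounter.WithLabelValues("change").Inc()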
+func New(registerer prometheus.Registerer, logger *log.Logger) *Metrics { + m := &Metrics{ + SessionCacheDesc: prometheus.NewDesc( + prometheus.BuildFQName(Namespace, Subsystem, "session_cache"), + "Session cache statistics", + []string{Labels.SessionCacheType}, + nil, + ), + EventBufferSizeDesc: prometheus.NewDesc( + prometheus.BuildFQName(Namespace, Subsystem, "event_buffer_size"), + "Size of the buffer channel for server-sent events to process", + nil, + nil, + ), + EventBufferQueuedOpts: prometheus.GaugeOpts{ + Namespace: Namespace, + Subsystem: Subsystem, + Name: "event_buffer_queued", + Help: "Number of queued server-sent events", + }, + SSESubscribersOpts: prometheus.GaugeOpts{ + Namespace: Namespace, + Subsystem: Subsystem, + Name: "sse_subscribers", + Help: "Number of subscribers for server-sent event streams", + }, + WorkersBufferSizeDesc: prometheus.NewDesc( + prometheus.BuildFQName(Namespace, Subsystem, "workers_buffer_size"), + "Size of the buffer channel for background worker jobs", + nil, + nil, + ), + WorkersBufferQueuedOpts: prometheus.GaugeOpts{ + Namespace: Namespace, + Subsystem: Subsystem, + Name: "workers_buffer_queued", + Help: "Number of queued background jobs", + }, + TotalWorkersDesc: prometheus.NewDesc( + prometheus.BuildFQName(Namespace, Subsystem, "workers_total"), + "Total amount of background job workers", + nil, + nil, + ), + BusyWorkersOpts: prometheus.GaugeOpts{ + Namespace: Namespace, + Subsystem: Subsystem, + Name: "workers_busy", + Help: "Number of background job workers that are currently busy executing jobs", + }, + AuthenticationFailureCounter: prometheus.NewCounter(prometheus.CounterOpts{ + Namespace: Namespace, + Subsystem: Subsystem, + Name: "auth_failures_count", + Help: "Number of failed authentications", + }), + SessionFailureCounter: prometheus.NewCounter(prometheus.CounterOpts{ + Namespace: Namespace, + Subsystem: Subsystem, + Name: "session_failures_count", + Help: "Number of session retrieval failures", + }), + ParameterErrorCounter: prometheus.NewCounterVec(prometheus.CounterOpts{ + Namespace: Namespace, + Subsystem: Subsystem, + Name: "param_errors_count", + Help: "Number of invalid request parameter errors that occured", + }, []string{Labels.ErrorCode}), + JmapErrorCounter: prometheus.NewCounterVec(prometheus.CounterOpts{ + Namespace: Namespace, + Subsystem: Subsystem, + Name: "jmap_errors_count", + Help: "Number of JMAP errors that occured", + }, []string{Labels.Endpoint, Labels.ErrorCode}), + SuccessfulRequestPerEndpointCounter: prometheus.NewCounterVec(prometheus.CounterOpts{ + Namespace: Namespace, + Subsystem: Subsystem, + Name: "jmap_requests_count", + Help: "Number of JMAP requests", + ConstLabels: prometheus.Labels{ + Labels.Result: Values.Result.Success, + }, + }, []string{Labels.Endpoint}), + FailedRequestPerEndpointCounter: prometheus.NewCounterVec(prometheus.CounterOpts{ + Namespace: Namespace, + Subsystem: Subsystem, + Name: "jmap_requests_count", + Help: "Number of JMAP requests", + ConstLabels: prometheus.Labels{ + Labels.Result: Values.Result.Failure, + }, + }, []string{Labels.Endpoint}), + FailedRequestStatusPerEndpointCounter: prometheus.NewCounterVec(prometheus.CounterOpts{ + Namespace: Namespace, + Subsystem: Subsystem, + Name: "jmap_requests_failures_status_count", + Help: "Number of JMAP requests", + }, []string{Labels.Endpoint, Labels.HttpStatusCode}), + ResponseBodyReadingErrorPerEndpointCounter: prometheus.NewCounterVec(prometheus.CounterOpts{ + Namespace: Namespace, + Subsystem: Subsystem, + Name: 
"jmap_requests_body_reading_errors_count", + Help: "Number of JMAP body reading errors", + }, []string{Labels.Endpoint}), + ResponseBodyUnmarshallingErrorPerEndpointCounter: prometheus.NewCounterVec(prometheus.CounterOpts{ + Namespace: Namespace, + Subsystem: Subsystem, + Name: "jmap_requests_body_unmarshalling_errors_count", + Help: "Number of JMAP body unmarshalling errors", + }, []string{Labels.Endpoint}), + SSEEventsCounter: prometheus.NewCounterVec(prometheus.CounterOpts{ + Namespace: Namespace, + Subsystem: Subsystem, + Name: "sse_events_count", + Help: "Number of Server-Side Events that have been sent", + }, []string{Labels.SSEType}), + OutdatedSessionsCounter: prometheus.NewCounter(prometheus.CounterOpts{ + Namespace: Namespace, + Subsystem: Subsystem, + Name: "outdated_sessions_count", + Help: "Counts outdated session events", + }), + EmailByIdDuration: prometheus.NewHistogramVec(prometheus.HistogramOpts{ + Namespace: Namespace, + Subsystem: Subsystem, + NativeHistogramBucketFactor: 1.1, + Name: "email_by_id_duration_seconds", + Help: "Duration in seconds for retrieving an Email by its id", + }, []string{Labels.Endpoint, Labels.Result}), + EmailSameSenderDuration: prometheus.NewHistogramVec(prometheus.HistogramOpts{ + Namespace: Namespace, + Subsystem: Subsystem, + NativeHistogramBucketFactor: 1.1, + Name: "email_same_sender_duration_seconds", + Help: "Duration in seconds for searching for related same-sender Emails", + }, []string{Labels.Endpoint}), + EmailSameThreadDuration: prometheus.NewHistogramVec(prometheus.HistogramOpts{ + Namespace: Namespace, + Subsystem: Subsystem, + NativeHistogramBucketFactor: 1.1, + Name: "email_same_thread_duration_seconds", + Help: "Duration in seconds for searching for related same-thread Emails", + }, []string{Labels.Endpoint}), + } + + registerAll(registerer, m, logger) + + return m +} + +func WithExemplar(obs prometheus.Observer, value float64, requestId string, traceId string) { + obs.(prometheus.ExemplarObserver).ObserveWithExemplar(value, prometheus.Labels{Labels.RequestId: requestId, Labels.TraceId: traceId}) +} + +func registerAll(registerer prometheus.Registerer, m any, logger *log.Logger) { + r := reflect.ValueOf(m) + if r.Kind() == reflect.Pointer { + r = r.Elem() + } + total := 0 + succeeded := 0 + failed := 0 + for i := 0; i < r.NumField(); i++ { + n := r.Type().Field(i).Name + f := r.Field(i) + v := f.Interface() + c, ok := v.(prometheus.Collector) + if ok { + total++ + err := registerer.Register(c) + if err != nil { + failed++ + logger.Warn().Err(err).Msgf("failed to register metric '%s' (%T)", n, c) + } else { + succeeded++ + } + } + } + logger.Debug().Msgf("registered %d/%d metrics successfully (%d failed)", succeeded, total, failed) +} + +type ConstMetricCollector struct { + Metric prometheus.Metric +} + +func (c ConstMetricCollector) Describe(ch chan<- *prometheus.Desc) { + ch <- c.Metric.Desc() +} +func (c ConstMetricCollector) Collect(ch chan<- prometheus.Metric) { + ch <- c.Metric +} + +type LoggingPrometheusRegisterer struct { + delegate prometheus.Registerer + logger *log.Logger +} + +func NewLoggingPrometheusRegisterer(delegate prometheus.Registerer, logger *log.Logger) *LoggingPrometheusRegisterer { + return &LoggingPrometheusRegisterer{ + delegate: delegate, + logger: logger, + } +} + +func (r *LoggingPrometheusRegisterer) Register(c prometheus.Collector) error { + err := r.delegate.Register(c) + if err != nil { + switch err.(type) { + case prometheus.AlreadyRegisteredError: + // silently ignore this error, as this case 
can happen when the suture service decides to restart + err = nil + default: + r.logger.Warn().Err(err).Msgf("failed to register metric") + } + } + return err +} + +func (r *LoggingPrometheusRegisterer) MustRegister(collectors ...prometheus.Collector) { + for _, c := range collectors { + r.Register(c) + } +} + +func (r *LoggingPrometheusRegisterer) Unregister(c prometheus.Collector) bool { + return r.delegate.Unregister(c) +} + +var _ prometheus.Registerer = &LoggingPrometheusRegisterer{} + +func Endpoint(endpoint string) prometheus.Labels { + return prometheus.Labels{Labels.Endpoint: endpoint} +} + +func EndpointAndStatus(endpoint string, status int) prometheus.Labels { + return prometheus.Labels{Labels.Endpoint: endpoint, Labels.HttpStatusCode: strconv.Itoa(status)} +} diff --git a/services/groupware/pkg/metrics/startup_metrics.go b/services/groupware/pkg/metrics/startup_metrics.go new file mode 100644 index 0000000000..f762286243 --- /dev/null +++ b/services/groupware/pkg/metrics/startup_metrics.go @@ -0,0 +1,28 @@ +package metrics + +import ( + "sync/atomic" + + "github.com/opencloud-eu/opencloud/pkg/version" + "github.com/prometheus/client_golang/prometheus" +) + +var registered atomic.Bool + +func StartupMetrics(registerer prometheus.Registerer) { + // use an atomic boolean to make the operation idempotent, + // instead of causing a panic in case this function is + // called twice + if registered.CompareAndSwap(false, true) { + // https://github.com/prometheus/common/blob/8558a5b7db3c84fa38b4766966059a7bd5bfa2ee/version/info.go#L36-L56 + registerer.Register(prometheus.NewGaugeFunc(prometheus.GaugeOpts{ + Namespace: Namespace, + Subsystem: Subsystem, + Name: "build_info", + Help: "Build information", + ConstLabels: prometheus.Labels{ + "version": version.GetString(), + }, + }, func() float64 { return 1 })) + } +} diff --git a/services/groupware/pkg/middleware/auth.go b/services/groupware/pkg/middleware/auth.go new file mode 100644 index 0000000000..6c8b76fb82 --- /dev/null +++ b/services/groupware/pkg/middleware/auth.go @@ -0,0 +1,78 @@ +package middleware + +import ( + "net/http" + + gmmetadata "go-micro.dev/v4/metadata" + "google.golang.org/grpc/metadata" + + "github.com/opencloud-eu/opencloud/pkg/account" + "github.com/opencloud-eu/opencloud/pkg/log" + opkgm "github.com/opencloud-eu/opencloud/pkg/middleware" + "github.com/opencloud-eu/reva/v2/pkg/auth/scope" + revactx "github.com/opencloud-eu/reva/v2/pkg/ctx" + "github.com/opencloud-eu/reva/v2/pkg/token/manager/jwt" +) + +// authOptions initializes the available default options. +func authOptions(opts ...account.Option) account.Options { + opt := account.Options{} + + for _, o := range opts { + o(&opt) + } + + return opt +} + +func Auth(opts ...account.Option) func(http.Handler) http.Handler { + opt := authOptions(opts...) 
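+
+	// Build a reva JWT token manager from the configured shared secret; it is
+	// used below to validate and dismantle the access token of every request.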
+	tokenManager, err := jwt.New(map[string]any{
+		"secret":  opt.JWTSecret,
+		"expires": int64(24 * 60 * 60),
+	})
+	if err != nil {
+		opt.Logger.Fatal().Err(err).Msgf("Could not initialize token-manager")
+	}
+	return func(next http.Handler) http.Handler {
+		return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+			ctx := r.Context()
+			t := r.Header.Get(revactx.TokenHeader)
+			if t == "" {
+				opt.Logger.Error().Str(log.RequestIDString, r.Header.Get("X-Request-ID")).Msgf("missing access token in header %v", revactx.TokenHeader)
+				w.WriteHeader(http.StatusUnauthorized) // missing access token
+				return
+			}
+
+			u, tokenScope, err := tokenManager.DismantleToken(r.Context(), t)
+			if err != nil {
+				opt.Logger.Error().Str(log.RequestIDString, r.Header.Get("X-Request-ID")).Err(err).Msgf("invalid access token in header %v", revactx.TokenHeader)
+				w.WriteHeader(http.StatusUnauthorized) // invalid token
+				return
+			}
+			if ok, err := scope.VerifyScope(ctx, tokenScope, r); err != nil || !ok {
+				opt.Logger.Error().Str(log.RequestIDString, r.Header.Get("X-Request-ID")).Err(err).Msg("verifying scope failed")
+				w.WriteHeader(http.StatusUnauthorized) // invalid scope
+				return
+			}
+
+			ctx = revactx.ContextSetToken(ctx, t)
+			ctx = revactx.ContextSetUser(ctx, u)
+			ctx = gmmetadata.Set(ctx, opkgm.AccountID, u.GetId().GetOpaqueId())
+			if m := u.GetOpaque().GetMap(); m != nil {
+				if roles, ok := m["roles"]; ok {
+					ctx = gmmetadata.Set(ctx, opkgm.RoleIDs, string(roles.GetValue()))
+				}
+			}
+			ctx = metadata.AppendToOutgoingContext(ctx, revactx.TokenHeader, t)
+
+			initiatorID := r.Header.Get(revactx.InitiatorHeader)
+			if initiatorID != "" {
+				ctx = revactx.ContextSetInitiator(ctx, initiatorID)
+				ctx = metadata.AppendToOutgoingContext(ctx, revactx.InitiatorHeader, initiatorID)
+			}
+
+			next.ServeHTTP(w, r.WithContext(ctx))
+		})
+	}
+}
diff --git a/services/groupware/pkg/middleware/groupware_logger.go b/services/groupware/pkg/middleware/groupware_logger.go
new file mode 100644
index 0000000000..bdea8fe7a6
--- /dev/null
+++ b/services/groupware/pkg/middleware/groupware_logger.go
@@ -0,0 +1,67 @@
+package middleware
+
+import (
+	"net/http"
+	"time"
+
+	"github.com/go-chi/chi/v5/middleware"
+	"github.com/opencloud-eu/opencloud/pkg/log"
+)
+
+func GroupwareLogger(logger log.Logger) func(http.Handler) http.Handler {
+	return func(next http.Handler) http.Handler {
+		return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+			start := time.Now()
+			wrap := middleware.NewWrapResponseWriter(w, r.ProtoMajor)
+
+			// Run the next handler inside a closure with a deferred recover so
+			// that a panic can be captured and logged below; recover only has
+			// an effect when it is called directly from a deferred function.
+			var panicked any
+			func() {
+				defer func() {
+					panicked = recover()
+				}()
+				next.ServeHTTP(wrap, r)
+			}()
+
+			level := logger.Debug()
+			if panicked != nil {
+				level = logger.Error()
+			}
+
+			if !level.Enabled() {
+				return
+			}
+
+			if panicked != nil {
+				switch e := panicked.(type) {
+				case error:
+					level = level.Err(e)
+				default:
+					level = level.Any("panic", e)
+				}
+			}
+
+			ctx := r.Context()
+
+			requestID := middleware.GetReqID(ctx)
+			traceID := GetTraceID(ctx)
+
+			level.Str(log.RequestIDString, requestID)
+
+			if traceID != "" {
+				level.Str("traceId", traceID)
+			}
+
+			level.
+				Str("proto", r.Proto).
+				Str("method", r.Method).
+				Int("status", wrap.Status()).
+				Str("path", r.URL.Path).
+				Dur("duration", time.Since(start)).
+				Int("bytes", wrap.BytesWritten()).
+ Msg("") + }) + } +} diff --git a/services/groupware/pkg/middleware/traceid.go b/services/groupware/pkg/middleware/traceid.go new file mode 100644 index 0000000000..a71ea3236f --- /dev/null +++ b/services/groupware/pkg/middleware/traceid.go @@ -0,0 +1,42 @@ +package middleware + +import ( + "context" + "net/http" +) + +type ctxKeyTraceID int + +const TraceIDKey ctxKeyTraceID = 0 + +const maxTraceIdLength = 1024 + +var TraceIDHeader = "Trace-Id" + +func TraceID(next http.Handler) http.Handler { + fn := func(w http.ResponseWriter, r *http.Request) { + traceID := r.Header.Get(TraceIDHeader) + if traceID != "" { + runes := []rune(traceID) + if len(runes) > maxTraceIdLength { + traceID = string(runes[0:maxTraceIdLength]) + } + w.Header().Add(TraceIDHeader, traceID) + ctx := context.WithValue(r.Context(), TraceIDKey, traceID) + next.ServeHTTP(w, r.WithContext(ctx)) + } else { + next.ServeHTTP(w, r) + } + } + return http.HandlerFunc(fn) +} + +func GetTraceID(ctx context.Context) string { + if ctx == nil { + return "" + } + if traceID, ok := ctx.Value(TraceIDKey).(string); ok { + return traceID + } + return "" +} diff --git a/services/groupware/pkg/server/debug/option.go b/services/groupware/pkg/server/debug/option.go new file mode 100644 index 0000000000..64ff971d09 --- /dev/null +++ b/services/groupware/pkg/server/debug/option.go @@ -0,0 +1,50 @@ +package debug + +import ( + "context" + + "github.com/opencloud-eu/opencloud/pkg/log" + "github.com/opencloud-eu/opencloud/services/groupware/pkg/config" +) + +// Option defines a single option function. +type Option func(o *Options) + +// Options defines the available options for this package. +type Options struct { + Logger log.Logger + Context context.Context + Config *config.Config +} + +// newOptions initializes the available default options. +func newOptions(opts ...Option) Options { + opt := Options{} + + for _, o := range opts { + o(&opt) + } + + return opt +} + +// Logger provides a function to set the logger option. +func Logger(val log.Logger) Option { + return func(o *Options) { + o.Logger = val + } +} + +// Context provides a function to set the context option. +func Context(val context.Context) Option { + return func(o *Options) { + o.Context = val + } +} + +// Config provides a function to set the config option. +func Config(val *config.Config) Option { + return func(o *Options) { + o.Config = val + } +} diff --git a/services/groupware/pkg/server/debug/server.go b/services/groupware/pkg/server/debug/server.go new file mode 100644 index 0000000000..11e2e4d44e --- /dev/null +++ b/services/groupware/pkg/server/debug/server.go @@ -0,0 +1,28 @@ +package debug + +import ( + "net/http" + + "github.com/opencloud-eu/opencloud/pkg/handlers" + "github.com/opencloud-eu/opencloud/pkg/service/debug" + "github.com/opencloud-eu/opencloud/pkg/version" +) + +// Server initializes the debug service and server. +func Server(opts ...Option) (*http.Server, error) { + options := newOptions(opts...) + + readyHandlerConfiguration := handlers.NewCheckHandlerConfiguration(). 
+ WithLogger(options.Logger) + + return debug.NewService( + debug.Address(options.Config.Debug.Addr), + debug.Token(options.Config.Debug.Token), + debug.Pprof(options.Config.Debug.Pprof), + debug.Zpages(options.Config.Debug.Zpages), + debug.Logger(options.Logger), + debug.Name(options.Config.Service.Name), + debug.Version(version.GetString()), + debug.Ready(handlers.NewCheckHandler(readyHandlerConfiguration)), + ), nil +} diff --git a/services/groupware/pkg/server/http/option.go b/services/groupware/pkg/server/http/option.go new file mode 100644 index 0000000000..ed4d76a76f --- /dev/null +++ b/services/groupware/pkg/server/http/option.go @@ -0,0 +1,83 @@ +package http + +import ( + "context" + + "github.com/opencloud-eu/opencloud/pkg/log" + "github.com/opencloud-eu/opencloud/services/groupware/pkg/config" + "github.com/opencloud-eu/opencloud/services/groupware/pkg/metrics" + "github.com/urfave/cli/v2" + "go.opentelemetry.io/otel/trace" + "go.opentelemetry.io/otel/trace/noop" +) + +// Option defines a single option function. +type Option func(o *Options) + +// Options defines the available options for this package. +type Options struct { + Namespace string + Logger log.Logger + Context context.Context + Config *config.Config + Metrics *metrics.HttpMetrics + Flags []cli.Flag + TraceProvider trace.TracerProvider +} + +// newOptions initializes the available default options. +func newOptions(opts ...Option) Options { + opt := Options{} + + for _, o := range opts { + o(&opt) + } + + return opt +} + +// Logger provides a function to set the logger option. +func Logger(val log.Logger) Option { + return func(o *Options) { + o.Logger = val + } +} + +// Context provides a function to set the context option. +func Context(val context.Context) Option { + return func(o *Options) { + o.Context = val + } +} + +// Config provides a function to set the config option. +func Config(val *config.Config) Option { + return func(o *Options) { + o.Config = val + } +} + +// Metrics provides a function to set the metrics option. +func Metrics(val *metrics.HttpMetrics) Option { + return func(o *Options) { + o.Metrics = val + } +} + +// Namespace provides a function to set the Namespace option. +func Namespace(val string) Option { + return func(o *Options) { + o.Namespace = val + } +} + +// TraceProvider provides a function to configure the trace provider +func TraceProvider(traceProvider trace.TracerProvider) Option { + return func(o *Options) { + if traceProvider != nil { + o.TraceProvider = traceProvider + } else { + o.TraceProvider = noop.NewTracerProvider() + } + } +} diff --git a/services/groupware/pkg/server/http/server.go b/services/groupware/pkg/server/http/server.go new file mode 100644 index 0000000000..90ebb81f5a --- /dev/null +++ b/services/groupware/pkg/server/http/server.go @@ -0,0 +1,77 @@ +package http + +import ( + "fmt" + + "github.com/go-chi/chi/v5/middleware" + "github.com/opencloud-eu/opencloud/pkg/account" + "github.com/opencloud-eu/opencloud/pkg/cors" + opencloudmiddleware "github.com/opencloud-eu/opencloud/pkg/middleware" + "github.com/opencloud-eu/opencloud/pkg/service/http" + "github.com/opencloud-eu/opencloud/pkg/version" + groupwaremiddleware "github.com/opencloud-eu/opencloud/services/groupware/pkg/middleware" + svc "github.com/opencloud-eu/opencloud/services/groupware/pkg/service/http/v0" + "go-micro.dev/v4" +) + +// Server initializes the http service and server. +func Server(opts ...Option) (http.Service, error) { + options := newOptions(opts...) 
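+
+	// Create the transport-level go-micro HTTP service first; the groupware
+	// handler assembled below is then registered on it.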
+ + service, err := http.NewService( + http.TLSConfig(options.Config.HTTP.TLS), + http.Logger(options.Logger), + http.Name(options.Config.Service.Name), + http.Version(version.GetString()), + http.Namespace(options.Config.HTTP.Namespace), + http.Address(options.Config.HTTP.Addr), + http.Context(options.Context), + http.TraceProvider(options.TraceProvider), + ) + if err != nil { + options.Logger.Error(). + Err(err). + Msg("Error initializing http service") + return http.Service{}, fmt.Errorf("could not initialize http service: %w", err) + } + + handle, err := svc.NewService( + svc.Logger(options.Logger), + svc.Config(options.Config), + svc.Middleware( + middleware.RealIP, + middleware.RequestID, + groupwaremiddleware.TraceID, + opencloudmiddleware.Cors( + cors.Logger(options.Logger), + cors.AllowedOrigins(options.Config.HTTP.CORS.AllowedOrigins), + cors.AllowedMethods(options.Config.HTTP.CORS.AllowedMethods), + cors.AllowedHeaders(options.Config.HTTP.CORS.AllowedHeaders), + cors.AllowCredentials(options.Config.HTTP.CORS.AllowCredentials), + ), + opencloudmiddleware.Version( + options.Config.Service.Name, + version.GetString(), + ), + groupwaremiddleware.GroupwareLogger(options.Logger), + groupwaremiddleware.Auth( + account.Logger(options.Logger), + account.JWTSecret(options.Config.TokenManager.JWTSecret), + ), + ), + ) + if err != nil { + return http.Service{}, err + } + + { + handle = svc.NewInstrument(handle, options.Metrics) + handle = svc.NewLogging(handle, options.Logger) + } + + if err := micro.RegisterHandler(service.Server(), handle); err != nil { + return http.Service{}, err + } + + return service, nil +} diff --git a/services/groupware/pkg/service/http/v0/instrument.go b/services/groupware/pkg/service/http/v0/instrument.go new file mode 100644 index 0000000000..bb63e5160e --- /dev/null +++ b/services/groupware/pkg/service/http/v0/instrument.go @@ -0,0 +1,25 @@ +package svc + +import ( + "net/http" + + "github.com/opencloud-eu/opencloud/services/groupware/pkg/metrics" +) + +// NewInstrument returns a service that instruments metrics. +func NewInstrument(next Service, metrics *metrics.HttpMetrics) Service { + return instrument{ + next: next, + metrics: metrics, + } +} + +type instrument struct { + next Service + metrics *metrics.HttpMetrics +} + +// ServeHTTP implements the Service interface. +func (i instrument) ServeHTTP(w http.ResponseWriter, r *http.Request) { + i.next.ServeHTTP(w, r) +} diff --git a/services/groupware/pkg/service/http/v0/logging.go b/services/groupware/pkg/service/http/v0/logging.go new file mode 100644 index 0000000000..c21734ce11 --- /dev/null +++ b/services/groupware/pkg/service/http/v0/logging.go @@ -0,0 +1,25 @@ +package svc + +import ( + "net/http" + + "github.com/opencloud-eu/opencloud/pkg/log" +) + +// NewLogging returns a service that logs messages. +func NewLogging(next Service, logger log.Logger) Service { + return logging{ + next: next, + logger: logger, + } +} + +type logging struct { + next Service + logger log.Logger +} + +// ServeHTTP implements the Service interface. 
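+//
+// Both decorators (instrument and logging) are currently thin pass-throughs
+// around the wrapped Service; they are composed in the HTTP server setup
+// roughly like this (variable names illustrative):
+//
+//	handle = NewInstrument(handle, httpMetrics)
+//	handle = NewLogging(handle, logger)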
+func (l logging) ServeHTTP(w http.ResponseWriter, r *http.Request) { + l.next.ServeHTTP(w, r) +} diff --git a/services/groupware/pkg/service/http/v0/option.go b/services/groupware/pkg/service/http/v0/option.go new file mode 100644 index 0000000000..d39a415d14 --- /dev/null +++ b/services/groupware/pkg/service/http/v0/option.go @@ -0,0 +1,52 @@ +package svc + +import ( + "net/http" + + "github.com/opencloud-eu/opencloud/pkg/log" + "github.com/opencloud-eu/opencloud/services/groupware/pkg/config" + "go.opentelemetry.io/otel/trace" +) + +// Option defines a single option function. +type Option func(o *Options) + +// Options defines the available options for this package. +type Options struct { + Logger log.Logger + Config *config.Config + Middleware []func(http.Handler) http.Handler + TraceProvider trace.TracerProvider +} + +// newOptions initializes the available default options. +func newOptions(opts ...Option) Options { + opt := Options{} + + for _, o := range opts { + o(&opt) + } + + return opt +} + +// Logger provides a function to set the logger option. +func Logger(val log.Logger) Option { + return func(o *Options) { + o.Logger = val + } +} + +// Config provides a function to set the config option. +func Config(val *config.Config) Option { + return func(o *Options) { + o.Config = val + } +} + +// Middleware provides a function to set the middleware option. +func Middleware(val ...func(http.Handler) http.Handler) Option { + return func(o *Options) { + o.Middleware = val + } +} diff --git a/services/groupware/pkg/service/http/v0/service.go b/services/groupware/pkg/service/http/v0/service.go new file mode 100644 index 0000000000..432c7cece7 --- /dev/null +++ b/services/groupware/pkg/service/http/v0/service.go @@ -0,0 +1,64 @@ +package svc + +import ( + "fmt" + "net/http" + + "github.com/go-chi/chi/v5" + "github.com/prometheus/client_golang/prometheus" + "github.com/riandyrn/otelchi" + + "github.com/opencloud-eu/opencloud/pkg/log" + "github.com/opencloud-eu/opencloud/pkg/tracing" + "github.com/opencloud-eu/opencloud/services/groupware/pkg/groupware" + "github.com/opencloud-eu/opencloud/services/groupware/pkg/metrics" +) + +// Service defines the service handlers. +type Service interface { + ServeHTTP(w http.ResponseWriter, r *http.Request) +} + +// NewService returns a service implementation for Service. +func NewService(opts ...Option) (Service, error) { + options := newOptions(opts...) + + m := chi.NewMux() + m.Use(options.Middleware...) 
+ + m.Use( + otelchi.Middleware( + "groupware", + otelchi.WithChiRoutes(m), + otelchi.WithTracerProvider(options.TraceProvider), + otelchi.WithPropagators(tracing.GetPropagator()), + ), + ) + + logger := &options.Logger + + registerer := metrics.NewLoggingPrometheusRegisterer(prometheus.DefaultRegisterer, logger) + + gw, err := groupware.NewGroupware(options.Config, logger, m, registerer) + if err != nil { + return nil, err + } + + m.Route(options.Config.HTTP.Root, gw.Route) + + { + level := options.Logger.Debug() + if level.Enabled() { + routes := []string{} + _ = chi.Walk(m, func(method string, route string, _ http.Handler, middlewares ...func(http.Handler) http.Handler) error { + routes = append(routes, fmt.Sprintf("%s %s", method, route)) + return nil + }) + level.Array("routes", log.StringArray(routes)).Msgf("serving %v endpoints", len(routes)) + } + } + + metrics.StartupMetrics(registerer) + + return gw, nil +} diff --git a/services/groupware/pnpm-lock.yaml b/services/groupware/pnpm-lock.yaml new file mode 100644 index 0000000000..7ecf81fb8b --- /dev/null +++ b/services/groupware/pnpm-lock.yaml @@ -0,0 +1,2402 @@ +lockfileVersion: '9.0' + +settings: + autoInstallPeers: true + excludeLinksFromLockfile: false + +importers: + + .: + dependencies: + '@redocly/cli': + specifier: ^2.4.0 + version: 2.4.0(@opentelemetry/api@1.9.0)(ajv@8.17.1)(core-js@3.45.1) + '@types/js-yaml': + specifier: ^4.0.9 + version: 4.0.9 + cheerio: + specifier: ^1.1.2 + version: 1.1.2 + js-yaml: + specifier: ^4.1.0 + version: 4.1.0 + ts-node: + specifier: ^10.9.2 + version: 10.9.2(@types/node@24.3.1)(typescript@5.9.2) + typescript: + specifier: ^5.9.2 + version: 5.9.2 + devDependencies: + '@types/node': + specifier: ^24.3.1 + version: 24.3.1 + +packages: + + '@babel/code-frame@7.27.1': + resolution: {integrity: sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg==} + engines: {node: '>=6.9.0'} + + '@babel/helper-validator-identifier@7.27.1': + resolution: {integrity: sha512-D2hP9eA+Sqx1kBZgzxZh0y1trbuU+JoDkiEwqhQ36nodYqJwyEIhPSdMNd7lOm/4io72luTPWH20Yda0xOuUow==} + engines: {node: '>=6.9.0'} + + '@babel/runtime@7.28.4': + resolution: {integrity: sha512-Q/N6JNWvIvPnLDvjlE1OUBLPQHH6l3CltCEsHIujp45zQUSSh8K+gHnaEX45yAT1nyngnINhvWtzN+Nb9D8RAQ==} + engines: {node: '>=6.9.0'} + + '@cspotcode/source-map-support@0.8.1': + resolution: {integrity: sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==} + engines: {node: '>=12'} + + '@emotion/is-prop-valid@1.2.2': + resolution: {integrity: sha512-uNsoYd37AFmaCdXlg6EYD1KaPOaRWRByMCYzbKUX4+hhMfrxdVSelShywL4JVaAeM/eHUOSprYBQls+/neX3pw==} + + '@emotion/memoize@0.8.1': + resolution: {integrity: sha512-W2P2c/VRW1/1tLox0mVUalvnWXxavmv/Oum2aPsRcoDJuob75FC3Y8FbpfLwUegRcxINtGUMPq0tFCvYNTBXNA==} + + '@emotion/unitless@0.8.1': + resolution: {integrity: sha512-KOEGMu6dmJZtpadb476IsZBclKvILjopjUii3V+7MnXIQCYh8W3NgNcgwo21n9LXZX6EDIKvqfjYxXebDwxKmQ==} + + '@exodus/schemasafe@1.3.0': + resolution: {integrity: sha512-5Aap/GaRupgNx/feGBwLLTVv8OQFfv3pq2lPRzPg9R+IOBnDgghTGW7l7EuVXOvg5cc/xSAlRW8rBrjIC3Nvqw==} + + '@faker-js/faker@7.6.0': + resolution: {integrity: sha512-XK6BTq1NDMo9Xqw/YkYyGjSsg44fbNwYRx7QK2CuoQgyy+f1rrTDHoExVM5PsyXCtfl2vs2vVJ0MN0yN6LppRw==} + engines: {node: '>=14.0.0', npm: '>=6.0.0'} + + '@humanwhocodes/momoa@2.0.4': + resolution: {integrity: sha512-RE815I4arJFtt+FVeU1Tgp9/Xvecacji8w/V6XtXsWWH/wz/eNkNbhb+ny/+PlVZjV0rxQpRSQKNKE3lcktHEA==} + engines: {node: '>=10.10.0'} + + 
'@isaacs/balanced-match@4.0.1': + resolution: {integrity: sha512-yzMTt9lEb8Gv7zRioUilSglI0c0smZ9k5D65677DLWLtWJaXIS3CqcGyUFByYKlnUj6TkjLVs54fBl6+TiGQDQ==} + engines: {node: 20 || >=22} + + '@isaacs/brace-expansion@5.0.0': + resolution: {integrity: sha512-ZT55BDLV0yv0RBm2czMiZ+SqCGO7AvmOM3G/w2xhVPH+te0aKgFjmBvGlL1dH+ql2tgGO3MVrbb3jCKyvpgnxA==} + engines: {node: 20 || >=22} + + '@isaacs/cliui@8.0.2': + resolution: {integrity: sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==} + engines: {node: '>=12'} + + '@jest/schemas@29.6.3': + resolution: {integrity: sha512-mo5j5X+jIZmJQveBKeS/clAueipV7KgiX1vMgCxam1RNYiqE1w62n0/tJJnHtjW8ZHcQco5gY85jA3mi0L+nSA==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + + '@jridgewell/resolve-uri@3.1.2': + resolution: {integrity: sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==} + engines: {node: '>=6.0.0'} + + '@jridgewell/sourcemap-codec@1.5.5': + resolution: {integrity: sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==} + + '@jridgewell/trace-mapping@0.3.9': + resolution: {integrity: sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==} + + '@noble/hashes@1.8.0': + resolution: {integrity: sha512-jCs9ldd7NwzpgXDIf6P3+NrHh9/sD6CQdxHyjQI+h/6rDNo88ypBxxz45UDuZHz9r3tNz7N/VInSVoVdtXEI4A==} + engines: {node: ^14.21.3 || >=16} + + '@opentelemetry/api-logs@0.202.0': + resolution: {integrity: sha512-fTBjMqKCfotFWfLzaKyhjLvyEyq5vDKTTFfBmx21btv3gvy8Lq6N5Dh2OzqeuN4DjtpSvNT1uNVfg08eD2Rfxw==} + engines: {node: '>=8.0.0'} + + '@opentelemetry/api@1.9.0': + resolution: {integrity: sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg==} + engines: {node: '>=8.0.0'} + + '@opentelemetry/context-async-hooks@2.0.1': + resolution: {integrity: sha512-XuY23lSI3d4PEqKA+7SLtAgwqIfc6E/E9eAQWLN1vlpC53ybO3o6jW4BsXo1xvz9lYyyWItfQDDLzezER01mCw==} + engines: {node: ^18.19.0 || >=20.6.0} + peerDependencies: + '@opentelemetry/api': '>=1.0.0 <1.10.0' + + '@opentelemetry/core@2.0.1': + resolution: {integrity: sha512-MaZk9SJIDgo1peKevlbhP6+IwIiNPNmswNL4AF0WaQJLbHXjr9SrZMgS12+iqr9ToV4ZVosCcc0f8Rg67LXjxw==} + engines: {node: ^18.19.0 || >=20.6.0} + peerDependencies: + '@opentelemetry/api': '>=1.0.0 <1.10.0' + + '@opentelemetry/exporter-trace-otlp-http@0.202.0': + resolution: {integrity: sha512-/hKE8DaFCJuaQqE1IxpgkcjOolUIwgi3TgHElPVKGdGRBSmJMTmN/cr6vWa55pCJIXPyhKvcMrbrya7DZ3VmzA==} + engines: {node: ^18.19.0 || >=20.6.0} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + + '@opentelemetry/otlp-exporter-base@0.202.0': + resolution: {integrity: sha512-nMEOzel+pUFYuBJg2znGmHJWbmvMbdX5/RhoKNKowguMbURhz0fwik5tUKplLcUtl8wKPL1y9zPnPxeBn65N0Q==} + engines: {node: ^18.19.0 || >=20.6.0} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + + '@opentelemetry/otlp-transformer@0.202.0': + resolution: {integrity: sha512-5XO77QFzs9WkexvJQL9ksxL8oVFb/dfi9NWQSq7Sv0Efr9x3N+nb1iklP1TeVgxqJ7m1xWiC/Uv3wupiQGevMw==} + engines: {node: ^18.19.0 || >=20.6.0} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + + '@opentelemetry/resources@2.0.1': + resolution: {integrity: sha512-dZOB3R6zvBwDKnHDTB4X1xtMArB/d324VsbiPkX/Yu0Q8T2xceRthoIVFhJdvgVM2QhGVUyX9tzwiNxGtoBJUw==} + engines: {node: ^18.19.0 || >=20.6.0} + peerDependencies: + '@opentelemetry/api': '>=1.3.0 <1.10.0' + + '@opentelemetry/sdk-logs@0.202.0': + resolution: {integrity: 
sha512-pv8QiQLQzk4X909YKm0lnW4hpuQg4zHwJ4XBd5bZiXcd9urvrJNoNVKnxGHPiDVX/GiLFvr5DMYsDBQbZCypRQ==} + engines: {node: ^18.19.0 || >=20.6.0} + peerDependencies: + '@opentelemetry/api': '>=1.4.0 <1.10.0' + + '@opentelemetry/sdk-metrics@2.0.1': + resolution: {integrity: sha512-wf8OaJoSnujMAHWR3g+/hGvNcsC16rf9s1So4JlMiFaFHiE4HpIA3oUh+uWZQ7CNuK8gVW/pQSkgoa5HkkOl0g==} + engines: {node: ^18.19.0 || >=20.6.0} + peerDependencies: + '@opentelemetry/api': '>=1.9.0 <1.10.0' + + '@opentelemetry/sdk-trace-base@2.0.1': + resolution: {integrity: sha512-xYLlvk/xdScGx1aEqvxLwf6sXQLXCjk3/1SQT9X9AoN5rXRhkdvIFShuNNmtTEPRBqcsMbS4p/gJLNI2wXaDuQ==} + engines: {node: ^18.19.0 || >=20.6.0} + peerDependencies: + '@opentelemetry/api': '>=1.3.0 <1.10.0' + + '@opentelemetry/sdk-trace-node@2.0.1': + resolution: {integrity: sha512-UhdbPF19pMpBtCWYP5lHbTogLWx9N0EBxtdagvkn5YtsAnCBZzL7SjktG+ZmupRgifsHMjwUaCCaVmqGfSADmA==} + engines: {node: ^18.19.0 || >=20.6.0} + peerDependencies: + '@opentelemetry/api': '>=1.0.0 <1.10.0' + + '@opentelemetry/semantic-conventions@1.34.0': + resolution: {integrity: sha512-aKcOkyrorBGlajjRdVoJWHTxfxO1vCNHLJVlSDaRHDIdjU+pX8IYQPvPDkYiujKLbRnWU+1TBwEt0QRgSm4SGA==} + engines: {node: '>=14'} + + '@protobufjs/aspromise@1.1.2': + resolution: {integrity: sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ==} + + '@protobufjs/base64@1.1.2': + resolution: {integrity: sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg==} + + '@protobufjs/codegen@2.0.4': + resolution: {integrity: sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg==} + + '@protobufjs/eventemitter@1.1.0': + resolution: {integrity: sha512-j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q==} + + '@protobufjs/fetch@1.1.0': + resolution: {integrity: sha512-lljVXpqXebpsijW71PZaCYeIcE5on1w5DlQy5WH6GLbFryLUrBD4932W/E2BSpfRJWseIL4v/KPgBFxDOIdKpQ==} + + '@protobufjs/float@1.0.2': + resolution: {integrity: sha512-Ddb+kVXlXst9d+R9PfTIxh1EdNkgoRe5tOX6t01f1lYWOvJnSPDBlG241QLzcyPdoNTsblLUdujGSE4RzrTZGQ==} + + '@protobufjs/inquire@1.1.0': + resolution: {integrity: sha512-kdSefcPdruJiFMVSbn801t4vFK7KB/5gd2fYvrxhuJYg8ILrmn9SKSX2tZdV6V+ksulWqS7aXjBcRXl3wHoD9Q==} + + '@protobufjs/path@1.1.2': + resolution: {integrity: sha512-6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA==} + + '@protobufjs/pool@1.1.0': + resolution: {integrity: sha512-0kELaGSIDBKvcgS4zkjz1PeddatrjYcmMWOlAuAPwAeccUrPHdUqo/J6LiymHHEiJT5NrF1UVwxY14f+fy4WQw==} + + '@protobufjs/utf8@1.1.0': + resolution: {integrity: sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw==} + + '@redocly/ajv@8.11.2': + resolution: {integrity: sha512-io1JpnwtIcvojV7QKDUSIuMN/ikdOUd1ReEnUnMKGfDVridQZ31J0MmIuqwuRjWDZfmvr+Q0MqCcfHM2gTivOg==} + + '@redocly/ajv@8.11.3': + resolution: {integrity: sha512-4P3iZse91TkBiY+Dx5DUgxQ9GXkVJf++cmI0MOyLDxV9b5MUBI4II6ES8zA5JCbO72nKAJxWrw4PUPW+YP3ZDQ==} + + '@redocly/cli@2.4.0': + resolution: {integrity: sha512-RXINsLA5cFKZM0zB66/1rWWU4oENBZ5lcrnwcyoHQmaGt+rA7Glf/lORc9/JmkznVESFZ3t/9nsc7mSfGfwuAw==} + engines: {node: '>=22.12.0 || >=20.19.0 <21.0.0', npm: '>=10'} + hasBin: true + + '@redocly/config@0.22.2': + resolution: {integrity: sha512-roRDai8/zr2S9YfmzUfNhKjOF0NdcOIqF7bhf4MVC5UxpjIysDjyudvlAiVbpPHp3eDRWbdzUgtkK1a7YiDNyQ==} + + '@redocly/config@0.31.0': + resolution: {integrity: 
sha512-KPm2v//zj7qdGvClX0YqRNLQ9K7loVJWFEIceNxJIYPXP4hrhNvOLwjmxIkdkai0SdqYqogR2yjM/MjF9/AGdQ==} + + '@redocly/openapi-core@1.34.5': + resolution: {integrity: sha512-0EbE8LRbkogtcCXU7liAyC00n9uNG9hJ+eMyHFdUsy9lB/WGqnEBgwjA9q2cyzAVcdTkQqTBBU1XePNnN3OijA==} + engines: {node: '>=18.17.0', npm: '>=9.5.0'} + + '@redocly/openapi-core@2.4.0': + resolution: {integrity: sha512-ft4qzUu7g9vabIkp09uLwQwpWgT5aycrmUENh0WyvYDZwv4eDg+tDSMP1Grt0nPy+GJ/LnIMpaYAPhrIMMhMzg==} + engines: {node: '>=22.12.0 || >=20.19.0 <21.0.0', npm: '>=10'} + + '@redocly/respect-core@2.4.0': + resolution: {integrity: sha512-xpuOC/gySVc8Ncuz3ZKXWNxYI9BLgkuKh0YB7l2xXTgcFz84V3PHKDsivTPkP9eKKbCLZjjzmwJCr1eiFSrHjg==} + engines: {node: '>=22.12.0 || >=20.19.0 <21.0.0', npm: '>=10'} + + '@sinclair/typebox@0.27.8': + resolution: {integrity: sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA==} + + '@tsconfig/node10@1.0.11': + resolution: {integrity: sha512-DcRjDCujK/kCk/cUe8Xz8ZSpm8mS3mNNpta+jGCA6USEDfktlNvm1+IuZ9eTcDbNk41BHwpHHeW+N1lKCz4zOw==} + + '@tsconfig/node12@1.0.11': + resolution: {integrity: sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag==} + + '@tsconfig/node14@1.0.3': + resolution: {integrity: sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow==} + + '@tsconfig/node16@1.0.4': + resolution: {integrity: sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA==} + + '@types/js-yaml@4.0.9': + resolution: {integrity: sha512-k4MGaQl5TGo/iipqb2UDG2UwjXziSWkh0uysQelTlJpX1qGlpUZYm8PnO4DxG1qBomtJUdYJ6qR6xdIah10JLg==} + + '@types/json-schema@7.0.15': + resolution: {integrity: sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==} + + '@types/node@24.3.1': + resolution: {integrity: sha512-3vXmQDXy+woz+gnrTvuvNrPzekOi+Ds0ReMxw0LzBiK3a+1k0kQn9f2NWk+lgD4rJehFUmYy2gMhJ2ZI+7YP9g==} + + '@types/stylis@4.2.5': + resolution: {integrity: sha512-1Xve+NMN7FWjY14vLoY5tL3BVEQ/n42YLwaqJIPYhotZ9uBHt87VceMwWQpzmdEt2TNXIorIFG+YeCUUW7RInw==} + + '@types/trusted-types@2.0.7': + resolution: {integrity: sha512-ScaPdn1dQczgbl0QFTeTOmVHFULt394XJgOQNoyVhZ6r2vLnMLJfBPd53SB52T/3G36VI1/g2MZaX0cwDuXsfw==} + + abort-controller@3.0.0: + resolution: {integrity: sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==} + engines: {node: '>=6.5'} + + acorn-walk@8.3.4: + resolution: {integrity: sha512-ueEepnujpqee2o5aIYnvHU6C0A42MNdsIDeqy5BydrkuC5R1ZuUFnm27EeFJGoEHJQgn3uleRvmTXaJgfXbt4g==} + engines: {node: '>=0.4.0'} + + acorn@8.15.0: + resolution: {integrity: sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==} + engines: {node: '>=0.4.0'} + hasBin: true + + agent-base@7.1.4: + resolution: {integrity: sha512-MnA+YT8fwfJPgBx3m60MNqakm30XOkyIoH1y6huTQvC0PwZG7ki8NacLBcrPbNoo8vEZy7Jpuk7+jMO+CUovTQ==} + engines: {node: '>= 14'} + + ajv-formats@2.1.1: + resolution: {integrity: sha512-Wx0Kx52hxE7C18hkMEggYlEifqWZtYaRgouJor+WMdPnQyEK13vgEWyVNup7SoeeoLMsr4kf5h6dOW11I15MUA==} + peerDependencies: + ajv: ^8.0.0 + peerDependenciesMeta: + ajv: + optional: true + + ajv@8.17.1: + resolution: {integrity: sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==} + + ansi-regex@5.0.1: + resolution: {integrity: sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==} + engines: {node: '>=8'} + + ansi-regex@6.2.2: + 
resolution: {integrity: sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg==} + engines: {node: '>=12'} + + ansi-styles@4.3.0: + resolution: {integrity: sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==} + engines: {node: '>=8'} + + ansi-styles@5.2.0: + resolution: {integrity: sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==} + engines: {node: '>=10'} + + ansi-styles@6.2.3: + resolution: {integrity: sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg==} + engines: {node: '>=12'} + + anymatch@3.1.3: + resolution: {integrity: sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==} + engines: {node: '>= 8'} + + arg@4.1.3: + resolution: {integrity: sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==} + + argparse@2.0.1: + resolution: {integrity: sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==} + + asynckit@0.4.0: + resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==} + + balanced-match@1.0.2: + resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==} + + better-ajv-errors@1.2.0: + resolution: {integrity: sha512-UW+IsFycygIo7bclP9h5ugkNH8EjCSgqyFB/yQ4Hqqa1OEYDtb0uFIkYE0b6+CjkgJYVM5UKI/pJPxjYe9EZlA==} + engines: {node: '>= 12.13.0'} + peerDependencies: + ajv: 4.11.8 - 8 + + binary-extensions@2.3.0: + resolution: {integrity: sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==} + engines: {node: '>=8'} + + boolbase@1.0.0: + resolution: {integrity: sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww==} + + brace-expansion@2.0.2: + resolution: {integrity: sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==} + + braces@3.0.3: + resolution: {integrity: sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==} + engines: {node: '>=8'} + + call-bind-apply-helpers@1.0.2: + resolution: {integrity: sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==} + engines: {node: '>= 0.4'} + + call-me-maybe@1.0.2: + resolution: {integrity: sha512-HpX65o1Hnr9HH25ojC1YGs7HCQLq0GCOibSaWER0eNpgJ/Z1MZv2mTc7+xh6WOPxbRVcmgbv4hGU+uSQ/2xFZQ==} + + camelize@1.0.1: + resolution: {integrity: sha512-dU+Tx2fsypxTgtLoE36npi3UqcjSSMNYfkqgmoEhtZrraP5VWq0K7FkWVTYa8eMPtnU/G2txVsfdCJTn9uzpuQ==} + + chalk@4.1.2: + resolution: {integrity: sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==} + engines: {node: '>=10'} + + cheerio-select@2.1.0: + resolution: {integrity: sha512-9v9kG0LvzrlcungtnJtpGNxY+fzECQKhK4EGJX2vByejiMX84MFNQw4UxPJl3bFbTMw+Dfs37XaIkCwTZfLh4g==} + + cheerio@1.1.2: + resolution: {integrity: sha512-IkxPpb5rS/d1IiLbHMgfPuS0FgiWTtFIm/Nj+2woXDLTZ7fOT2eqzgYbdMlLweqlHbsZjxEChoVK+7iph7jyQg==} + engines: {node: '>=20.18.1'} + + chokidar@3.6.0: + resolution: {integrity: sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==} + engines: {node: '>= 8.10.0'} + + classnames@2.5.1: + resolution: {integrity: sha512-saHYOzhIQs6wy2sVxTM6bUDsQO4F50V9RQ22qBpEdCW+I+/Wmke2HOl6lS6dTpdxVhb88/I6+Hs+438c3lfUow==} + + 
cliui@7.0.4: + resolution: {integrity: sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==} + + clsx@2.1.1: + resolution: {integrity: sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA==} + engines: {node: '>=6'} + + color-convert@2.0.1: + resolution: {integrity: sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==} + engines: {node: '>=7.0.0'} + + color-name@1.1.4: + resolution: {integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==} + + colorette@1.4.0: + resolution: {integrity: sha512-Y2oEozpomLn7Q3HFP7dpww7AtMJplbM9lGZP6RDfHqmbeRjiwRg4n6VM6j4KLmRke85uWEI7JqF17f3pqdRA0g==} + + colorette@2.0.20: + resolution: {integrity: sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w==} + + combined-stream@1.0.8: + resolution: {integrity: sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==} + engines: {node: '>= 0.8'} + + cookie@0.7.2: + resolution: {integrity: sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w==} + engines: {node: '>= 0.6'} + + core-js@3.45.1: + resolution: {integrity: sha512-L4NPsJlCfZsPeXukyzHFlg/i7IIVwHSItR0wg0FLNqYClJ4MQYTYLbC7EkjKYRLZF2iof2MUgN0EGy7MdQFChg==} + + create-require@1.1.1: + resolution: {integrity: sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==} + + cross-spawn@7.0.6: + resolution: {integrity: sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==} + engines: {node: '>= 8'} + + css-color-keywords@1.0.0: + resolution: {integrity: sha512-FyyrDHZKEjXDpNJYvVsV960FiqQyXc/LlYmsxl2BcdMb2WPx0OGRVgTg55rPSyLSNMqP52R9r8geSp7apN3Ofg==} + engines: {node: '>=4'} + + css-select@5.2.2: + resolution: {integrity: sha512-TizTzUddG/xYLA3NXodFM0fSbNizXjOKhqiQQwvhlspadZokn1KDy0NZFS0wuEubIYAV5/c1/lAr0TaaFXEXzw==} + + css-to-react-native@3.2.0: + resolution: {integrity: sha512-e8RKaLXMOFii+02mOlqwjbD00KSEKqblnpO9e++1aXS1fPQOpS1YoqdVHBqPjHNoxeF2mimzVqawm2KCbEdtHQ==} + + css-what@6.2.2: + resolution: {integrity: sha512-u/O3vwbptzhMs3L1fQE82ZSLHQQfto5gyZzwteVIEyeaY5Fc7R4dapF/BvRoSYFeqfBk4m0V1Vafq5Pjv25wvA==} + engines: {node: '>= 6'} + + csstype@3.1.3: + resolution: {integrity: sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==} + + debug@4.4.3: + resolution: {integrity: sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==} + engines: {node: '>=6.0'} + peerDependencies: + supports-color: '*' + peerDependenciesMeta: + supports-color: + optional: true + + decko@1.2.0: + resolution: {integrity: sha512-m8FnyHXV1QX+S1cl+KPFDIl6NMkxtKsy6+U/aYyjrOqWMuwAwYWu7ePqrsUHtDR5Y8Yk2pi/KIDSgF+vT4cPOQ==} + + delayed-stream@1.0.0: + resolution: {integrity: sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==} + engines: {node: '>=0.4.0'} + + diff-sequences@29.6.3: + resolution: {integrity: sha512-EjePK1srD3P08o2j4f0ExnylqRs5B9tJjcp9t1krH2qRi8CCdsYfwe9JgSLurFBWwq4uOlipzfk5fHNvwFKr8Q==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + + diff@4.0.2: + resolution: {integrity: sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==} + engines: {node: '>=0.3.1'} + + dom-serializer@2.0.0: + resolution: {integrity: 
sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg==} + + domelementtype@2.3.0: + resolution: {integrity: sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==} + + domhandler@5.0.3: + resolution: {integrity: sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w==} + engines: {node: '>= 4'} + + dompurify@3.2.7: + resolution: {integrity: sha512-WhL/YuveyGXJaerVlMYGWhvQswa7myDG17P7Vu65EWC05o8vfeNbvNf4d/BOvH99+ZW+LlQsc1GDKMa1vNK6dw==} + + domutils@3.2.2: + resolution: {integrity: sha512-6kZKyUajlDuqlHKVX1w7gyslj9MPIXzIFiz/rGu35uC1wMi+kMhQwGhl4lt9unC9Vb9INnY9Z3/ZA3+FhASLaw==} + + dotenv@16.4.7: + resolution: {integrity: sha512-47qPchRCykZC03FhkYAhrvwU4xDBFIj1QPqaarj6mdM/hgUzfPHcpkHJOn3mJAufFeeAxAzeGsr5X0M4k6fLZQ==} + engines: {node: '>=12'} + + dunder-proto@1.0.1: + resolution: {integrity: sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==} + engines: {node: '>= 0.4'} + + eastasianwidth@0.2.0: + resolution: {integrity: sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==} + + emoji-regex@8.0.0: + resolution: {integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==} + + emoji-regex@9.2.2: + resolution: {integrity: sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==} + + encoding-sniffer@0.2.1: + resolution: {integrity: sha512-5gvq20T6vfpekVtqrYQsSCFZ1wEg5+wW0/QaZMWkFr6BqD3NfKs0rLCx4rrVlSWJeZb5NBJgVLswK/w2MWU+Gw==} + + entities@4.5.0: + resolution: {integrity: sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==} + engines: {node: '>=0.12'} + + entities@6.0.1: + resolution: {integrity: sha512-aN97NXWF6AWBTahfVOIrB/NShkzi5H7F9r1s9mD3cDj4Ko5f2qhhVoYMibXF7GlLveb/D2ioWay8lxI97Ven3g==} + engines: {node: '>=0.12'} + + es-define-property@1.0.1: + resolution: {integrity: sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==} + engines: {node: '>= 0.4'} + + es-errors@1.3.0: + resolution: {integrity: sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==} + engines: {node: '>= 0.4'} + + es-object-atoms@1.1.1: + resolution: {integrity: sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==} + engines: {node: '>= 0.4'} + + es-set-tostringtag@2.1.0: + resolution: {integrity: sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==} + engines: {node: '>= 0.4'} + + es6-promise@3.3.1: + resolution: {integrity: sha512-SOp9Phqvqn7jtEUxPWdWfWoLmyt2VaJ6MpvP9Comy1MceMXqE6bxvaTu4iaxpYYPzhny28Lc+M87/c2cPK6lDg==} + + escalade@3.2.0: + resolution: {integrity: sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==} + engines: {node: '>=6'} + + event-target-shim@5.0.1: + resolution: {integrity: sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==} + engines: {node: '>=6'} + + eventemitter3@5.0.1: + resolution: {integrity: sha512-GWkBvjiSZK87ELrYOSESUYeVIc9mvLLf/nXalMOS5dYrgZq9o5OVkbZAVM06CVxYsCwH9BDZFPlQTlPA1j4ahA==} + + fast-deep-equal@3.1.3: + resolution: {integrity: sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==} + + fast-safe-stringify@2.1.1: + resolution: {integrity: 
sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==} + + fast-uri@3.1.0: + resolution: {integrity: sha512-iPeeDKJSWf4IEOasVVrknXpaBV0IApz/gp7S2bb7Z4Lljbl2MGJRqInZiUrQwV16cpzw/D3S5j5Julj/gT52AA==} + + fast-xml-parser@4.5.3: + resolution: {integrity: sha512-RKihhV+SHsIUGXObeVy9AXiBbFwkVk7Syp8XgwN5U3JV416+Gwp/GO9i0JYKmikykgz/UHRrrV4ROuZEo/T0ig==} + hasBin: true + + fill-range@7.1.1: + resolution: {integrity: sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==} + engines: {node: '>=8'} + + foreach@2.0.6: + resolution: {integrity: sha512-k6GAGDyqLe9JaebCsFCoudPPWfihKu8pylYXRlqP1J7ms39iPoTtk2fviNglIeQEwdh0bQeKJ01ZPyuyQvKzwg==} + + foreground-child@3.3.1: + resolution: {integrity: sha512-gIXjKqtFuWEgzFRJA9WCQeSJLZDjgJUOMCMzxtvFq/37KojM1BFGufqsCy0r4qSQmYLsZYMeyRqzIWOMup03sw==} + engines: {node: '>=14'} + + form-data@4.0.4: + resolution: {integrity: sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==} + engines: {node: '>= 6'} + + fsevents@2.3.3: + resolution: {integrity: sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==} + engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0} + os: [darwin] + + function-bind@1.1.2: + resolution: {integrity: sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==} + + get-caller-file@2.0.5: + resolution: {integrity: sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==} + engines: {node: 6.* || 8.* || >= 10.*} + + get-intrinsic@1.3.0: + resolution: {integrity: sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==} + engines: {node: '>= 0.4'} + + get-proto@1.0.1: + resolution: {integrity: sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==} + engines: {node: '>= 0.4'} + + glob-parent@5.1.2: + resolution: {integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==} + engines: {node: '>= 6'} + + glob@11.0.3: + resolution: {integrity: sha512-2Nim7dha1KVkaiF4q6Dj+ngPPMdfvLJEOpZk/jKiUAkqKebpGAWQXAq9z1xu9HKu5lWfqw/FASuccEjyznjPaA==} + engines: {node: 20 || >=22} + hasBin: true + + gopd@1.2.0: + resolution: {integrity: sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==} + engines: {node: '>= 0.4'} + + handlebars@4.7.8: + resolution: {integrity: sha512-vafaFqs8MZkRrSX7sFVUdo3ap/eNiLnb4IakshzvP56X5Nr1iGKAIqdX6tMlm6HcNRIkr6AxO5jFEoJzzpT8aQ==} + engines: {node: '>=0.4.7'} + hasBin: true + + has-flag@4.0.0: + resolution: {integrity: sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==} + engines: {node: '>=8'} + + has-symbols@1.1.0: + resolution: {integrity: sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==} + engines: {node: '>= 0.4'} + + has-tostringtag@1.0.2: + resolution: {integrity: sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==} + engines: {node: '>= 0.4'} + + hasown@2.0.2: + resolution: {integrity: sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==} + engines: {node: '>= 0.4'} + + htmlparser2@10.0.0: + resolution: {integrity: sha512-TwAZM+zE5Tq3lrEHvOlvwgj1XLWQCtaaibSN11Q+gGBAS7Y1uZSWwXXRe4iF6OXnaq1riyQAPFOBtYc77Mxq0g==} + + http2-client@1.3.5: + resolution: 
{integrity: sha512-EC2utToWl4RKfs5zd36Mxq7nzHHBuomZboI0yYL6Y0RmBgT7Sgkq4rQ0ezFTYoIsSs7Tm9SJe+o2FcAg6GBhGA==} + + https-proxy-agent@7.0.6: + resolution: {integrity: sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==} + engines: {node: '>= 14'} + + iconv-lite@0.6.3: + resolution: {integrity: sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==} + engines: {node: '>=0.10.0'} + + inherits@2.0.4: + resolution: {integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==} + + is-binary-path@2.1.0: + resolution: {integrity: sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==} + engines: {node: '>=8'} + + is-extglob@2.1.1: + resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==} + engines: {node: '>=0.10.0'} + + is-fullwidth-code-point@3.0.0: + resolution: {integrity: sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==} + engines: {node: '>=8'} + + is-glob@4.0.3: + resolution: {integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==} + engines: {node: '>=0.10.0'} + + is-number@7.0.0: + resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==} + engines: {node: '>=0.12.0'} + + isexe@2.0.0: + resolution: {integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==} + + jackspeak@4.1.1: + resolution: {integrity: sha512-zptv57P3GpL+O0I7VdMJNBZCu+BPHVQUk55Ft8/QCJjTVxrnJHuVuX/0Bl2A6/+2oyR/ZMEuFKwmzqqZ/U5nPQ==} + engines: {node: 20 || >=22} + + jest-diff@29.7.0: + resolution: {integrity: sha512-LMIgiIrhigmPrs03JHpxUh2yISK3vLFPkAodPeo0+BuF7wA2FoQbkEg1u8gBYBThncu7e1oEDUfIXVuTqLRUjw==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + + jest-get-type@29.6.3: + resolution: {integrity: sha512-zrteXnqYxfQh7l5FHyL38jL39di8H8rHoecLH3JNxH3BwOrBsNeabdap5e0I23lD4HHI8W5VFBZqG4Eaq5LNcw==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + + jest-matcher-utils@29.7.0: + resolution: {integrity: sha512-sBkD+Xi9DtcChsI3L3u0+N0opgPYnCRPtGcQYrgXmR+hmt/fYfWAL0xRXYU8eWOdfuLgBe0YCW3AFtnRLagq/g==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + + js-levenshtein@1.1.6: + resolution: {integrity: sha512-X2BB11YZtrRqY4EnQcLX5Rh373zbK4alC1FW7D7MBhL2gtcC17cTnr6DmfHZeS0s2rTHjUTMMHfG7gO8SSdw+g==} + engines: {node: '>=0.10.0'} + + js-tokens@4.0.0: + resolution: {integrity: sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==} + + js-yaml@4.1.0: + resolution: {integrity: sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==} + hasBin: true + + json-pointer@0.6.2: + resolution: {integrity: sha512-vLWcKbOaXlO+jvRy4qNd+TI1QUPZzfJj1tpJ3vAXDych5XJf93ftpUKe5pKCrzyIIwgBJcOcCVRUfqQP25afBw==} + + json-schema-to-ts@2.7.2: + resolution: {integrity: sha512-R1JfqKqbBR4qE8UyBR56Ms30LL62/nlhoz+1UkfI/VE7p54Awu919FZ6ZUPG8zIa3XB65usPJgr1ONVncUGSaQ==} + engines: {node: '>=16'} + + json-schema-traverse@1.0.0: + resolution: {integrity: sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==} + + jsonpath-rfc9535@1.3.0: + resolution: {integrity: sha512-3jFHya7oZ45aDxIIdx+/zQARahHXxFSMWBkcBUldfXpLS9VCXDJyTKt35kQfEXLqh0K3Ixw/9xFnvcDStaxh7Q==} + engines: {node: '>=20'} + + 
jsonpointer@5.0.1: + resolution: {integrity: sha512-p/nXbhSEcu3pZRdkW1OfJhpsVtW1gd4Wa1fnQc9YLiTfAjn0312eMKimbdIQzuZl9aa9xUGaRlP9T/CJE/ditQ==} + engines: {node: '>=0.10.0'} + + leven@3.1.0: + resolution: {integrity: sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==} + engines: {node: '>=6'} + + long@5.3.2: + resolution: {integrity: sha512-mNAgZ1GmyNhD7AuqnTG3/VQ26o760+ZYBPKjPvugO8+nLbYfX6TVpJPseBvopbdY+qpZ/lKUnmEc1LeZYS3QAA==} + + loose-envify@1.4.0: + resolution: {integrity: sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==} + hasBin: true + + lru-cache@11.2.2: + resolution: {integrity: sha512-F9ODfyqML2coTIsQpSkRHnLSZMtkU8Q+mSfcaIyKwy58u+8k5nvAYeiNhsyMARvzNcXJ9QfWVrcPsC9e9rAxtg==} + engines: {node: 20 || >=22} + + lunr@2.3.9: + resolution: {integrity: sha512-zTU3DaZaF3Rt9rhN3uBMGQD3dD2/vFQqnvZCDv4dl5iOzq2IZQqTxu90r4E5J+nP70J3ilqVCrbho2eWaeW8Ow==} + + make-error@1.3.6: + resolution: {integrity: sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==} + + mark.js@8.11.1: + resolution: {integrity: sha512-1I+1qpDt4idfgLQG+BNWmrqku+7/2bi5nLf4YwF8y8zXvmfiTBY3PV3ZibfrjBueCByROpuBjLLFCajqkgYoLQ==} + + marked@4.3.0: + resolution: {integrity: sha512-PRsaiG84bK+AMvxziE/lCFss8juXjNaWzVbN5tXAm4XjeaS9NAHhop+PjQxz2A9h8Q4M/xGmzP8vqNwy6JeK0A==} + engines: {node: '>= 12'} + hasBin: true + + math-intrinsics@1.1.0: + resolution: {integrity: sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==} + engines: {node: '>= 0.4'} + + mime-db@1.52.0: + resolution: {integrity: sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==} + engines: {node: '>= 0.6'} + + mime-types@2.1.35: + resolution: {integrity: sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==} + engines: {node: '>= 0.6'} + + minimatch@10.0.3: + resolution: {integrity: sha512-IPZ167aShDZZUMdRk66cyQAW3qr0WzbHkPdMYa8bzZhlHhO3jALbKdxcaak7W9FfT2rZNpQuUu4Od7ILEpXSaw==} + engines: {node: 20 || >=22} + + minimatch@5.1.6: + resolution: {integrity: sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==} + engines: {node: '>=10'} + + minimist@1.2.8: + resolution: {integrity: sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==} + + minipass@7.1.2: + resolution: {integrity: sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==} + engines: {node: '>=16 || 14 >=14.17'} + + mobx-react-lite@4.1.1: + resolution: {integrity: sha512-iUxiMpsvNraCKXU+yPotsOncNNmyeS2B5DKL+TL6Tar/xm+wwNJAubJmtRSeAoYawdZqwv8Z/+5nPRHeQxTiXg==} + peerDependencies: + mobx: ^6.9.0 + react: ^16.8.0 || ^17 || ^18 || ^19 + react-dom: '*' + react-native: '*' + peerDependenciesMeta: + react-dom: + optional: true + react-native: + optional: true + + mobx-react@9.2.0: + resolution: {integrity: sha512-dkGWCx+S0/1mfiuFfHRH8D9cplmwhxOV5CkXMp38u6rQGG2Pv3FWYztS0M7ncR6TyPRQKaTG/pnitInoYE9Vrw==} + peerDependencies: + mobx: ^6.9.0 + react: ^16.8.0 || ^17 || ^18 || ^19 + react-dom: '*' + react-native: '*' + peerDependenciesMeta: + react-dom: + optional: true + react-native: + optional: true + + mobx@6.15.0: + resolution: {integrity: sha512-UczzB+0nnwGotYSgllfARAqWCJ5e/skuV2K/l+Zyck/H6pJIhLXuBnz+6vn2i211o7DtbE78HQtsYEKICHGI+g==} + + ms@2.1.3: + resolution: {integrity: 
sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==} + + nanoid@3.3.11: + resolution: {integrity: sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==} + engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1} + hasBin: true + + neo-async@2.6.2: + resolution: {integrity: sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==} + + node-fetch-h2@2.3.0: + resolution: {integrity: sha512-ofRW94Ab0T4AOh5Fk8t0h8OBWrmjb0SSB20xh1H8YnPV9EJ+f5AMoYSUQ2zgJ4Iq2HAK0I2l5/Nequ8YzFS3Hg==} + engines: {node: 4.x || >=6.0.0} + + node-fetch@2.7.0: + resolution: {integrity: sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==} + engines: {node: 4.x || >=6.0.0} + peerDependencies: + encoding: ^0.1.0 + peerDependenciesMeta: + encoding: + optional: true + + node-readfiles@0.2.0: + resolution: {integrity: sha512-SU00ZarexNlE4Rjdm83vglt5Y9yiQ+XI1XpflWlb7q7UTN1JUItm69xMeiQCTxtTfnzt+83T8Cx+vI2ED++VDA==} + + normalize-path@3.0.0: + resolution: {integrity: sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==} + engines: {node: '>=0.10.0'} + + nth-check@2.1.1: + resolution: {integrity: sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w==} + + oas-kit-common@1.0.8: + resolution: {integrity: sha512-pJTS2+T0oGIwgjGpw7sIRU8RQMcUoKCDWFLdBqKB2BNmGpbBMH2sdqAaOXUg8OzonZHU0L7vfJu1mJFEiYDWOQ==} + + oas-linter@3.2.2: + resolution: {integrity: sha512-KEGjPDVoU5K6swgo9hJVA/qYGlwfbFx+Kg2QB/kd7rzV5N8N5Mg6PlsoCMohVnQmo+pzJap/F610qTodKzecGQ==} + + oas-resolver@2.5.6: + resolution: {integrity: sha512-Yx5PWQNZomfEhPPOphFbZKi9W93CocQj18NlD2Pa4GWZzdZpSJvYwoiuurRI7m3SpcChrnO08hkuQDL3FGsVFQ==} + hasBin: true + + oas-schema-walker@1.1.5: + resolution: {integrity: sha512-2yucenq1a9YPmeNExoUa9Qwrt9RFkjqaMAA1X+U7sbb0AqBeTIdMHky9SQQ6iN94bO5NW0W4TRYXerG+BdAvAQ==} + + oas-validator@5.0.8: + resolution: {integrity: sha512-cu20/HE5N5HKqVygs3dt94eYJfBi0TsZvPVXDhbXQHiEityDN+RROTleefoKRKKJ9dFAF2JBkDHgvWj0sjKGmw==} + + object-assign@4.1.1: + resolution: {integrity: sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==} + engines: {node: '>=0.10.0'} + + openapi-sampler@1.6.1: + resolution: {integrity: sha512-s1cIatOqrrhSj2tmJ4abFYZQK6l5v+V4toO5q1Pa0DyN8mtyqy2I+Qrj5W9vOELEtybIMQs/TBZGVO/DtTFK8w==} + + outdent@0.8.0: + resolution: {integrity: sha512-KiOAIsdpUTcAXuykya5fnVVT+/5uS0Q1mrkRHcF89tpieSmY33O/tmc54CqwA+bfhbtEfZUNLHaPUiB9X3jt1A==} + + package-json-from-dist@1.0.1: + resolution: {integrity: sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==} + + parse5-htmlparser2-tree-adapter@7.1.0: + resolution: {integrity: sha512-ruw5xyKs6lrpo9x9rCZqZZnIUntICjQAd0Wsmp396Ul9lN/h+ifgVV1x1gZHi8euej6wTfpqX8j+BFQxF0NS/g==} + + parse5-parser-stream@7.1.2: + resolution: {integrity: sha512-JyeQc9iwFLn5TbvvqACIF/VXG6abODeB3Fwmv/TGdLk2LfbWkaySGY72at4+Ty7EkPZj854u4CrICqNk2qIbow==} + + parse5@7.3.0: + resolution: {integrity: sha512-IInvU7fabl34qmi9gY8XOVxhYyMyuH2xUNpb2q8/Y+7552KlejkRvqvD19nMoUW/uQGGbqNpA6Tufu5FL5BZgw==} + + path-browserify@1.0.1: + resolution: {integrity: sha512-b7uo2UCUOYZcnF/3ID0lulOJi/bafxa1xPe7ZPsammBSpjSWQkjNxlt635YGS2MiR9GjvuXCtz2emr3jbsz98g==} + + path-key@3.1.1: + resolution: {integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==} + engines: {node: '>=8'} + + 
path-scurry@2.0.0: + resolution: {integrity: sha512-ypGJsmGtdXUOeM5u93TyeIEfEhM6s+ljAhrk5vAvSx8uyY/02OvrZnA0YNGUrPXfpJMgI1ODd3nwz8Npx4O4cg==} + engines: {node: 20 || >=22} + + perfect-scrollbar@1.5.6: + resolution: {integrity: sha512-rixgxw3SxyJbCaSpo1n35A/fwI1r2rdwMKOTCg/AcG+xOEyZcE8UHVjpZMFCVImzsFoCZeJTT+M/rdEIQYO2nw==} + + picocolors@1.1.1: + resolution: {integrity: sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==} + + picomatch@2.3.1: + resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==} + engines: {node: '>=8.6'} + + pluralize@8.0.0: + resolution: {integrity: sha512-Nc3IT5yHzflTfbjgqWcCPpo7DaKy4FnpB0l/zCAW0Tc7jxAiuqSxHasntB3D7887LSrA93kDJ9IXovxJYxyLCA==} + engines: {node: '>=4'} + + polished@4.3.1: + resolution: {integrity: sha512-OBatVyC/N7SCW/FaDHrSd+vn0o5cS855TOmYi4OkdWUMSJCET/xip//ch8xGUvtr3i44X9LVyWwQlRMTN3pwSA==} + engines: {node: '>=10'} + + postcss-value-parser@4.2.0: + resolution: {integrity: sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==} + + postcss@8.4.49: + resolution: {integrity: sha512-OCVPnIObs4N29kxTjzLfUryOkvZEq+pf8jTF0lg8E7uETuWHA+v7j3c/xJmiqpX450191LlmZfUKkXxkTry7nA==} + engines: {node: ^10 || ^12 || >=14} + + pretty-format@29.7.0: + resolution: {integrity: sha512-Pdlw/oPxN+aXdmM9R00JVC9WVFoCLTKJvDVLgmJ+qAffBMxsV85l/Lu7sNx4zSzPyoL2euImuEwHhOXdEgNFZQ==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + + prismjs@1.30.0: + resolution: {integrity: sha512-DEvV2ZF2r2/63V+tK8hQvrR2ZGn10srHbXviTlcv7Kpzw8jWiNTqbVgjO3IY8RxrrOUF8VPMQQFysYYYv0YZxw==} + engines: {node: '>=6'} + + prop-types@15.8.1: + resolution: {integrity: sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg==} + + protobufjs@7.5.4: + resolution: {integrity: sha512-CvexbZtbov6jW2eXAvLukXjXUW1TzFaivC46BpWc/3BpcCysb5Vffu+B3XHMm8lVEuy2Mm4XGex8hBSg1yapPg==} + engines: {node: '>=12.0.0'} + + queue-microtask@1.2.3: + resolution: {integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==} + + randombytes@2.1.0: + resolution: {integrity: sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==} + + react-dom@19.2.0: + resolution: {integrity: sha512-UlbRu4cAiGaIewkPyiRGJk0imDN2T3JjieT6spoL2UeSf5od4n5LB/mQ4ejmxhCFT1tYe8IvaFulzynWovsEFQ==} + peerDependencies: + react: ^19.2.0 + + react-is@16.13.1: + resolution: {integrity: sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==} + + react-is@18.3.1: + resolution: {integrity: sha512-/LLMVyas0ljjAtoYiPqYiL8VWXzUUdThrmU5+n20DZv+a+ClRoevUzw5JxU+Ieh5/c87ytoTBV9G1FiKfNJdmg==} + + react-tabs@6.1.0: + resolution: {integrity: sha512-6QtbTRDKM+jA/MZTTefvigNxo0zz+gnBTVFw2CFVvq+f2BuH0nF0vDLNClL045nuTAdOoK/IL1vTP0ZLX0DAyQ==} + peerDependencies: + react: ^18.0.0 || ^19.0.0 + + react@19.2.0: + resolution: {integrity: sha512-tmbWg6W31tQLeB5cdIBOicJDJRR2KzXsV7uSK9iNfLWQ5bIZfxuPEHp7M8wiHyHnn0DD1i7w3Zmin0FtkrwoCQ==} + engines: {node: '>=0.10.0'} + + readable-stream@3.6.2: + resolution: {integrity: sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==} + engines: {node: '>= 6'} + + readdirp@3.6.0: + resolution: {integrity: sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==} + engines: {node: '>=8.10.0'} + + redoc@2.5.1: + resolution: {integrity: 
sha512-LmqA+4A3CmhTllGG197F0arUpmChukAj9klfSdxNRemT9Hr07xXr7OGKu4PHzBs359sgrJ+4JwmOlM7nxLPGMg==} + engines: {node: '>=6.9', npm: '>=3.0.0'} + peerDependencies: + core-js: ^3.1.4 + mobx: ^6.0.4 + react: ^16.8.4 || ^17.0.0 || ^18.0.0 || ^19.0.0 + react-dom: ^16.8.4 || ^17.0.0 || ^18.0.0 || ^19.0.0 + styled-components: ^4.1.1 || ^5.1.1 || ^6.0.5 + + reftools@1.1.9: + resolution: {integrity: sha512-OVede/NQE13xBQ+ob5CKd5KyeJYU2YInb1bmV4nRoOfquZPkAkxuOXicSe1PvqIuZZ4kD13sPKBbR7UFDmli6w==} + + require-directory@2.1.1: + resolution: {integrity: sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==} + engines: {node: '>=0.10.0'} + + require-from-string@2.0.2: + resolution: {integrity: sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==} + engines: {node: '>=0.10.0'} + + safe-buffer@5.2.1: + resolution: {integrity: sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==} + + safer-buffer@2.1.2: + resolution: {integrity: sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==} + + scheduler@0.27.0: + resolution: {integrity: sha512-eNv+WrVbKu1f3vbYJT/xtiF5syA5HPIMtf9IgY/nKg0sWqzAUEvqY/xm7OcZc/qafLx/iO9FgOmeSAp4v5ti/Q==} + + semver@7.7.3: + resolution: {integrity: sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==} + engines: {node: '>=10'} + hasBin: true + + set-cookie-parser@2.7.1: + resolution: {integrity: sha512-IOc8uWeOZgnb3ptbCURJWNjWUPcO3ZnTTdzsurqERrP6nPyv+paC55vJM0LpOlT2ne+Ix+9+CRG1MNLlyZ4GjQ==} + + shallowequal@1.1.0: + resolution: {integrity: sha512-y0m1JoUZSlPAjXVtPPW70aZWfIL/dSP7AFkRnniLCrK/8MDKog3TySTBmckD+RObVxH0v4Tox67+F14PdED2oQ==} + + shebang-command@2.0.0: + resolution: {integrity: sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==} + engines: {node: '>=8'} + + shebang-regex@3.0.0: + resolution: {integrity: sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==} + engines: {node: '>=8'} + + should-equal@2.0.0: + resolution: {integrity: sha512-ZP36TMrK9euEuWQYBig9W55WPC7uo37qzAEmbjHz4gfyuXrEUgF8cUvQVO+w+d3OMfPvSRQJ22lSm8MQJ43LTA==} + + should-format@3.0.3: + resolution: {integrity: sha512-hZ58adtulAk0gKtua7QxevgUaXTTXxIi8t41L3zo9AHvjXO1/7sdLECuHeIN2SRtYXpNkmhoUP2pdeWgricQ+Q==} + + should-type-adaptors@1.1.0: + resolution: {integrity: sha512-JA4hdoLnN+kebEp2Vs8eBe9g7uy0zbRo+RMcU0EsNy+R+k049Ki+N5tT5Jagst2g7EAja+euFuoXFCa8vIklfA==} + + should-type@1.4.0: + resolution: {integrity: sha512-MdAsTu3n25yDbIe1NeN69G4n6mUnJGtSJHygX3+oN0ZbO3DTiATnf7XnYJdGT42JCXurTb1JI0qOBR65shvhPQ==} + + should-util@1.0.1: + resolution: {integrity: sha512-oXF8tfxx5cDk8r2kYqlkUJzZpDBqVY/II2WhvU0n9Y3XYvAYRmeaf1PvvIvTgPnv4KJ+ES5M0PyDq5Jp+Ygy2g==} + + should@13.2.3: + resolution: {integrity: sha512-ggLesLtu2xp+ZxI+ysJTmNjh2U0TsC+rQ/pfED9bUZZ4DKefP27D+7YJVVTvKsmjLpIi9jAa7itwDGkDDmt1GQ==} + + signal-exit@4.1.0: + resolution: {integrity: sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==} + engines: {node: '>=14'} + + simple-websocket@9.1.0: + resolution: {integrity: sha512-8MJPnjRN6A8UCp1I+H/dSFyjwJhp6wta4hsVRhjf8w9qBHRzxYt14RaOcjvQnhD1N4yKOddEjflwMnQM4VtXjQ==} + + slugify@1.4.7: + resolution: {integrity: sha512-tf+h5W1IrjNm/9rKKj0JU2MDMruiopx0jjVA5zCdBtcGjfp0+c5rHw/zADLC3IeKlGHtVbHtpfzvYA0OYT+HKg==} + engines: {node: '>=8.0.0'} + + source-map-js@1.2.1: + resolution: {integrity: 
sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==} + engines: {node: '>=0.10.0'} + + source-map@0.6.1: + resolution: {integrity: sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==} + engines: {node: '>=0.10.0'} + + stickyfill@1.1.1: + resolution: {integrity: sha512-GCp7vHAfpao+Qh/3Flh9DXEJ/qSi0KJwJw6zYlZOtRYXWUIpMM6mC2rIep/dK8RQqwW0KxGJIllmjPIBOGN8AA==} + + string-width@4.2.3: + resolution: {integrity: sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==} + engines: {node: '>=8'} + + string-width@5.1.2: + resolution: {integrity: sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==} + engines: {node: '>=12'} + + string_decoder@1.3.0: + resolution: {integrity: sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==} + + strip-ansi@6.0.1: + resolution: {integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==} + engines: {node: '>=8'} + + strip-ansi@7.1.2: + resolution: {integrity: sha512-gmBGslpoQJtgnMAvOVqGZpEz9dyoKTCzy2nfz/n8aIFhN/jCE/rCmcxabB6jOOHV+0WNnylOxaxBQPSvcWklhA==} + engines: {node: '>=12'} + + strnum@1.1.2: + resolution: {integrity: sha512-vrN+B7DBIoTTZjnPNewwhx6cBA/H+IS7rfW68n7XxC1y7uoiGQBxaKzqucGUgavX15dJgiGztLJ8vxuEzwqBdA==} + + styled-components@6.1.19: + resolution: {integrity: sha512-1v/e3Dl1BknC37cXMhwGomhO8AkYmN41CqyX9xhUDxry1ns3BFQy2lLDRQXJRdVVWB9OHemv/53xaStimvWyuA==} + engines: {node: '>= 16'} + peerDependencies: + react: '>= 16.8.0' + react-dom: '>= 16.8.0' + + stylis@4.3.2: + resolution: {integrity: sha512-bhtUjWd/z6ltJiQwg0dUfxEJ+W+jdqQd8TbWLWyeIJHlnsqmGLRFFd8e5mA0AZi/zx90smXRlN66YMTcaSFifg==} + + supports-color@7.2.0: + resolution: {integrity: sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==} + engines: {node: '>=8'} + + swagger2openapi@7.0.8: + resolution: {integrity: sha512-upi/0ZGkYgEcLeGieoz8gT74oWHA0E7JivX7aN9mAf+Tc7BQoRBvnIGHoPDw+f9TXTW4s6kGYCZJtauP6OYp7g==} + hasBin: true + + to-regex-range@5.0.1: + resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==} + engines: {node: '>=8.0'} + + tr46@0.0.3: + resolution: {integrity: sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==} + + ts-algebra@1.2.2: + resolution: {integrity: sha512-kloPhf1hq3JbCPOTYoOWDKxebWjNb2o/LKnNfkWhxVVisFFmMJPPdJeGoGmM+iRLyoXAR61e08Pb+vUXINg8aA==} + + ts-node@10.9.2: + resolution: {integrity: sha512-f0FFpIdcHgn8zcPSbf1dRevwt047YMnaiJM3u2w2RewrB+fob/zePZcrOyQoLMMO7aBIddLcQIEK5dYjkLnGrQ==} + hasBin: true + peerDependencies: + '@swc/core': '>=1.2.50' + '@swc/wasm': '>=1.2.50' + '@types/node': '*' + typescript: '>=2.7' + peerDependenciesMeta: + '@swc/core': + optional: true + '@swc/wasm': + optional: true + + tslib@2.6.2: + resolution: {integrity: sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==} + + typescript@5.9.2: + resolution: {integrity: sha512-CWBzXQrc/qOkhidw1OzBTQuYRbfyxDXJMVJ1XNwUHGROVmuaeiEm3OslpZ1RV96d7SKKjZKrSJu3+t/xlw3R9A==} + engines: {node: '>=14.17'} + hasBin: true + + uglify-js@3.19.3: + resolution: {integrity: sha512-v3Xu+yuwBXisp6QYTcH4UbH+xYJXqnq2m/LtQVWKWzYc1iehYnLixoQDN9FH6/j9/oybfd6W9Ghwkl8+UMKTKQ==} + engines: {node: '>=0.8.0'} + hasBin: true + + undici-types@7.10.0: + resolution: {integrity: 
sha512-t5Fy/nfn+14LuOc2KNYg75vZqClpAiqscVvMygNnlsHBFpSXdJaYtXMcdNLpl/Qvc3P2cB3s6lOV51nqsFq4ag==} + + undici@6.22.0: + resolution: {integrity: sha512-hU/10obOIu62MGYjdskASR3CUAiYaFTtC9Pa6vHyf//mAipSvSQg6od2CnJswq7fvzNS3zJhxoRkgNVaHurWKw==} + engines: {node: '>=18.17'} + + undici@7.15.0: + resolution: {integrity: sha512-7oZJCPvvMvTd0OlqWsIxTuItTpJBpU1tcbVl24FMn3xt3+VSunwUasmfPJRE57oNO1KsZ4PgA1xTdAX4hq8NyQ==} + engines: {node: '>=20.18.1'} + + uri-js-replace@1.0.1: + resolution: {integrity: sha512-W+C9NWNLFOoBI2QWDp4UT9pv65r2w5Cx+3sTYFvtMdDBxkKt1syCqsUdSFAChbEe1uK5TfS04wt/nGwmaeIQ0g==} + + url-template@2.0.8: + resolution: {integrity: sha512-XdVKMF4SJ0nP/O7XIPB0JwAEuT9lDIYnNsK8yGVe43y0AWoKeJNdv3ZNWh7ksJ6KqQFjOO6ox/VEitLnaVNufw==} + + use-sync-external-store@1.6.0: + resolution: {integrity: sha512-Pp6GSwGP/NrPIrxVFAIkOQeyw8lFenOHijQWkUTrDvrF4ALqylP2C/KCkeS9dpUM3KvYRQhna5vt7IL95+ZQ9w==} + peerDependencies: + react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 + + util-deprecate@1.0.2: + resolution: {integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==} + + v8-compile-cache-lib@3.0.1: + resolution: {integrity: sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==} + + webidl-conversions@3.0.1: + resolution: {integrity: sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==} + + whatwg-encoding@3.1.1: + resolution: {integrity: sha512-6qN4hJdMwfYBtE3YBTTHhoeuUrDBPZmbQaxWAqSALV/MeEnR5z1xd8UKud2RAkFoPkmB+hli1TZSnyi84xz1vQ==} + engines: {node: '>=18'} + + whatwg-mimetype@4.0.0: + resolution: {integrity: sha512-QaKxh0eNIi2mE9p2vEdzfagOKHCcj1pJ56EEHGQOVxp8r9/iszLUUV7v89x9O1p/T+NlTM5W7jW6+cz4Fq1YVg==} + engines: {node: '>=18'} + + whatwg-url@5.0.0: + resolution: {integrity: sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==} + + which@2.0.2: + resolution: {integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==} + engines: {node: '>= 8'} + hasBin: true + + wordwrap@1.0.0: + resolution: {integrity: sha512-gvVzJFlPycKc5dZN4yPkP8w7Dc37BtP1yczEneOb4uq34pXZcvrtRTmWV8W+Ume+XCxKgbjM+nevkyFPMybd4Q==} + + wrap-ansi@7.0.0: + resolution: {integrity: sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==} + engines: {node: '>=10'} + + wrap-ansi@8.1.0: + resolution: {integrity: sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==} + engines: {node: '>=12'} + + ws@7.5.10: + resolution: {integrity: sha512-+dbF1tHwZpXcbOJdVOkzLDxZP1ailvSxM6ZweXTegylPny803bFhA+vqBYw4s31NSAk4S2Qz+AKXK9a4wkdjcQ==} + engines: {node: '>=8.3.0'} + peerDependencies: + bufferutil: ^4.0.1 + utf-8-validate: ^5.0.2 + peerDependenciesMeta: + bufferutil: + optional: true + utf-8-validate: + optional: true + + y18n@5.0.8: + resolution: {integrity: sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==} + engines: {node: '>=10'} + + yaml-ast-parser@0.0.43: + resolution: {integrity: sha512-2PTINUwsRqSd+s8XxKaJWQlUuEMHJQyEuh2edBbW8KNJz0SJPwUSD2zRWqezFEdN7IzAgeuYHFUCF7o8zRdZ0A==} + + yaml@1.10.2: + resolution: {integrity: sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==} + engines: {node: '>= 6'} + + yargs-parser@20.2.9: + resolution: {integrity: sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==} + engines: {node: 
'>=10'} + + yargs@17.0.1: + resolution: {integrity: sha512-xBBulfCc8Y6gLFcrPvtqKz9hz8SO0l1Ni8GgDekvBX2ro0HRQImDGnikfc33cgzcYUSncapnNcZDjVFIH3f6KQ==} + engines: {node: '>=12'} + + yn@3.1.1: + resolution: {integrity: sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==} + engines: {node: '>=6'} + +snapshots: + + '@babel/code-frame@7.27.1': + dependencies: + '@babel/helper-validator-identifier': 7.27.1 + js-tokens: 4.0.0 + picocolors: 1.1.1 + + '@babel/helper-validator-identifier@7.27.1': {} + + '@babel/runtime@7.28.4': {} + + '@cspotcode/source-map-support@0.8.1': + dependencies: + '@jridgewell/trace-mapping': 0.3.9 + + '@emotion/is-prop-valid@1.2.2': + dependencies: + '@emotion/memoize': 0.8.1 + + '@emotion/memoize@0.8.1': {} + + '@emotion/unitless@0.8.1': {} + + '@exodus/schemasafe@1.3.0': {} + + '@faker-js/faker@7.6.0': {} + + '@humanwhocodes/momoa@2.0.4': {} + + '@isaacs/balanced-match@4.0.1': {} + + '@isaacs/brace-expansion@5.0.0': + dependencies: + '@isaacs/balanced-match': 4.0.1 + + '@isaacs/cliui@8.0.2': + dependencies: + string-width: 5.1.2 + string-width-cjs: string-width@4.2.3 + strip-ansi: 7.1.2 + strip-ansi-cjs: strip-ansi@6.0.1 + wrap-ansi: 8.1.0 + wrap-ansi-cjs: wrap-ansi@7.0.0 + + '@jest/schemas@29.6.3': + dependencies: + '@sinclair/typebox': 0.27.8 + + '@jridgewell/resolve-uri@3.1.2': {} + + '@jridgewell/sourcemap-codec@1.5.5': {} + + '@jridgewell/trace-mapping@0.3.9': + dependencies: + '@jridgewell/resolve-uri': 3.1.2 + '@jridgewell/sourcemap-codec': 1.5.5 + + '@noble/hashes@1.8.0': {} + + '@opentelemetry/api-logs@0.202.0': + dependencies: + '@opentelemetry/api': 1.9.0 + + '@opentelemetry/api@1.9.0': {} + + '@opentelemetry/context-async-hooks@2.0.1(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + + '@opentelemetry/core@2.0.1(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/semantic-conventions': 1.34.0 + + '@opentelemetry/exporter-trace-otlp-http@0.202.0(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 2.0.1(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-exporter-base': 0.202.0(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-transformer': 0.202.0(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 2.0.1(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-trace-base': 2.0.1(@opentelemetry/api@1.9.0) + + '@opentelemetry/otlp-exporter-base@0.202.0(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 2.0.1(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-transformer': 0.202.0(@opentelemetry/api@1.9.0) + + '@opentelemetry/otlp-transformer@0.202.0(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/api-logs': 0.202.0 + '@opentelemetry/core': 2.0.1(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 2.0.1(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-logs': 0.202.0(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-metrics': 2.0.1(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-trace-base': 2.0.1(@opentelemetry/api@1.9.0) + protobufjs: 7.5.4 + + '@opentelemetry/resources@2.0.1(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 2.0.1(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.34.0 + + '@opentelemetry/sdk-logs@0.202.0(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/api-logs': 0.202.0 + '@opentelemetry/core': 
2.0.1(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 2.0.1(@opentelemetry/api@1.9.0) + + '@opentelemetry/sdk-metrics@2.0.1(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 2.0.1(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 2.0.1(@opentelemetry/api@1.9.0) + + '@opentelemetry/sdk-trace-base@2.0.1(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 2.0.1(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 2.0.1(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.34.0 + + '@opentelemetry/sdk-trace-node@2.0.1(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/context-async-hooks': 2.0.1(@opentelemetry/api@1.9.0) + '@opentelemetry/core': 2.0.1(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-trace-base': 2.0.1(@opentelemetry/api@1.9.0) + + '@opentelemetry/semantic-conventions@1.34.0': {} + + '@protobufjs/aspromise@1.1.2': {} + + '@protobufjs/base64@1.1.2': {} + + '@protobufjs/codegen@2.0.4': {} + + '@protobufjs/eventemitter@1.1.0': {} + + '@protobufjs/fetch@1.1.0': + dependencies: + '@protobufjs/aspromise': 1.1.2 + '@protobufjs/inquire': 1.1.0 + + '@protobufjs/float@1.0.2': {} + + '@protobufjs/inquire@1.1.0': {} + + '@protobufjs/path@1.1.2': {} + + '@protobufjs/pool@1.1.0': {} + + '@protobufjs/utf8@1.1.0': {} + + '@redocly/ajv@8.11.2': + dependencies: + fast-deep-equal: 3.1.3 + json-schema-traverse: 1.0.0 + require-from-string: 2.0.2 + uri-js-replace: 1.0.1 + + '@redocly/ajv@8.11.3': + dependencies: + fast-deep-equal: 3.1.3 + json-schema-traverse: 1.0.0 + require-from-string: 2.0.2 + uri-js-replace: 1.0.1 + + '@redocly/cli@2.4.0(@opentelemetry/api@1.9.0)(ajv@8.17.1)(core-js@3.45.1)': + dependencies: + '@opentelemetry/exporter-trace-otlp-http': 0.202.0(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 2.0.1(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-trace-node': 2.0.1(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.34.0 + '@redocly/openapi-core': 2.4.0(ajv@8.17.1) + '@redocly/respect-core': 2.4.0(ajv@8.17.1) + abort-controller: 3.0.0 + chokidar: 3.6.0 + colorette: 1.4.0 + cookie: 0.7.2 + dotenv: 16.4.7 + form-data: 4.0.4 + glob: 11.0.3 + handlebars: 4.7.8 + https-proxy-agent: 7.0.6 + mobx: 6.15.0 + pluralize: 8.0.0 + react: 19.2.0 + react-dom: 19.2.0(react@19.2.0) + redoc: 2.5.1(core-js@3.45.1)(mobx@6.15.0)(react-dom@19.2.0(react@19.2.0))(react@19.2.0)(styled-components@6.1.19(react-dom@19.2.0(react@19.2.0))(react@19.2.0)) + semver: 7.7.3 + set-cookie-parser: 2.7.1 + simple-websocket: 9.1.0 + styled-components: 6.1.19(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + undici: 6.22.0 + yargs: 17.0.1 + transitivePeerDependencies: + - '@opentelemetry/api' + - ajv + - bufferutil + - core-js + - encoding + - react-native + - supports-color + - utf-8-validate + + '@redocly/config@0.22.2': {} + + '@redocly/config@0.31.0': + dependencies: + json-schema-to-ts: 2.7.2 + + '@redocly/openapi-core@1.34.5': + dependencies: + '@redocly/ajv': 8.11.3 + '@redocly/config': 0.22.2 + colorette: 1.4.0 + https-proxy-agent: 7.0.6 + js-levenshtein: 1.1.6 + js-yaml: 4.1.0 + minimatch: 5.1.6 + pluralize: 8.0.0 + yaml-ast-parser: 0.0.43 + transitivePeerDependencies: + - supports-color + + '@redocly/openapi-core@2.4.0(ajv@8.17.1)': + dependencies: + '@redocly/ajv': 8.11.3 + '@redocly/config': 0.31.0 + ajv-formats: 2.1.1(ajv@8.17.1) + colorette: 1.4.0 + js-levenshtein: 1.1.6 + js-yaml: 4.1.0 + minimatch: 10.0.3 + pluralize: 
8.0.0 + yaml-ast-parser: 0.0.43 + transitivePeerDependencies: + - ajv + + '@redocly/respect-core@2.4.0(ajv@8.17.1)': + dependencies: + '@faker-js/faker': 7.6.0 + '@noble/hashes': 1.8.0 + '@redocly/ajv': 8.11.2 + '@redocly/openapi-core': 2.4.0(ajv@8.17.1) + better-ajv-errors: 1.2.0(ajv@8.17.1) + colorette: 2.0.20 + jest-matcher-utils: 29.7.0 + json-pointer: 0.6.2 + jsonpath-rfc9535: 1.3.0 + openapi-sampler: 1.6.1 + outdent: 0.8.0 + transitivePeerDependencies: + - ajv + + '@sinclair/typebox@0.27.8': {} + + '@tsconfig/node10@1.0.11': {} + + '@tsconfig/node12@1.0.11': {} + + '@tsconfig/node14@1.0.3': {} + + '@tsconfig/node16@1.0.4': {} + + '@types/js-yaml@4.0.9': {} + + '@types/json-schema@7.0.15': {} + + '@types/node@24.3.1': + dependencies: + undici-types: 7.10.0 + + '@types/stylis@4.2.5': {} + + '@types/trusted-types@2.0.7': + optional: true + + abort-controller@3.0.0: + dependencies: + event-target-shim: 5.0.1 + + acorn-walk@8.3.4: + dependencies: + acorn: 8.15.0 + + acorn@8.15.0: {} + + agent-base@7.1.4: {} + + ajv-formats@2.1.1(ajv@8.17.1): + optionalDependencies: + ajv: 8.17.1 + + ajv@8.17.1: + dependencies: + fast-deep-equal: 3.1.3 + fast-uri: 3.1.0 + json-schema-traverse: 1.0.0 + require-from-string: 2.0.2 + + ansi-regex@5.0.1: {} + + ansi-regex@6.2.2: {} + + ansi-styles@4.3.0: + dependencies: + color-convert: 2.0.1 + + ansi-styles@5.2.0: {} + + ansi-styles@6.2.3: {} + + anymatch@3.1.3: + dependencies: + normalize-path: 3.0.0 + picomatch: 2.3.1 + + arg@4.1.3: {} + + argparse@2.0.1: {} + + asynckit@0.4.0: {} + + balanced-match@1.0.2: {} + + better-ajv-errors@1.2.0(ajv@8.17.1): + dependencies: + '@babel/code-frame': 7.27.1 + '@humanwhocodes/momoa': 2.0.4 + ajv: 8.17.1 + chalk: 4.1.2 + jsonpointer: 5.0.1 + leven: 3.1.0 + + binary-extensions@2.3.0: {} + + boolbase@1.0.0: {} + + brace-expansion@2.0.2: + dependencies: + balanced-match: 1.0.2 + + braces@3.0.3: + dependencies: + fill-range: 7.1.1 + + call-bind-apply-helpers@1.0.2: + dependencies: + es-errors: 1.3.0 + function-bind: 1.1.2 + + call-me-maybe@1.0.2: {} + + camelize@1.0.1: {} + + chalk@4.1.2: + dependencies: + ansi-styles: 4.3.0 + supports-color: 7.2.0 + + cheerio-select@2.1.0: + dependencies: + boolbase: 1.0.0 + css-select: 5.2.2 + css-what: 6.2.2 + domelementtype: 2.3.0 + domhandler: 5.0.3 + domutils: 3.2.2 + + cheerio@1.1.2: + dependencies: + cheerio-select: 2.1.0 + dom-serializer: 2.0.0 + domhandler: 5.0.3 + domutils: 3.2.2 + encoding-sniffer: 0.2.1 + htmlparser2: 10.0.0 + parse5: 7.3.0 + parse5-htmlparser2-tree-adapter: 7.1.0 + parse5-parser-stream: 7.1.2 + undici: 7.15.0 + whatwg-mimetype: 4.0.0 + + chokidar@3.6.0: + dependencies: + anymatch: 3.1.3 + braces: 3.0.3 + glob-parent: 5.1.2 + is-binary-path: 2.1.0 + is-glob: 4.0.3 + normalize-path: 3.0.0 + readdirp: 3.6.0 + optionalDependencies: + fsevents: 2.3.3 + + classnames@2.5.1: {} + + cliui@7.0.4: + dependencies: + string-width: 4.2.3 + strip-ansi: 6.0.1 + wrap-ansi: 7.0.0 + + clsx@2.1.1: {} + + color-convert@2.0.1: + dependencies: + color-name: 1.1.4 + + color-name@1.1.4: {} + + colorette@1.4.0: {} + + colorette@2.0.20: {} + + combined-stream@1.0.8: + dependencies: + delayed-stream: 1.0.0 + + cookie@0.7.2: {} + + core-js@3.45.1: {} + + create-require@1.1.1: {} + + cross-spawn@7.0.6: + dependencies: + path-key: 3.1.1 + shebang-command: 2.0.0 + which: 2.0.2 + + css-color-keywords@1.0.0: {} + + css-select@5.2.2: + dependencies: + boolbase: 1.0.0 + css-what: 6.2.2 + domhandler: 5.0.3 + domutils: 3.2.2 + nth-check: 2.1.1 + + css-to-react-native@3.2.0: + dependencies: + 
camelize: 1.0.1 + css-color-keywords: 1.0.0 + postcss-value-parser: 4.2.0 + + css-what@6.2.2: {} + + csstype@3.1.3: {} + + debug@4.4.3: + dependencies: + ms: 2.1.3 + + decko@1.2.0: {} + + delayed-stream@1.0.0: {} + + diff-sequences@29.6.3: {} + + diff@4.0.2: {} + + dom-serializer@2.0.0: + dependencies: + domelementtype: 2.3.0 + domhandler: 5.0.3 + entities: 4.5.0 + + domelementtype@2.3.0: {} + + domhandler@5.0.3: + dependencies: + domelementtype: 2.3.0 + + dompurify@3.2.7: + optionalDependencies: + '@types/trusted-types': 2.0.7 + + domutils@3.2.2: + dependencies: + dom-serializer: 2.0.0 + domelementtype: 2.3.0 + domhandler: 5.0.3 + + dotenv@16.4.7: {} + + dunder-proto@1.0.1: + dependencies: + call-bind-apply-helpers: 1.0.2 + es-errors: 1.3.0 + gopd: 1.2.0 + + eastasianwidth@0.2.0: {} + + emoji-regex@8.0.0: {} + + emoji-regex@9.2.2: {} + + encoding-sniffer@0.2.1: + dependencies: + iconv-lite: 0.6.3 + whatwg-encoding: 3.1.1 + + entities@4.5.0: {} + + entities@6.0.1: {} + + es-define-property@1.0.1: {} + + es-errors@1.3.0: {} + + es-object-atoms@1.1.1: + dependencies: + es-errors: 1.3.0 + + es-set-tostringtag@2.1.0: + dependencies: + es-errors: 1.3.0 + get-intrinsic: 1.3.0 + has-tostringtag: 1.0.2 + hasown: 2.0.2 + + es6-promise@3.3.1: {} + + escalade@3.2.0: {} + + event-target-shim@5.0.1: {} + + eventemitter3@5.0.1: {} + + fast-deep-equal@3.1.3: {} + + fast-safe-stringify@2.1.1: {} + + fast-uri@3.1.0: {} + + fast-xml-parser@4.5.3: + dependencies: + strnum: 1.1.2 + + fill-range@7.1.1: + dependencies: + to-regex-range: 5.0.1 + + foreach@2.0.6: {} + + foreground-child@3.3.1: + dependencies: + cross-spawn: 7.0.6 + signal-exit: 4.1.0 + + form-data@4.0.4: + dependencies: + asynckit: 0.4.0 + combined-stream: 1.0.8 + es-set-tostringtag: 2.1.0 + hasown: 2.0.2 + mime-types: 2.1.35 + + fsevents@2.3.3: + optional: true + + function-bind@1.1.2: {} + + get-caller-file@2.0.5: {} + + get-intrinsic@1.3.0: + dependencies: + call-bind-apply-helpers: 1.0.2 + es-define-property: 1.0.1 + es-errors: 1.3.0 + es-object-atoms: 1.1.1 + function-bind: 1.1.2 + get-proto: 1.0.1 + gopd: 1.2.0 + has-symbols: 1.1.0 + hasown: 2.0.2 + math-intrinsics: 1.1.0 + + get-proto@1.0.1: + dependencies: + dunder-proto: 1.0.1 + es-object-atoms: 1.1.1 + + glob-parent@5.1.2: + dependencies: + is-glob: 4.0.3 + + glob@11.0.3: + dependencies: + foreground-child: 3.3.1 + jackspeak: 4.1.1 + minimatch: 10.0.3 + minipass: 7.1.2 + package-json-from-dist: 1.0.1 + path-scurry: 2.0.0 + + gopd@1.2.0: {} + + handlebars@4.7.8: + dependencies: + minimist: 1.2.8 + neo-async: 2.6.2 + source-map: 0.6.1 + wordwrap: 1.0.0 + optionalDependencies: + uglify-js: 3.19.3 + + has-flag@4.0.0: {} + + has-symbols@1.1.0: {} + + has-tostringtag@1.0.2: + dependencies: + has-symbols: 1.1.0 + + hasown@2.0.2: + dependencies: + function-bind: 1.1.2 + + htmlparser2@10.0.0: + dependencies: + domelementtype: 2.3.0 + domhandler: 5.0.3 + domutils: 3.2.2 + entities: 6.0.1 + + http2-client@1.3.5: {} + + https-proxy-agent@7.0.6: + dependencies: + agent-base: 7.1.4 + debug: 4.4.3 + transitivePeerDependencies: + - supports-color + + iconv-lite@0.6.3: + dependencies: + safer-buffer: 2.1.2 + + inherits@2.0.4: {} + + is-binary-path@2.1.0: + dependencies: + binary-extensions: 2.3.0 + + is-extglob@2.1.1: {} + + is-fullwidth-code-point@3.0.0: {} + + is-glob@4.0.3: + dependencies: + is-extglob: 2.1.1 + + is-number@7.0.0: {} + + isexe@2.0.0: {} + + jackspeak@4.1.1: + dependencies: + '@isaacs/cliui': 8.0.2 + + jest-diff@29.7.0: + dependencies: + chalk: 4.1.2 + diff-sequences: 29.6.3 + 
jest-get-type: 29.6.3 + pretty-format: 29.7.0 + + jest-get-type@29.6.3: {} + + jest-matcher-utils@29.7.0: + dependencies: + chalk: 4.1.2 + jest-diff: 29.7.0 + jest-get-type: 29.6.3 + pretty-format: 29.7.0 + + js-levenshtein@1.1.6: {} + + js-tokens@4.0.0: {} + + js-yaml@4.1.0: + dependencies: + argparse: 2.0.1 + + json-pointer@0.6.2: + dependencies: + foreach: 2.0.6 + + json-schema-to-ts@2.7.2: + dependencies: + '@babel/runtime': 7.28.4 + '@types/json-schema': 7.0.15 + ts-algebra: 1.2.2 + + json-schema-traverse@1.0.0: {} + + jsonpath-rfc9535@1.3.0: {} + + jsonpointer@5.0.1: {} + + leven@3.1.0: {} + + long@5.3.2: {} + + loose-envify@1.4.0: + dependencies: + js-tokens: 4.0.0 + + lru-cache@11.2.2: {} + + lunr@2.3.9: {} + + make-error@1.3.6: {} + + mark.js@8.11.1: {} + + marked@4.3.0: {} + + math-intrinsics@1.1.0: {} + + mime-db@1.52.0: {} + + mime-types@2.1.35: + dependencies: + mime-db: 1.52.0 + + minimatch@10.0.3: + dependencies: + '@isaacs/brace-expansion': 5.0.0 + + minimatch@5.1.6: + dependencies: + brace-expansion: 2.0.2 + + minimist@1.2.8: {} + + minipass@7.1.2: {} + + mobx-react-lite@4.1.1(mobx@6.15.0)(react-dom@19.2.0(react@19.2.0))(react@19.2.0): + dependencies: + mobx: 6.15.0 + react: 19.2.0 + use-sync-external-store: 1.6.0(react@19.2.0) + optionalDependencies: + react-dom: 19.2.0(react@19.2.0) + + mobx-react@9.2.0(mobx@6.15.0)(react-dom@19.2.0(react@19.2.0))(react@19.2.0): + dependencies: + mobx: 6.15.0 + mobx-react-lite: 4.1.1(mobx@6.15.0)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + react: 19.2.0 + optionalDependencies: + react-dom: 19.2.0(react@19.2.0) + + mobx@6.15.0: {} + + ms@2.1.3: {} + + nanoid@3.3.11: {} + + neo-async@2.6.2: {} + + node-fetch-h2@2.3.0: + dependencies: + http2-client: 1.3.5 + + node-fetch@2.7.0: + dependencies: + whatwg-url: 5.0.0 + + node-readfiles@0.2.0: + dependencies: + es6-promise: 3.3.1 + + normalize-path@3.0.0: {} + + nth-check@2.1.1: + dependencies: + boolbase: 1.0.0 + + oas-kit-common@1.0.8: + dependencies: + fast-safe-stringify: 2.1.1 + + oas-linter@3.2.2: + dependencies: + '@exodus/schemasafe': 1.3.0 + should: 13.2.3 + yaml: 1.10.2 + + oas-resolver@2.5.6: + dependencies: + node-fetch-h2: 2.3.0 + oas-kit-common: 1.0.8 + reftools: 1.1.9 + yaml: 1.10.2 + yargs: 17.0.1 + + oas-schema-walker@1.1.5: {} + + oas-validator@5.0.8: + dependencies: + call-me-maybe: 1.0.2 + oas-kit-common: 1.0.8 + oas-linter: 3.2.2 + oas-resolver: 2.5.6 + oas-schema-walker: 1.1.5 + reftools: 1.1.9 + should: 13.2.3 + yaml: 1.10.2 + + object-assign@4.1.1: {} + + openapi-sampler@1.6.1: + dependencies: + '@types/json-schema': 7.0.15 + fast-xml-parser: 4.5.3 + json-pointer: 0.6.2 + + outdent@0.8.0: {} + + package-json-from-dist@1.0.1: {} + + parse5-htmlparser2-tree-adapter@7.1.0: + dependencies: + domhandler: 5.0.3 + parse5: 7.3.0 + + parse5-parser-stream@7.1.2: + dependencies: + parse5: 7.3.0 + + parse5@7.3.0: + dependencies: + entities: 6.0.1 + + path-browserify@1.0.1: {} + + path-key@3.1.1: {} + + path-scurry@2.0.0: + dependencies: + lru-cache: 11.2.2 + minipass: 7.1.2 + + perfect-scrollbar@1.5.6: {} + + picocolors@1.1.1: {} + + picomatch@2.3.1: {} + + pluralize@8.0.0: {} + + polished@4.3.1: + dependencies: + '@babel/runtime': 7.28.4 + + postcss-value-parser@4.2.0: {} + + postcss@8.4.49: + dependencies: + nanoid: 3.3.11 + picocolors: 1.1.1 + source-map-js: 1.2.1 + + pretty-format@29.7.0: + dependencies: + '@jest/schemas': 29.6.3 + ansi-styles: 5.2.0 + react-is: 18.3.1 + + prismjs@1.30.0: {} + + prop-types@15.8.1: + dependencies: + loose-envify: 1.4.0 + object-assign: 4.1.1 + 
react-is: 16.13.1 + + protobufjs@7.5.4: + dependencies: + '@protobufjs/aspromise': 1.1.2 + '@protobufjs/base64': 1.1.2 + '@protobufjs/codegen': 2.0.4 + '@protobufjs/eventemitter': 1.1.0 + '@protobufjs/fetch': 1.1.0 + '@protobufjs/float': 1.0.2 + '@protobufjs/inquire': 1.1.0 + '@protobufjs/path': 1.1.2 + '@protobufjs/pool': 1.1.0 + '@protobufjs/utf8': 1.1.0 + '@types/node': 24.3.1 + long: 5.3.2 + + queue-microtask@1.2.3: {} + + randombytes@2.1.0: + dependencies: + safe-buffer: 5.2.1 + + react-dom@19.2.0(react@19.2.0): + dependencies: + react: 19.2.0 + scheduler: 0.27.0 + + react-is@16.13.1: {} + + react-is@18.3.1: {} + + react-tabs@6.1.0(react@19.2.0): + dependencies: + clsx: 2.1.1 + prop-types: 15.8.1 + react: 19.2.0 + + react@19.2.0: {} + + readable-stream@3.6.2: + dependencies: + inherits: 2.0.4 + string_decoder: 1.3.0 + util-deprecate: 1.0.2 + + readdirp@3.6.0: + dependencies: + picomatch: 2.3.1 + + redoc@2.5.1(core-js@3.45.1)(mobx@6.15.0)(react-dom@19.2.0(react@19.2.0))(react@19.2.0)(styled-components@6.1.19(react-dom@19.2.0(react@19.2.0))(react@19.2.0)): + dependencies: + '@redocly/openapi-core': 1.34.5 + classnames: 2.5.1 + core-js: 3.45.1 + decko: 1.2.0 + dompurify: 3.2.7 + eventemitter3: 5.0.1 + json-pointer: 0.6.2 + lunr: 2.3.9 + mark.js: 8.11.1 + marked: 4.3.0 + mobx: 6.15.0 + mobx-react: 9.2.0(mobx@6.15.0)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + openapi-sampler: 1.6.1 + path-browserify: 1.0.1 + perfect-scrollbar: 1.5.6 + polished: 4.3.1 + prismjs: 1.30.0 + prop-types: 15.8.1 + react: 19.2.0 + react-dom: 19.2.0(react@19.2.0) + react-tabs: 6.1.0(react@19.2.0) + slugify: 1.4.7 + stickyfill: 1.1.1 + styled-components: 6.1.19(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + swagger2openapi: 7.0.8 + url-template: 2.0.8 + transitivePeerDependencies: + - encoding + - react-native + - supports-color + + reftools@1.1.9: {} + + require-directory@2.1.1: {} + + require-from-string@2.0.2: {} + + safe-buffer@5.2.1: {} + + safer-buffer@2.1.2: {} + + scheduler@0.27.0: {} + + semver@7.7.3: {} + + set-cookie-parser@2.7.1: {} + + shallowequal@1.1.0: {} + + shebang-command@2.0.0: + dependencies: + shebang-regex: 3.0.0 + + shebang-regex@3.0.0: {} + + should-equal@2.0.0: + dependencies: + should-type: 1.4.0 + + should-format@3.0.3: + dependencies: + should-type: 1.4.0 + should-type-adaptors: 1.1.0 + + should-type-adaptors@1.1.0: + dependencies: + should-type: 1.4.0 + should-util: 1.0.1 + + should-type@1.4.0: {} + + should-util@1.0.1: {} + + should@13.2.3: + dependencies: + should-equal: 2.0.0 + should-format: 3.0.3 + should-type: 1.4.0 + should-type-adaptors: 1.1.0 + should-util: 1.0.1 + + signal-exit@4.1.0: {} + + simple-websocket@9.1.0: + dependencies: + debug: 4.4.3 + queue-microtask: 1.2.3 + randombytes: 2.1.0 + readable-stream: 3.6.2 + ws: 7.5.10 + transitivePeerDependencies: + - bufferutil + - supports-color + - utf-8-validate + + slugify@1.4.7: {} + + source-map-js@1.2.1: {} + + source-map@0.6.1: {} + + stickyfill@1.1.1: {} + + string-width@4.2.3: + dependencies: + emoji-regex: 8.0.0 + is-fullwidth-code-point: 3.0.0 + strip-ansi: 6.0.1 + + string-width@5.1.2: + dependencies: + eastasianwidth: 0.2.0 + emoji-regex: 9.2.2 + strip-ansi: 7.1.2 + + string_decoder@1.3.0: + dependencies: + safe-buffer: 5.2.1 + + strip-ansi@6.0.1: + dependencies: + ansi-regex: 5.0.1 + + strip-ansi@7.1.2: + dependencies: + ansi-regex: 6.2.2 + + strnum@1.1.2: {} + + styled-components@6.1.19(react-dom@19.2.0(react@19.2.0))(react@19.2.0): + dependencies: + '@emotion/is-prop-valid': 1.2.2 + '@emotion/unitless': 0.8.1 
+ '@types/stylis': 4.2.5 + css-to-react-native: 3.2.0 + csstype: 3.1.3 + postcss: 8.4.49 + react: 19.2.0 + react-dom: 19.2.0(react@19.2.0) + shallowequal: 1.1.0 + stylis: 4.3.2 + tslib: 2.6.2 + + stylis@4.3.2: {} + + supports-color@7.2.0: + dependencies: + has-flag: 4.0.0 + + swagger2openapi@7.0.8: + dependencies: + call-me-maybe: 1.0.2 + node-fetch: 2.7.0 + node-fetch-h2: 2.3.0 + node-readfiles: 0.2.0 + oas-kit-common: 1.0.8 + oas-resolver: 2.5.6 + oas-schema-walker: 1.1.5 + oas-validator: 5.0.8 + reftools: 1.1.9 + yaml: 1.10.2 + yargs: 17.0.1 + transitivePeerDependencies: + - encoding + + to-regex-range@5.0.1: + dependencies: + is-number: 7.0.0 + + tr46@0.0.3: {} + + ts-algebra@1.2.2: {} + + ts-node@10.9.2(@types/node@24.3.1)(typescript@5.9.2): + dependencies: + '@cspotcode/source-map-support': 0.8.1 + '@tsconfig/node10': 1.0.11 + '@tsconfig/node12': 1.0.11 + '@tsconfig/node14': 1.0.3 + '@tsconfig/node16': 1.0.4 + '@types/node': 24.3.1 + acorn: 8.15.0 + acorn-walk: 8.3.4 + arg: 4.1.3 + create-require: 1.1.1 + diff: 4.0.2 + make-error: 1.3.6 + typescript: 5.9.2 + v8-compile-cache-lib: 3.0.1 + yn: 3.1.1 + + tslib@2.6.2: {} + + typescript@5.9.2: {} + + uglify-js@3.19.3: + optional: true + + undici-types@7.10.0: {} + + undici@6.22.0: {} + + undici@7.15.0: {} + + uri-js-replace@1.0.1: {} + + url-template@2.0.8: {} + + use-sync-external-store@1.6.0(react@19.2.0): + dependencies: + react: 19.2.0 + + util-deprecate@1.0.2: {} + + v8-compile-cache-lib@3.0.1: {} + + webidl-conversions@3.0.1: {} + + whatwg-encoding@3.1.1: + dependencies: + iconv-lite: 0.6.3 + + whatwg-mimetype@4.0.0: {} + + whatwg-url@5.0.0: + dependencies: + tr46: 0.0.3 + webidl-conversions: 3.0.1 + + which@2.0.2: + dependencies: + isexe: 2.0.0 + + wordwrap@1.0.0: {} + + wrap-ansi@7.0.0: + dependencies: + ansi-styles: 4.3.0 + string-width: 4.2.3 + strip-ansi: 6.0.1 + + wrap-ansi@8.1.0: + dependencies: + ansi-styles: 6.2.3 + string-width: 5.1.2 + strip-ansi: 7.1.2 + + ws@7.5.10: {} + + y18n@5.0.8: {} + + yaml-ast-parser@0.0.43: {} + + yaml@1.10.2: {} + + yargs-parser@20.2.9: {} + + yargs@17.0.1: + dependencies: + cliui: 7.0.4 + escalade: 3.2.0 + get-caller-file: 2.0.5 + require-directory: 2.1.1 + string-width: 4.2.3 + y18n: 5.0.8 + yargs-parser: 20.2.9 + + yn@3.1.1: {} diff --git a/services/groupware/pnpm-workspace.yaml b/services/groupware/pnpm-workspace.yaml new file mode 100644 index 0000000000..54168a08ef --- /dev/null +++ b/services/groupware/pnpm-workspace.yaml @@ -0,0 +1,3 @@ +onlyBuiltDependencies: + - core-js + - protobufjs diff --git a/services/groupware/tsconfig.json b/services/groupware/tsconfig.json new file mode 100644 index 0000000000..526e5ef48b --- /dev/null +++ b/services/groupware/tsconfig.json @@ -0,0 +1,10 @@ +{ + "ts-node": { + "transpileOnly": true, + "compilerOptions": { + "module": "ESNext", + "esModuleInterop": true + }, + "esm": true + } +} diff --git a/services/proxy/pkg/config/defaults/defaultconfig.go b/services/proxy/pkg/config/defaults/defaultconfig.go index 9e0985003a..299ebe9e57 100644 --- a/services/proxy/pkg/config/defaults/defaultconfig.go +++ b/services/proxy/pkg/config/defaults/defaultconfig.go @@ -291,6 +291,16 @@ func DefaultPolicies() []config.Policy { Unprotected: true, SkipXAccessToken: true, }, + { + Endpoint: "/groupware", + Service: "eu.opencloud.web.groupware", + }, + { + Endpoint: "/auth", + Service: "eu.opencloud.web.auth-api", + Unprotected: true, + SkipXAccessToken: true, + }, }, }, } diff --git a/tests/groupware/.gitignore b/tests/groupware/.gitignore new file mode 100644 index 
0000000000..3340cfac37 --- /dev/null +++ b/tests/groupware/.gitignore @@ -0,0 +1 @@ +/users.csv diff --git a/tests/groupware/go.mod b/tests/groupware/go.mod new file mode 100644 index 0000000000..43a51642e1 --- /dev/null +++ b/tests/groupware/go.mod @@ -0,0 +1,36 @@ +module opencloud.eu/groupware/tests + +go 1.24.2 + +require github.com/go-ldap/ldap/v3 v3.4.11 + +require ( + github.com/cention-sany/utf7 v0.0.0-20170124080048-26cad61bd60a // indirect + github.com/emersion/go-message v0.18.1 // indirect + github.com/emersion/go-sasl v0.0.0-20231106173351-e73c9f7bad43 // indirect + github.com/gogs/chardet v0.0.0-20211120154057-b7413eaefb8f // indirect + github.com/jaytaylor/html2text v0.0.0-20230321000545-74c2419ad056 // indirect + github.com/mattn/go-colorable v0.1.13 // indirect + github.com/mattn/go-isatty v0.0.19 // indirect + github.com/mattn/go-runewidth v0.0.15 // indirect + github.com/olekukonko/tablewriter v0.0.5 // indirect + github.com/pkg/errors v0.9.1 // indirect + github.com/rivo/uniseg v0.4.4 // indirect + github.com/ssor/bom v0.0.0-20170718123548-6386211fdfcf // indirect + golang.org/x/net v0.38.0 // indirect + golang.org/x/sys v0.31.0 // indirect + golang.org/x/text v0.24.0 // indirect +) + +require ( + github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358 // indirect + github.com/dustinkirkland/golang-petname v0.0.0-20240428194347-eebcea082ee0 + github.com/emersion/go-imap/v2 v2.0.0-beta.5 + github.com/go-asn1-ber/asn1-ber v1.5.8-0.20250403174932-29230038a667 // indirect + github.com/go-faker/faker/v4 v4.6.1 + github.com/google/uuid v1.6.0 // indirect + github.com/jhillyerd/enmime v1.3.0 + github.com/rs/zerolog v1.34.0 + golang.org/x/crypto v0.36.0 // indirect + gopkg.in/loremipsum.v1 v1.1.2 +) diff --git a/tests/groupware/go.sum b/tests/groupware/go.sum new file mode 100644 index 0000000000..d2d837f8be --- /dev/null +++ b/tests/groupware/go.sum @@ -0,0 +1,116 @@ +github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358 h1:mFRzDkZVAjdal+s7s0MwaRv9igoPqLRdzOLzw/8Xvq8= +github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358/go.mod h1:chxPXzSsl7ZWRAuOIE23GDNzjWuZquvFlgA8xmpunjU= +github.com/alexbrainman/sspi v0.0.0-20231016080023-1a75b4708caa h1:LHTHcTQiSGT7VVbI0o4wBRNQIgn917usHWOd6VAffYI= +github.com/alexbrainman/sspi v0.0.0-20231016080023-1a75b4708caa/go.mod h1:cEWa1LVoE5KvSD9ONXsZrj0z6KqySlCCNKHlLzbqAt4= +github.com/cention-sany/utf7 v0.0.0-20170124080048-26cad61bd60a h1:MISbI8sU/PSK/ztvmWKFcI7UGb5/HQT7B+i3a2myKgI= +github.com/cention-sany/utf7 v0.0.0-20170124080048-26cad61bd60a/go.mod h1:2GxOXOlEPAMFPfp014mK1SWq8G8BN8o7/dfYqJrVGn8= +github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/dustinkirkland/golang-petname v0.0.0-20240428194347-eebcea082ee0 h1:aYo8nnk3ojoQkP5iErif5Xxv0Mo0Ga/FR5+ffl/7+Nk= +github.com/dustinkirkland/golang-petname v0.0.0-20240428194347-eebcea082ee0/go.mod h1:8AuBTZBRSFqEYBPYULd+NN474/zZBLP+6WeT5S9xlAc= +github.com/emersion/go-imap/v2 v2.0.0-beta.5 h1:H3858DNmBuXyMK1++YrQIRdpKE1MwBc+ywBtg3n+0wA= +github.com/emersion/go-imap/v2 v2.0.0-beta.5/go.mod h1:BZTFHsS1hmgBkFlHqbxGLXk2hnRqTItUgwjSSCsYNAk= +github.com/emersion/go-message v0.18.1 h1:tfTxIoXFSFRwWaZsgnqS1DSZuGpYGzSmCZD8SK3QA2E= +github.com/emersion/go-message v0.18.1/go.mod h1:XpJyL70LwRvq2a8rVbHXikPgKj8+aI0kGdHlg16ibYA= +github.com/emersion/go-sasl 
v0.0.0-20231106173351-e73c9f7bad43 h1:hH4PQfOndHDlpzYfLAAfl63E8Le6F2+EL/cdhlkyRJY= +github.com/emersion/go-sasl v0.0.0-20231106173351-e73c9f7bad43/go.mod h1:iL2twTeMvZnrg54ZoPDNfJaJaqy0xIQFuBdrLsmspwQ= +github.com/go-asn1-ber/asn1-ber v1.5.8-0.20250403174932-29230038a667 h1:BP4M0CvQ4S3TGls2FvczZtj5Re/2ZzkV9VwqPHH/3Bo= +github.com/go-asn1-ber/asn1-ber v1.5.8-0.20250403174932-29230038a667/go.mod h1:hEBeB/ic+5LoWskz+yKT7vGhhPYkProFKoKdwZRWMe0= +github.com/go-faker/faker/v4 v4.6.1 h1:xUyVpAjEtB04l6XFY0V/29oR332rOSPWV4lU8RwDt4k= +github.com/go-faker/faker/v4 v4.6.1/go.mod h1:arSdxNCSt7mOhdk8tEolvHeIJ7eX4OX80wXjKKvkKBY= +github.com/go-ldap/ldap/v3 v3.4.11 h1:4k0Yxweg+a3OyBLjdYn5OKglv18JNvfDykSoI8bW0gU= +github.com/go-ldap/ldap/v3 v3.4.11/go.mod h1:bY7t0FLK8OAVpp/vV6sSlpz3EQDGcQwc8pF0ujLgKvM= +github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= +github.com/gogs/chardet v0.0.0-20211120154057-b7413eaefb8f h1:3BSP1Tbs2djlpprl7wCLuiqMaUh5SJkkzI2gDs+FgLs= +github.com/gogs/chardet v0.0.0-20211120154057-b7413eaefb8f/go.mod h1:Pcatq5tYkCW2Q6yrR2VRHlbHpZ/R4/7qyL1TCF7vl14= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/hashicorp/go-uuid v1.0.3 h1:2gKiV6YVmrJ1i2CKKa9obLvRieoRGviZFL26PcT/Co8= +github.com/hashicorp/go-uuid v1.0.3/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/jaytaylor/html2text v0.0.0-20230321000545-74c2419ad056 h1:iCHtR9CQyktQ5+f3dMVZfwD2KWJUgm7M0gdL9NGr8KA= +github.com/jaytaylor/html2text v0.0.0-20230321000545-74c2419ad056/go.mod h1:CVKlgaMiht+LXvHG173ujK6JUhZXKb2u/BQtjPDIvyk= +github.com/jcmturner/aescts/v2 v2.0.0 h1:9YKLH6ey7H4eDBXW8khjYslgyqG2xZikXP0EQFKrle8= +github.com/jcmturner/aescts/v2 v2.0.0/go.mod h1:AiaICIRyfYg35RUkr8yESTqvSy7csK90qZ5xfvvsoNs= +github.com/jcmturner/dnsutils/v2 v2.0.0 h1:lltnkeZGL0wILNvrNiVCR6Ro5PGU/SeBvVO/8c/iPbo= +github.com/jcmturner/dnsutils/v2 v2.0.0/go.mod h1:b0TnjGOvI/n42bZa+hmXL+kFJZsFT7G4t3HTlQ184QM= +github.com/jcmturner/gofork v1.7.6 h1:QH0l3hzAU1tfT3rZCnW5zXl+orbkNMMRGJfdJjHVETg= +github.com/jcmturner/gofork v1.7.6/go.mod h1:1622LH6i/EZqLloHfE7IeZ0uEJwMSUyQ/nDd82IeqRo= +github.com/jcmturner/goidentity/v6 v6.0.1 h1:VKnZd2oEIMorCTsFBnJWbExfNN7yZr3EhJAxwOkZg6o= +github.com/jcmturner/goidentity/v6 v6.0.1/go.mod h1:X1YW3bgtvwAXju7V3LCIMpY0Gbxyjn/mY9zx4tFonSg= +github.com/jcmturner/gokrb5/v8 v8.4.4 h1:x1Sv4HaTpepFkXbt2IkL29DXRf8sOfZXo8eRKh687T8= +github.com/jcmturner/gokrb5/v8 v8.4.4/go.mod h1:1btQEpgT6k+unzCwX1KdWMEwPPkkgBtP+F6aCACiMrs= +github.com/jcmturner/rpc/v2 v2.0.3 h1:7FXXj8Ti1IaVFpSAziCZWNzbNuZmnvw/i6CqLNdWfZY= +github.com/jcmturner/rpc/v2 v2.0.3/go.mod h1:VUJYCIDm3PVOEHw8sgt091/20OJjskO/YJki3ELg/Hc= +github.com/jhillyerd/enmime v1.3.0 h1:LV5kzfLidiOr8qRGIpYYmUZCnhrPbcFAnAFUnWn99rw= +github.com/jhillyerd/enmime v1.3.0/go.mod h1:6c6jg5HdRRV2FtvVL69LjiX1M8oE0xDX9VEhV3oy4gs= +github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= +github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= +github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= +github.com/mattn/go-isatty v0.0.19 h1:JITubQf0MOLdlGRuRq+jtsDlekdYPia9ZFsB8h/APPA= +github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI= +github.com/mattn/go-runewidth v0.0.15 
h1:UNAjwbU9l54TA3KzvqLGxwWjHmMgBUVhBiTjelZgg3U= +github.com/mattn/go-runewidth v0.0.15/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= +github.com/olekukonko/tablewriter v0.0.5 h1:P2Ga83D34wi1o9J6Wh1mRuqd4mF/x/lgBS7N7AbDhec= +github.com/olekukonko/tablewriter v0.0.5/go.mod h1:hPp6KlRPjbx+hW8ykQs1w3UBbZlj6HuIJcUGPhkA7kY= +github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= +github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= +github.com/rivo/uniseg v0.4.4 h1:8TfxU8dW6PdqD27gjM8MVNuicgxIjxpm4K7x4jp8sis= +github.com/rivo/uniseg v0.4.4/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88= +github.com/rs/xid v1.6.0/go.mod h1:7XoLgs4eV+QndskICGsho+ADou8ySMSjJKDIan90Nz0= +github.com/rs/zerolog v1.34.0 h1:k43nTLIwcTVQAncfCw4KZ2VY6ukYoZaBPNOE8txlOeY= +github.com/rs/zerolog v1.34.0/go.mod h1:bJsvje4Z08ROH4Nhs5iH600c3IkWhwp44iRc54W6wYQ= +github.com/ssor/bom v0.0.0-20170718123548-6386211fdfcf h1:pvbZ0lM0XWPBqUKqFU8cmavspvIl9nulOYwdy6IFRRo= +github.com/ssor/bom v0.0.0-20170718123548-6386211fdfcf/go.mod h1:RJID2RhlZKId02nZ62WenDCkgHFerpIOmW0iT7GKmXM= +github.com/stretchr/testify v1.8.1 h1:w7B6lhMri9wdJUVmEZPGGhZzrYTPvgJArz7wNPgYKsk= +github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.36.0 h1:AnAEvhDddvBdpY+uR+MyHmuZzzNqXSe/GvuDeob5L34= +golang.org/x/crypto v0.36.0/go.mod h1:Y4J0ReaxCR1IMaabaSMugxJES1EpwhBHhv2bDHklZvc= +golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= +golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.38.0 h1:vRMAPTMaeGqVhG5QyLJHqNDwecKTomGeqbnfZyKlBI8= +golang.org/x/net v0.38.0/go.mod h1:ivrbrMbzFq5J41QOQh0siUuly180yBYtLp+CKbEaFx8= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys 
v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.31.0 h1:ioabZlmFYtWhL+TRYpcnNlLwhyxaM9kWTDEmfnprqik= +golang.org/x/sys v0.31.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +golang.org/x/text v0.24.0 h1:dd5Bzh4yt5KYA8f9CJHCP4FB4D51c2c6JvN37xJJkJ0= +golang.org/x/text v0.24.0/go.mod h1:L8rBsPeo2pSS+xqN0d5u2ikmjtmoJbDBT1b7nHvFCdU= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= +golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +gopkg.in/loremipsum.v1 v1.1.2 h1:12APklfJKuGszqZsrArW5QoQh03/W+qyCCjvnDuS6Tw= +gopkg.in/loremipsum.v1 v1.1.2/go.mod h1:TuRvzFuzuejXj+odBU6Tubp/EPUyGb9wmSvHenyP2Ts= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/tests/groupware/groupware.ts b/tests/groupware/groupware.ts new file mode 100644 index 0000000000..c697c55574 --- /dev/null +++ b/tests/groupware/groupware.ts @@ -0,0 +1,241 @@ +import { SharedArray } from 'k6/data' +import http from 'k6/http' +import encoding from 'k6/encoding' +import exec from 'k6/execution' + +import { randomItem } from 'https://jslib.k6.io/k6-utils/1.2.0/index.js' +import papaparse from 'https://jslib.k6.io/papaparse/5.1.1/index.js' +import { URL } from 'https://jslib.k6.io/url/1.0.0/index.js' +import { check, fail, group } from 'k6' +import { Counter } from 'k6/metrics' + +export const options = { + noConnectionReuse: true, + noVUConnectionReuse: true, + insecureSkipTLSVerify: true, + scenarios: { + rampup: { + executor: 'ramping-vus', + startVUs: 0, + stages: [ + { target: 50, duration: '30s' }, + { target: 75, duration: '30s' }, + { target: 100, duration: '60s' }, + { target: 50, duration: '20s' }, + ], + gracefulRampDown: '10s', + }, + }, +} + +const TEST_USER_NAMES: string|undefined = __ENV.TEST_USER_NAMES +const TEST_USER_PASSWORD: string = __ENV.TEST_USER_PASSWORD ?? 'demo' +const TEST_USER_DOMAIN: string = __ENV.TEST_USER_DOMAIN ?? 'example.org' +const CLOUD_URL: string = __ENV.CLOUD_URL ?? 
'https://cloud.opencloud.test' +const KEYCLOAK_URL: string = __ENV.KEYCLOAK_URL ?? 'https://keycloak.opencloud.test/realms/openCloud' +const KEYCLOAK_CLIENT_ID: string = __ENV.KEYCLOAK_CLIENT_ID ?? 'groupware' +const USERS_FILE: string = __ENV.USERS_FILE ?? 'users.csv' +const JWT_EXPIRATION_THRESHOLD_SECONDS: number = parseInt(__ENV.JWT_EXPIRATION_THRESHOLD_SECONDS ?? '2') + +type JwtHeader = { + alg: string + typ: string + kid: string +} + +type JwtPayload = { + exp: number + iat: number +} + +type Jwt = { + header: JwtHeader + payload: JwtPayload + signature: string +} + +function decodeJwt(token: string): Jwt { + const parts = token.split('.') + const header = JSON.parse(encoding.b64decode(parts[0], 'rawurl', 's')) as JwtHeader + const payload = JSON.parse(encoding.b64decode(parts[1], 'rawurl', 's')) as JwtPayload + const signature = encoding.b64decode(parts[2], 'rawurl', 's') + return {header: header, payload: payload, signature: signature} as Jwt +} + +type User = { + name: string + password: string + mail: string +} + +type Identity = { + id: string + name: string + email: string + replyTo: string | undefined + bcc: string | undefined + textSignature: string | undefined + htmlSignature: string | undefined + mayDelete: boolean +} + +type IdentityGetResponse = { + accountId: string + state: string + list: Identity[] + notFound: string[] | undefined +} + +type VacationResponseGetResponse = { + accountId: string + state: string + notFound: string[] +} + +type EmailAddress = { + name: string | undefined + address: string +} + +type Message = { + '@odata.etag': string + id: string + createdDateTime: string + receivedDateTime: string + sentDateTime: string + internetMessageId: string + subject: string + bodyPreview: string + from: EmailAddress | undefined + toRecipients: EmailAddress[] + ccRecipients: EmailAddress[] + parentFolderId: string + conversationId: string + webLink: string +} + +type Messages = { + '@odata.context': string + value: Message[] +} + +function token(user: User): string { + const res = http.post(`${KEYCLOAK_URL}/protocol/openid-connect/token`, { + client_id: KEYCLOAK_CLIENT_ID, + scope: 'openid', + grant_type: 'password', + username: user.name, + password: user.password, + }) + if (res.status !== 200) { + fail(`failed to retrieve token for ${user.name}: ${res.status} ${res.status_text}`) + } + const accessToken = res.json('access_token')?.toString() + if (accessToken === undefined) { + fail(`access token is empty for ${user.name}`) + } else { + return accessToken + } +} + +function authenticate(user: User): Auth { + const raw = token(user) + const jwt = decodeJwt(raw) + return {raw: raw, jwt: jwt} as Auth +} + +const users: User[] = new SharedArray('users', function () { + if (TEST_USER_NAMES) { + return TEST_USER_NAMES.split(',').map((name) => { return {name: name, password: TEST_USER_PASSWORD, mail: `${name}@${TEST_USER_DOMAIN}`} as User }) + } else { + return papaparse.parse(open(USERS_FILE), { header: true, skipEmptyLines: true, }).data.map((row:object) => row as User) + } +}) + +type Auth = { + raw: string + jwt: Jwt +} + +type TestData = { + auth: object +} + +export function setup(): TestData { + const auth = {} + for (const user of users) { + const a = authenticate(user) + auth[user.name] = a + } + return { + auth: auth, + } as TestData +} + +const stalwartIdRegex = /^[0-9a-z]+$/ + +export default function testSuite(data: TestData) { + const user = randomItem(users) as User + let auth = data.auth[user.name] + + if (auth === undefined) { + fail(`missing 
authentication for user ${user.name}`) + } + const now = Math.floor(Date.now() / 1000) + if (auth.jwt.payload.exp - now < JWT_EXPIRATION_THRESHOLD_SECONDS) { + exec.test.abort(`token is expired for ${user.name}, need to renew`) + } + + group('retrieve user identity using /me/identity', () => { + const res = http.get(`${CLOUD_URL}/graph/v1.0/me/identity`, {headers: {Authorization: `Bearer ${auth.raw}`}}) + check(res, { + 'is status 200': (r) => r.status === 200, + }); + + const response = res.json() as IdentityGetResponse + check(response, { + 'identity response has an accountId': r => r.accountId !== undefined && stalwartIdRegex.test(r.accountId), + 'identity response has a state': r => r.state !== undefined && stalwartIdRegex.test(r.state), + 'identity response has an empty notFound': r => r.notFound === undefined, + 'identity response has one identity item in its list': r => r.list && r.list.length === 1, + 'identity response has one identity item with an id': r => r.list && r.list.length === 1 && stalwartIdRegex.test(r.list[0].id), + 'identity response has one identity item with a name': r => r.list && r.list.length === 1 && r.list[0].name !== undefined, + 'identity response has one identity item with the expected email': r => r.list && r.list.length === 1 && r.list[0].email === user.mail, + 'identity response has one identity item with mayDelete=true': r => r.list && r.list.length === 1 && r.list[0].mayDelete === true, + 'identity response has one identity item with an empty replyTo': r => r.list && r.list.length === 1 && r.list[0].replyTo === undefined, + 'identity response has one identity item with an empty bcc': r => r.list && r.list.length === 1 && r.list[0].bcc === undefined, + 'identity response has one identity item with an empty textSignature': r => r.list && r.list.length === 1 && r.list[0].textSignature === undefined, + 'identity response has one identity item with an empty htmlSignature': r => r.list && r.list.length === 1 && r.list[0].htmlSignature === undefined, + }) + }) + + group('retrieve user vacationresponse using /me/vacation', () => { + const res = http.get(`${CLOUD_URL}/graph/v1.0/me/vacation`, {headers: {Authorization: `Bearer ${auth.raw}`}}) + check(res, { + 'is status 200': (r) => r.status === 200, + }); + + const response = res.json() as VacationResponseGetResponse + check(response, { + 'vacation response has an accountId': r => r.accountId !== undefined && stalwartIdRegex.test(r.accountId), + 'vacation response has a state': r => r.state !== undefined && stalwartIdRegex.test(r.state), + 'vacation response has a notFound that only contains "singleton"': r => r.notFound && r.notFound.length == 1 && r.notFound[0] == 'singleton', + }) + }) + + group('retrieve user top message using /me/messages', () => { + const url = new URL(`${CLOUD_URL}/graph/v1.0/me/messages`) + url.searchParams.append('$top', '1') + const res = http.get(url.toString(), {headers: {Authorization: `Bearer ${auth.raw}`}}) + check(res, { + 'is status 200': (r) => r.status === 200, + }); + + const response = res.json() as Messages + check(response, { + 'messages has a context': r => r['@odata.context'] !== undefined, + 'messages has a value with a length of 0 or 1': r => r.value !== undefined && (r.value.length === 0 || r.value.length === 1), + 'if there is a message, it has a subject': r => r.value !== undefined && (r.value.length === 0 || r.value[0].subject !== ''), + }) + }) +} diff --git a/tests/groupware/package-lock.json b/tests/groupware/package-lock.json new file mode 100644 index 
0000000000..2ad53896e1 --- /dev/null +++ b/tests/groupware/package-lock.json @@ -0,0 +1,23 @@ +{ + "name": "groupware", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "groupware", + "version": "1.0.0", + "license": "ASL", + "devDependencies": { + "@types/k6": "^1.0.2" + } + }, + "node_modules/@types/k6": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@types/k6/-/k6-1.0.2.tgz", + "integrity": "sha512-bpja1c7OXIJk06aPGN+Aw5f3QhP0PjvgX2Fwfa3rJUaUI+O1ZE3491g9hMjhH21ZlTaAhz7h6veyxaz/RqPUTg==", + "dev": true, + "license": "MIT" + } + } +} diff --git a/tests/groupware/package.json b/tests/groupware/package.json new file mode 100644 index 0000000000..a350fe477d --- /dev/null +++ b/tests/groupware/package.json @@ -0,0 +1,16 @@ +{ + "name": "groupware", + "version": "1.0.0", + "main": "groupware.ts", + "type": "module", + "scripts": { + "test": "k6 run groupware.ts" + }, + "keywords": [], + "author": "Pascal Bleser ", + "license": "ASL", + "description": "", + "devDependencies": { + "@types/k6": "^1.0.2" + } +} diff --git a/tests/groupware/setup.go b/tests/groupware/setup.go new file mode 100644 index 0000000000..94cba4aaa4 --- /dev/null +++ b/tests/groupware/setup.go @@ -0,0 +1,568 @@ +package main + +import ( + "bytes" + crand "crypto/rand" + "crypto/sha1" + "crypto/tls" + "encoding/base64" + "encoding/csv" + "encoding/json" + "fmt" + "io" + "math/rand" + "os" + "path" + "regexp" + "slices" + "strconv" + "strings" + "time" + + "net/http" + "net/mail" + "net/url" + + petname "github.com/dustinkirkland/golang-petname" + "github.com/emersion/go-imap/v2" + "github.com/emersion/go-imap/v2/imapclient" + "github.com/go-faker/faker/v4" + "github.com/go-ldap/ldap/v3" + "github.com/jhillyerd/enmime" + "github.com/rs/zerolog" + "github.com/rs/zerolog/log" + "golang.org/x/text/cases" + "golang.org/x/text/language" + "gopkg.in/loremipsum.v1" +) + +var usersToKeep = []string{"lynn", "alan", "mary", "margaret"} + +const displayNameMark = "$generated" + +func enabled(value string) bool { + value = strings.ToLower(value) + return value == "true" || value == "on" || value == "1" +} + +func config(key string, defaultValue string) string { + value, ok := os.LookupEnv(key) + if ok { + return value + } else { + return defaultValue + } +} + +func iconfig(log *zerolog.Logger, key string, defaultValue int) int { + value, ok := os.LookupEnv(key) + if ok { + result, err := strconv.Atoi(value) + if err != nil { + log.Fatal().Msgf("invalid value for %v is not numeric: '%v'", key, value) + panic(err) + } else { + return result + } + } else { + return defaultValue + } +} + +func hashPassword(clear string, saltSize int) string { + salt := make([]byte, saltSize) + crand.Read(salt) + sha := sha1.New() + sha.Write([]byte(clear)) + sha.Write([]byte(salt)) + digest := sha.Sum(nil) + combined := append(digest, salt...) + return "{SSHA}" + base64.StdEncoding.EncodeToString(combined) +} + +const passwordCharset = "abcdefghijklmnopqrstuvwxyz" + "ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789" + "0123456789" + +var seededRand *rand.Rand = rand.New(rand.NewSource(time.Now().UnixNano())) + +func randomPassword() string { + length := 8 + rand.Intn(32) + b := make([]byte, length) + for i := range b { + b[i] = passwordCharset[seededRand.Intn(len(passwordCharset))] + } + return string(b) +} + +func htmlJoin(parts []string) []string { + var result []string + for i := range parts { + result = append(result, fmt.Sprintf("

<p>%v</p>
", parts[i])) + } + return result +} + +var paraSplitter = regexp.MustCompile("[\r\n]+") + +func htmlFormat(body string, msg enmime.MailBuilder) enmime.MailBuilder { + return msg.HTML([]byte(strings.Join(htmlJoin(paraSplitter.Split(body, -1)), "\n"))) +} + +func textFormat(body string, msg enmime.MailBuilder) enmime.MailBuilder { + return msg.Text([]byte(body)) +} + +func bothFormat(body string, msg enmime.MailBuilder) enmime.MailBuilder { + msg = htmlFormat(body, msg) + msg = textFormat(body, msg) + return msg +} + +var formats = []func(string, enmime.MailBuilder) enmime.MailBuilder{ + htmlFormat, + textFormat, + bothFormat, +} + +func fill(i *imapclient.Client, folder string, count int, uid string, clearPassword string, displayName string, domain string, ccEvery int, bccEvery int) { + err := i.Login(uid, clearPassword).Wait() + if err != nil { + panic(err) + } + + selectOptions := &imap.SelectOptions{ReadOnly: false} + _, err = i.Select(folder, selectOptions).Wait() + if err != nil { + panic(err) + } + + toName := displayName + toAddress := fmt.Sprintf("%s@%s", uid, domain) + ccName1 := "Team Lead" + ccAddress1 := fmt.Sprintf("lead@%s", domain) + ccName2 := "Coworker" + ccAddress2 := fmt.Sprintf("coworker@%s", domain) + bccName := "HR" + bccAddress := fmt.Sprintf("corporate@%s", domain) + titler := cases.Title(language.English, cases.NoLower) + + loremIpsumGenerator := loremipsum.New() + for n := range count { + first := petname.Adjective() + last := petname.Adverb() + messageId := fmt.Sprintf("%d.%d@%s", time.Now().Unix(), 1000000+rand.Intn(8999999), domain) + + format := formats[n%len(formats)] + + text := loremIpsumGenerator.Paragraphs(2 + rand.Intn(9)) + from := fmt.Sprintf("%s.%s@%s", strings.ToLower(first), strings.ToLower(last), domain) + sender := fmt.Sprintf("%s %s <%s.%s@%s>", titler.String(first), titler.String(last), strings.ToLower(first), strings.ToLower(last), domain) + + msg := enmime.Builder(). + From(titler.String(first)+" "+titler.String(last), from). + Subject(titler.String(loremIpsumGenerator.Words(3+rand.Intn(7)))). + Header("Message-ID", messageId). + Header("Sender", sender). 
+ To(toName, toAddress) + + if n%ccEvery == 0 { + msg = msg.CCAddrs([]mail.Address{{Name: ccName1, Address: ccAddress1}, {Name: ccName2, Address: ccAddress2}}) + } + if n%bccEvery == 0 { + msg = msg.BCC(bccName, bccAddress) + } + + msg = format(text, msg) + + buf := new(bytes.Buffer) + part, _ := msg.Build() + part.Encode(buf) + mail := buf.String() + + size := int64(len(mail)) + appendCmd := i.Append(folder, size, nil) + if _, err := appendCmd.Write([]byte(mail)); err != nil { + log.Error().Err(err).Str("uid", uid).Msg("imap: failed to append message") + } + if err := appendCmd.Close(); err != nil { + log.Error().Err(err).Str("uid", uid).Msg("imap: failed to close append command") + } + if _, err := appendCmd.Wait(); err != nil { + log.Error().Err(err).Str("uid", uid).Msg("imap: append command failed") + } + } + + if err = i.Logout().Wait(); err != nil { + panic(err) + } +} + +type User struct { + uid string + password string +} + +type PrincipalRoles []string + +func (r PrincipalRoles) MarshalZerologArray(a *zerolog.Array) { + for _, role := range r { + a.Str(role) + } +} + +type Principal struct { + Id int `json:"id,omitempty"` + Type string `json:"type,omitempty"` + Emails []string `json:"emails,omitempty"` + Name string `json:"name,omitempty"` + Description string `json:"description,omitempty"` + Roles PrincipalRoles `json:"roles,omitempty"` + Secrets []string `json:"secrets,omitempty"` +} + +type Principals struct { + Data struct { + Items []Principal `json:"items,omitempty"` + } `json:"data,omitzero"` + Total int `json:"total,omitempty"` +} + +type StalwartOAuthRequest struct { + Type string `json:"type"` + ClientId string `json:"client_id"` + RedirectUri string `json:"redirect_uri"` + Nonce string `json:"nonce"` +} + +func activateUsersInStalwart(_ *zerolog.Logger, baseurl string, users []User) []User { + var h *http.Client + { + tr := &http.Transport{TLSClientConfig: &tls.Config{InsecureSkipVerify: true}} + h = &http.Client{Transport: tr} + } + u, err := url.Parse(baseurl) + if err != nil { + panic(err) + } + u.Path = path.Join(u.Path, "api", "oauth") + + activated := []User{} + for _, user := range users { + oauth := StalwartOAuthRequest{Type: "code", ClientId: "groupware", RedirectUri: "stalwart://auth", Nonce: "aaa"} + body, err := json.Marshal(oauth) + if err != nil { + panic(err) + } + req, err := http.NewRequest("POST", u.String(), bytes.NewReader(body)) + if err != nil { + panic(err) + } + req.Header.Set("Content-Type", "application/json") + req.Header.Set("Accept", "application/json") + req.SetBasicAuth(user.uid, user.password) + resp, err := h.Do(req) + if err != nil { + panic(err) + } + defer func(r *http.Response) { + r.Body.Close() + }(resp) + if resp.StatusCode == 200 { + activated = append(activated, user) + } else { + panic(fmt.Errorf("the Stalwart API response is not 200 but %v %v", resp.StatusCode, resp.Status)) + } + _, err = io.ReadAll(resp.Body) + if err != nil { + panic(err) + } + } + return activated +} + +func cleanStalwart(log *zerolog.Logger, baseurl string, adminUsername string, adminPassword string) []Principal { + var h *http.Client + { + tr := &http.Transport{TLSClientConfig: &tls.Config{InsecureSkipVerify: true}} + h = &http.Client{Transport: tr} + } + + var principals Principals + { + u, err := url.Parse(baseurl) + if err != nil { + panic(err) + } + u.Path = path.Join(u.Path, "api", "principal") + req, err := http.NewRequest("GET", u.String(), nil) + if err != nil { + panic(err) + } + req.Header.Set("Content-Type", "application/json") + 
req.Header.Set("Accept", "application/json") + req.SetBasicAuth(adminUsername, adminPassword) + resp, err := h.Do(req) + if err != nil { + panic(err) + } + defer func(r *http.Response) { + r.Body.Close() + }(resp) + if resp.StatusCode != 200 { + panic(fmt.Errorf("the Stalwart API response is not 200 but %v %v", resp.StatusCode, resp.Status)) + } + body, err := io.ReadAll(resp.Body) + if err != nil { + panic(err) + } + err = json.Unmarshal(body, &principals) + if err != nil { + panic(err) + } + } + + deleted := []Principal{} + for _, principal := range principals.Data.Items { + if principal.Type != "individual" { + log.Debug().Str("name", principal.Name).Str("type", principal.Type).Msgf("stalwart: preserving principal: type is not '%v'", "individual") + continue + } + if !slices.Contains(principal.Roles, "user") { + log.Debug().Str("name", principal.Name).Array("roles", principal.Roles).Msgf("stalwart: preserving principal: does not have the role '%v'", "user") + continue + } + if slices.Contains(usersToKeep, principal.Name) { + log.Debug().Str("name", principal.Name).Msg("stalwart: preserving principal: is a user to keep") + continue + } + if !strings.HasSuffix(principal.Description, displayNameMark) { + log.Debug().Str("name", principal.Name).Str("description", principal.Description).Msgf("stalwart: preserving principal: does not have the description suffix '%v'", displayNameMark) + continue + } + log.Debug().Str("name", principal.Name).Msg("stalwart: will delete principal") + + u, err := url.Parse(baseurl) + if err != nil { + panic(err) + } + // the documentation states "principal_id" but it only works with the principal's name attribute + u.Path = path.Join(u.Path, "api", "principal", principal.Name) // strconv.Itoa(principal.Id)) + + req, err := http.NewRequest("DELETE", u.String(), nil) + if err != nil { + panic(err) + } + req.Header.Set("Content-Type", "application/json") + req.Header.Set("Accept", "application/json") + req.SetBasicAuth(adminUsername, adminPassword) + resp, err := h.Do(req) + if err != nil { + panic(err) + } + defer func(r *http.Response) { + r.Body.Close() + }(resp) + if resp.StatusCode != 200 { + panic(fmt.Errorf("the Stalwart API response is not 200 but %v %v", resp.StatusCode, resp.Status)) + } + _, err = io.ReadAll(resp.Body) + if err != nil { + panic(err) + } + deleted = append(deleted, principal) + } + return deleted +} + +func main() { + log := zerolog.New(zerolog.ConsoleWriter{Out: os.Stderr, TimeFormat: time.TimeOnly}).With().Timestamp().Logger() + + fillImapInbox := enabled(config("FILL_IMAP", "true")) + imapHost := config("FILL_IMAP_HOST", "localhost:636") + ccEvery := iconfig(&log, "FILL_IMAP_CC_EVERY", 3) + bccEvery := iconfig(&log, "FILL_IMAP_BCC_EVERY", 2) + folder := config("FILL_IMAP_FOLDER", "Inbox") + imapCount := iconfig(&log, "FILL_IMAP_COUNT", 10) + domain := config("DOMAIN", "example.org") + baseDN := config("BASE_DN", "ou=users,dc=opencloud,dc=eu") + ldapUrl := config("LDAP_URL", "ldaps://localhost:636") + bindDN := config("BIND_DN", "cn=admin,dc=opencloud,dc=eu") + bindPassword := config("BIND_PASSWORD", "admin") + userPassword := config("USER_PASSWORD", "") + usersFile := config("USERS_FILE", "") + count := iconfig(&log, "COUNT", 10) + cleanup := enabled(config("CLEANUP", "true")) + cleanupLdap := enabled(config("CLEANUP_LDAP", strconv.FormatBool(cleanup))) + cleanupStalwart := enabled(config("CLEANUP_STALWART", strconv.FormatBool(cleanup))) + stalwartBaseUrl := config("STALWART_URL", "https://stalwart.opencloud.test") + stalwartAdminUser 
:= config("STALWART_ADMIN_USER", "mailadmin") + stalwartAdminPassword := config("STALWART_ADMIN_PASSWORD", "admin") + activateStalwart := enabled(config("ACTIVATE_STALWART", "true")) + saltSize := iconfig(&log, "SALT_SIZE", 16) + + l, err := ldap.DialURL(ldapUrl, ldap.DialWithTLSConfig(&tls.Config{InsecureSkipVerify: true})) + if err != nil { + log.Fatal().Err(err).Str("url", ldapUrl).Msg("failed to connect to LDAP server") + panic(err) + } + err = l.Bind(bindDN, bindPassword) + if err != nil { + log.Fatal().Err(err).Str("url", ldapUrl).Str("bindDN", bindDN).Msg("failed to authenticate to LDAP server") + panic(err) + } + + var i *imapclient.Client + if fillImapInbox { + i, err := imapclient.DialTLS(imapHost, &imapclient.Options{TLSConfig: &tls.Config{InsecureSkipVerify: true}}) + if err != nil { + log.Fatal().Err(err).Str("host", imapHost).Msg("failed to connect to IMAP server") + panic(err) + } + defer func(imap *imapclient.Client) { + err := imap.Close() + if err != nil { + log.Warn().Err(err).Msg("failed to close IMAP connection") + } + }(i) + } else { + i = nil + } + + if cleanupStalwart { + deleted := cleanStalwart(&log, stalwartBaseUrl, stalwartAdminUser, stalwartAdminPassword) + log.Info().Msgf("deleted %v principals from Stalwart", len(deleted)) + } + + if cleanupLdap { + deleted := []string{} + { + llog := log.With().Str("url", ldapUrl).Logger() + llog.Debug().Msg("ldap: cleaning up LDAP") + filter := fmt.Sprintf("(&(objectClass=inetOrgPerson)(description=%v))", ldap.EscapeFilter(displayNameMark)) + existing, err := l.Search(ldap.NewSearchRequest( + baseDN, + ldap.ScopeSingleLevel, + ldap.NeverDerefAliases, + 0, 0, false, + filter, + []string{"uid"}, + []ldap.Control{}, + )) + if err != nil { + llog.Fatal().Err(err).Str("filter", filter).Msg("ldap: failed to perform search query") + panic(err) + } + + for _, entry := range existing.Entries { + uid := entry.GetAttributeValue("uid") + if slices.Contains(usersToKeep, uid) { + llog.Debug().Str("uid", uid).Msg("ldap: preserving user: in list of users to keep") + continue + } + err = l.Del(ldap.NewDelRequest(entry.DN, []ldap.Control{})) + if err != nil { + llog.Fatal().Err(err).Msg("ldap: failed to delete entry") + panic(err) + } + deleted = append(deleted, uid) + llog.Debug().Str("dn", entry.DN).Msg("ldap: deleted user entry") + } + } + log.Info().Msgf("ldap: deleted %v user entries", len(deleted)) + } + + created := []User{} + { + var flog zerolog.Logger + if usersFile != "" { + flog = log.With().Str("filename", usersFile).Logger() + } else { + flog = log + } + llog := log.With().Str("url", ldapUrl).Logger() + + var d io.Writer + { + if usersFile != "" { + f, err := os.OpenFile(usersFile, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0644) + if err != nil { + flog.Fatal().Err(err).Msg("failed to open/create users output CSV file") + panic(err) + } + defer f.Close() + d = f + } else { + d = os.Stdout + } + } + w := csv.NewWriter(d) + w.Comma = ';' + w.UseCRLF = false + err = w.Write([]string{"name", "password", "mail"}) + if err != nil { + flog.Fatal().Err(err).Msg("failed to open/create users output CSV file") + panic(err) + } + for range count { + cn := strings.ToLower(faker.Username()) + uid := cn + gn := faker.FirstName() + sn := faker.LastName() + mailAddress := fmt.Sprintf("%s@%s", uid, domain) + dn := fmt.Sprintf("uid=%s,%s", uid, baseDN) + displayName := fmt.Sprintf("%s %s %s", gn, sn, displayNameMark) + description := displayNameMark + var clearPassword string + if userPassword != "" { + clearPassword = userPassword + } else { + 
clearPassword = randomPassword() + } + hashedPassword := hashPassword(clearPassword, saltSize) + err = l.Add(&ldap.AddRequest{ + DN: dn, + Attributes: []ldap.Attribute{ + {Type: "objectClass", Vals: []string{"inetOrgPerson", "organizationalPerson", "person", "top"}}, + {Type: "cn", Vals: []string{cn}}, + {Type: "sn", Vals: []string{sn}}, + {Type: "givenName", Vals: []string{gn}}, + {Type: "mail", Vals: []string{mailAddress}}, + {Type: "displayName", Vals: []string{displayName}}, + {Type: "description", Vals: []string{description}}, + {Type: "userPassword", Vals: []string{hashedPassword}}, + }, + }) + if err != nil { + llog.Fatal().Err(err).Str("uid", uid).Msg("failed to add entry") + panic(err) + } + err = w.Write([]string{uid, clearPassword, mailAddress}) + if err != nil { + flog.Fatal().Err(err).Str("uid", uid).Msg("failed to write entry to CSV") + panic(err) + } + + if i != nil && imapCount > 0 { + fill(i, folder, imapCount, uid, clearPassword, displayName, domain, ccEvery, bccEvery) + } + created = append(created, User{uid: uid, password: clearPassword}) + } + w.Flush() + if err := w.Error(); err != nil { + flog.Fatal().Err(err).Msg("failed to flush CSV") + panic(err) + } + + { + zev := log.Info() + if usersFile != "" { + zev = zev.Str("filename", usersFile) + } + zev.Msgf("ldap: added %v users", len(created)) + } + } + + if activateStalwart && len(created) > 0 { + activated := activateUsersInStalwart(&log, stalwartBaseUrl, created) + log.Info().Msgf("stalwart: activated %v users", len(activated)) + } +} diff --git a/vendor/github.com/MicahParks/jwkset/.gitignore b/vendor/github.com/MicahParks/jwkset/.gitignore new file mode 100644 index 0000000000..040ac50a4b --- /dev/null +++ b/vendor/github.com/MicahParks/jwkset/.gitignore @@ -0,0 +1,2 @@ +config.*json +node_modules diff --git a/vendor/github.com/MicahParks/jwkset/LICENSE b/vendor/github.com/MicahParks/jwkset/LICENSE new file mode 100644 index 0000000000..05f2ccbd2a --- /dev/null +++ b/vendor/github.com/MicahParks/jwkset/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2022 Micah Parks + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/vendor/github.com/MicahParks/jwkset/README.md b/vendor/github.com/MicahParks/jwkset/README.md new file mode 100644 index 0000000000..e9d511eb3b --- /dev/null +++ b/vendor/github.com/MicahParks/jwkset/README.md @@ -0,0 +1,133 @@ +[![Go Reference](https://pkg.go.dev/badge/github.com/MicahParks/jwkset.svg)](https://pkg.go.dev/github.com/MicahParks/jwkset) + +# JWK Set (JSON Web Key Set) + +This is a JWK Set (JSON Web Key Set) implementation written in Golang. + +The goal of this project is to provide a complete implementation of JWK and JWK Sets within the constraints of the +Golang standard library, without implementing any cryptographic algorithms. For example, `Ed25519` is supported, but +`Ed448` is not, because the Go standard library does not have a high level implementation of `Ed448`. + +If you would like to generate or validate a JWK without writing any Golang code, please visit +the [Generate a JWK Set](#generate-a-jwk-set) section. 
+ +If you would like to have a JWK Set client to help verify JWTs without writing any Golang code, you can use the +[JWK Set Client Proxy (JCP) project](https://github.com/MicahParks/jcp) perform JWK Set client operations in the +language of your choice using an OpenAPI interface. + +# Generate a JWK Set + +If you would like to generate a JWK Set without writing Golang code, this project publishes utilities to generate a JWK +Set from: + +* PEM encoded X.509 Certificates +* PEM encoded public keys +* PEM encoded private keys + +The PEM block type is used to infer which key type to decode. Reference the [Supported keys](#supported-keys) section +for a list of supported cryptographic key types. + +## Website + +Visit [https://jwkset.com](https://jwkset.com) to use the web interface for this project. You can self-host this website +by following the instructions in the `README.md` in +the [website](https://github.com/MicahParks/jwkset/tree/master/website) directory. + +## Command line + +Gather your PEM encoded keys or certificates and use the `cmd/jwksetinfer` command line tool to generate a JWK Set. + +**Install** + +``` +go install github.com/MicahParks/jwkset/cmd/jwksetinfer@latest +``` + +**Usage** + +``` +jwksetinfer mykey.pem mycert.crt +``` + +## Custom server + +This project can be used in creating a custom JWK Set server. A good place to start is `examples/http_server/main.go`. + +# Golang JWK Set client + +If you are using [`github.com/golang-jwt/jwt/v5`](https://github.com/golang-jwt/jwt) take a look +at [`github.com/MicahParks/keyfunc/v3`](https://github.com/MicahParks/keyfunc). + +This project can be used to create JWK Set clients. An HTTP client is provided. See a snippet of the usage +from `examples/default_http_client/main.go` below. + +## Create a JWK Set client from the server's HTTP URL. + +```go +jwks, err := jwkset.NewDefaultHTTPClient([]string{server.URL}) +if err != nil { + log.Fatalf("Failed to create client JWK set. Error: %s", err) +} +``` + +## Read a key from the client. + +```go +jwk, err = jwks.KeyRead(ctx, myKeyID) +if err != nil { + log.Fatalf("Failed to read key from client JWK set. Error: %s", err) +} +``` + +# Supported keys + +This project supports the following key types: + +* [Edwards-curve Digital Signature Algorithm (EdDSA)](https://en.wikipedia.org/wiki/EdDSA) (Ed25519 only) + * Go Types: `ed25519.PrivateKey` and `ed25519.PublicKey` +* [Elliptic-curve Diffie–Hellman (ECDH)](https://en.wikipedia.org/wiki/Elliptic-curve_Diffie%E2%80%93Hellman) (X25519 + only) + * Go Types: `*ecdh.PrivateKey` and `*ecdh.PublicKey` +* [Elliptic Curve Digital Signature Algorithm (ECDSA)](https://en.wikipedia.org/wiki/Elliptic_Curve_Digital_Signature_Algorithm) + * Go Types: `*ecdsa.PrivateKey` and `*ecdsa.PublicKey` +* [Rivest–Shamir–Adleman (RSA)](https://en.wikipedia.org/wiki/RSA_(cryptosystem)) + * Go Types: `*rsa.PrivateKey` and `*rsa.PublicKey` +* [HMAC](https://en.wikipedia.org/wiki/HMAC), [AES Key Wrap](https://en.wikipedia.org/wiki/Key_Wrap), and other + symmetric keys + * Go Type: `[]byte` + +Cryptographic keys can be added, deleted, and read from the JWK Set. A JSON representation of the JWK Set can be created +for hosting via HTTPS. This project includes an in-memory storage implementation, but an interface is provided for more +advanced use cases. + +# Notes + +This project aims to implement the relevant RFCs to the fullest extent possible using the Go standard library, but does +not implement any cryptographic algorithms itself. 
+ +* RFC 8037 adds support for `Ed448`, `X448`, and `secp256k1`, but there is no Golang standard library support for these + key types. +* In order to be compatible with non-RFC compliant JWK Set providers, this project does not strictly enforce JWK + parameters that are integers and have extra or missing leading padding. See the release notes + of [`v0.5.15`](https://github.com/MicahParks/jwkset/releases/tag/v0.5.15) for details. +* `Base64url Encoding` requires that all trailing `=` characters be removed. This project automatically strips any + trailing `=` characters in an attempt to be compatible with improper implementations of JWK. +* This project does not currently support JWK Set encryption using JWE. This would involve implementing the relevant JWE + specifications. It may be implemented in the future if there is interest. Open a GitHub issue to express interest. + +# Related projects + +## [`github.com/MicahParks/keyfunc`](https://github.com/MicahParks/keyfunc) + +A JWK Set client for the [`github.com/golang-jwt/jwt/v5`](https://github.com/golang-jwt/jwt) project. + +## [`github.com/MicahParks/jcp`](https://github.com/MicahParks/jcp) + +A JWK Set client proxy. JCP for short. This project is a standalone service that uses keyfunc under the hood. It +primarily exists for these use cases: + +The language or shell a program is written in does not have an adequate JWK Set client. Validate JWTs with curl? Why +not? +Restrictive networking policies prevent a program from accessing the remote JWK Set directly. +Many co-located services need to validate JWTs that were signed by a key that lives in a remote JWK Set. +If you can integrate keyfunc directly into your program, you likely don't need JCP. diff --git a/vendor/github.com/MicahParks/jwkset/constants.go b/vendor/github.com/MicahParks/jwkset/constants.go new file mode 100644 index 0000000000..15e219e0d1 --- /dev/null +++ b/vendor/github.com/MicahParks/jwkset/constants.go @@ -0,0 +1,167 @@ +package jwkset + +const ( + // HeaderKID is a JWT header for the key ID. + HeaderKID = "kid" +) + +// These are string constants set in https://www.iana.org/assignments/jose/jose.xhtml +// See their respective types for more information. +const ( + AlgHS256 ALG = "HS256" + AlgHS384 ALG = "HS384" + AlgHS512 ALG = "HS512" + AlgRS256 ALG = "RS256" + AlgRS384 ALG = "RS384" + AlgRS512 ALG = "RS512" + AlgES256 ALG = "ES256" + AlgES384 ALG = "ES384" + AlgES512 ALG = "ES512" + AlgPS256 ALG = "PS256" + AlgPS384 ALG = "PS384" + AlgPS512 ALG = "PS512" + AlgNone ALG = "none" + AlgRSA1_5 ALG = "RSA1_5" + AlgRSAOAEP ALG = "RSA-OAEP" + AlgRSAOAEP256 ALG = "RSA-OAEP-256" + AlgA128KW ALG = "A128KW" + AlgA192KW ALG = "A192KW" + AlgA256KW ALG = "A256KW" + AlgDir ALG = "dir" + AlgECDHES ALG = "ECDH-ES" + AlgECDHESA128KW ALG = "ECDH-ES+A128KW" + AlgECDHESA192KW ALG = "ECDH-ES+A192KW" + AlgECDHESA256KW ALG = "ECDH-ES+A256KW" + AlgA128GCMKW ALG = "A128GCMKW" + AlgA192GCMKW ALG = "A192GCMKW" + AlgA256GCMKW ALG = "A256GCMKW" + AlgPBES2HS256A128KW ALG = "PBES2-HS256+A128KW" + AlgPBES2HS384A192KW ALG = "PBES2-HS384+A192KW" + AlgPBES2HS512A256KW ALG = "PBES2-HS512+A256KW" + AlgA128CBCHS256 ALG = "A128CBC-HS256" + AlgA192CBCHS384 ALG = "A192CBC-HS384" + AlgA256CBCHS512 ALG = "A256CBC-HS512" + AlgA128GCM ALG = "A128GCM" + AlgA192GCM ALG = "A192GCM" + AlgA256GCM ALG = "A256GCM" + AlgEdDSA ALG = "EdDSA" + AlgRS1 ALG = "RS1" // Prohibited. + AlgRSAOAEP384 ALG = "RSA-OAEP-384" + AlgRSAOAEP512 ALG = "RSA-OAEP-512" + AlgA128CBC ALG = "A128CBC" // Prohibited. 
+ AlgA192CBC ALG = "A192CBC" // Prohibited. + AlgA256CBC ALG = "A256CBC" // Prohibited. + AlgA128CTR ALG = "A128CTR" // Prohibited. + AlgA192CTR ALG = "A192CTR" // Prohibited. + AlgA256CTR ALG = "A256CTR" // Prohibited. + AlgHS1 ALG = "HS1" // Prohibited. + AlgES256K ALG = "ES256K" + + CrvP256 CRV = "P-256" + CrvP384 CRV = "P-384" + CrvP521 CRV = "P-521" + CrvEd25519 CRV = "Ed25519" + CrvEd448 CRV = "Ed448" + CrvX25519 CRV = "X25519" + CrvX448 CRV = "X448" + CrvSECP256K1 CRV = "secp256k1" + + KeyOpsSign KEYOPS = "sign" + KeyOpsVerify KEYOPS = "verify" + KeyOpsEncrypt KEYOPS = "encrypt" + KeyOpsDecrypt KEYOPS = "decrypt" + KeyOpsWrapKey KEYOPS = "wrapKey" + KeyOpsUnwrapKey KEYOPS = "unwrapKey" + KeyOpsDeriveKey KEYOPS = "deriveKey" + KeyOpsDeriveBits KEYOPS = "deriveBits" + + KtyEC KTY = "EC" + KtyOKP KTY = "OKP" + KtyRSA KTY = "RSA" + KtyOct KTY = "oct" + + UseEnc USE = "enc" + UseSig USE = "sig" +) + +// ALG is a set of "JSON Web Signature and Encryption Algorithms" types from +// https://www.iana.org/assignments/jose/jose.xhtml as defined in +// https://www.rfc-editor.org/rfc/rfc7518#section-7.1 +type ALG string + +func (alg ALG) IANARegistered() bool { + switch alg { + case AlgHS256, AlgHS384, AlgHS512, AlgRS256, AlgRS384, AlgRS512, AlgES256, AlgES384, AlgES512, AlgPS256, AlgPS384, + AlgPS512, AlgNone, AlgRSA1_5, AlgRSAOAEP, AlgRSAOAEP256, AlgA128KW, AlgA192KW, AlgA256KW, AlgDir, AlgECDHES, + AlgECDHESA128KW, AlgECDHESA192KW, AlgECDHESA256KW, AlgA128GCMKW, AlgA192GCMKW, AlgA256GCMKW, + AlgPBES2HS256A128KW, AlgPBES2HS384A192KW, AlgPBES2HS512A256KW, AlgA128CBCHS256, AlgA192CBCHS384, + AlgA256CBCHS512, AlgA128GCM, AlgA192GCM, AlgA256GCM, AlgEdDSA, AlgRS1, AlgRSAOAEP384, AlgRSAOAEP512, AlgA128CBC, + AlgA192CBC, AlgA256CBC, AlgA128CTR, AlgA192CTR, AlgA256CTR, AlgHS1, AlgES256K, "": + return true + } + return false +} +func (alg ALG) String() string { + return string(alg) +} + +// CRV is a set of "JSON Web Key Elliptic Curve" types from https://www.iana.org/assignments/jose/jose.xhtml as +// mentioned in https://www.rfc-editor.org/rfc/rfc7518.html#section-6.2.1.1 +type CRV string + +func (crv CRV) IANARegistered() bool { + switch crv { + case CrvP256, CrvP384, CrvP521, CrvEd25519, CrvEd448, CrvX25519, CrvX448, CrvSECP256K1, "": + return true + } + return false +} +func (crv CRV) String() string { + return string(crv) +} + +// KEYOPS is a set of "JSON Web Key Operations" from https://www.iana.org/assignments/jose/jose.xhtml as mentioned in +// https://www.rfc-editor.org/rfc/rfc7517#section-4.3 +type KEYOPS string + +func (keyopts KEYOPS) IANARegistered() bool { + switch keyopts { + case KeyOpsSign, KeyOpsVerify, KeyOpsEncrypt, KeyOpsDecrypt, KeyOpsWrapKey, KeyOpsUnwrapKey, KeyOpsDeriveKey, + KeyOpsDeriveBits: + return true + } + return false +} +func (keyopts KEYOPS) String() string { + return string(keyopts) +} + +// KTY is a set of "JSON Web Key Types" from https://www.iana.org/assignments/jose/jose.xhtml as mentioned in +// https://www.rfc-editor.org/rfc/rfc7517#section-4.1 +type KTY string + +func (kty KTY) IANARegistered() bool { + switch kty { + case KtyEC, KtyOKP, KtyRSA, KtyOct: + return true + } + return false +} +func (kty KTY) String() string { + return string(kty) +} + +// USE is a set of "JSON Web Key Use" types from https://www.iana.org/assignments/jose/jose.xhtml as mentioned in +// https://www.rfc-editor.org/rfc/rfc7517#section-4.2 +type USE string + +func (use USE) IANARegistered() bool { + switch use { + case UseEnc, UseSig, "": + return true + } + return false +} +func (use 
USE) String() string { + return string(use) +} diff --git a/vendor/github.com/MicahParks/jwkset/http.go b/vendor/github.com/MicahParks/jwkset/http.go new file mode 100644 index 0000000000..36f151c240 --- /dev/null +++ b/vendor/github.com/MicahParks/jwkset/http.go @@ -0,0 +1,276 @@ +package jwkset + +import ( + "context" + "encoding/json" + "errors" + "fmt" + "log/slog" + "time" + + "golang.org/x/time/rate" +) + +var ( + // ErrNewClient fails to create a new JWK Set client. + ErrNewClient = errors.New("failed to create new JWK Set client") +) + +// HTTPClientOptions are options for creating a new JWK Set client. +type HTTPClientOptions struct { + // Given contains keys known from outside HTTP URLs. + Given Storage + // HTTPURLs are a mapping of HTTP URLs to JWK Set endpoints to storage implementations for the keys located at the + // URL. If empty, HTTP will not be used. + HTTPURLs map[string]Storage + // PrioritizeHTTP is a flag that indicates whether keys from the HTTP URL should be prioritized over keys from the + // given storage. + PrioritizeHTTP bool + // RateLimitWaitMax is the timeout for waiting for rate limiting to end. + RateLimitWaitMax time.Duration + // RefreshUnknownKID is non-nil to indicate that remote HTTP resources should be refreshed if a key with an unknown + // key ID is trying to be read. This makes reading methods block until the context is over, a key with the matching + // key ID is found in a refreshed remote resource, or all refreshes complete. + RefreshUnknownKID *rate.Limiter +} + +// Client is a JWK Set client. +type httpClient struct { + given Storage + httpURLs map[string]Storage + prioritizeHTTP bool + rateLimitWaitMax time.Duration + refreshUnknownKID *rate.Limiter +} + +// NewHTTPClient creates a new JWK Set client from remote HTTP resources. +func NewHTTPClient(options HTTPClientOptions) (Storage, error) { + if options.Given == nil && len(options.HTTPURLs) == 0 { + return nil, fmt.Errorf("%w: no given keys or HTTP URLs", ErrNewClient) + } + for u, store := range options.HTTPURLs { + if store == nil { + var err error + options.HTTPURLs[u], err = NewStorageFromHTTP(u, HTTPClientStorageOptions{}) + if err != nil { + return nil, fmt.Errorf("failed to create HTTP client storage for %q: %w", u, errors.Join(err, ErrNewClient)) + } + } + } + given := options.Given + if given == nil { + given = NewMemoryStorage() + } + c := httpClient{ + given: given, + httpURLs: options.HTTPURLs, + prioritizeHTTP: options.PrioritizeHTTP, + rateLimitWaitMax: options.RateLimitWaitMax, + refreshUnknownKID: options.RefreshUnknownKID, + } + return c, nil +} + +// NewDefaultHTTPClient creates a new JWK Set client with default options from remote HTTP resources. +// +// The default behavior is to: +// 1. Refresh remote HTTP resources every hour. +// 2. Prioritize keys from remote HTTP resources over keys from the given storage. +// 3. Refresh remote HTTP resources if a key with an unknown key ID is trying to be read, with a rate limit of 5 minutes. +// 4. Log to slog.Default() if a refresh fails. +func NewDefaultHTTPClient(urls []string) (Storage, error) { + return NewDefaultHTTPClientCtx(context.Background(), urls) +} + +// NewDefaultHTTPClientCtx is the same as NewDefaultHTTPClient, but with a context that can end the refresh goroutine. 
+func NewDefaultHTTPClientCtx(ctx context.Context, urls []string) (Storage, error) { + clientOptions := HTTPClientOptions{ + HTTPURLs: make(map[string]Storage), + RateLimitWaitMax: time.Minute, + RefreshUnknownKID: rate.NewLimiter(rate.Every(5*time.Minute), 1), + } + for _, u := range urls { + refreshErrorHandler := func(ctx context.Context, err error) { + slog.Default().ErrorContext(ctx, "Failed to refresh HTTP JWK Set from remote HTTP resource.", + "error", err, + "url", u, + ) + } + options := HTTPClientStorageOptions{ + Ctx: ctx, + NoErrorReturnFirstHTTPReq: true, + RefreshErrorHandler: refreshErrorHandler, + RefreshInterval: time.Hour, + } + c, err := NewStorageFromHTTP(u, options) + if err != nil { + return nil, fmt.Errorf("failed to create HTTP client storage for %q: %w", u, errors.Join(err, ErrNewClient)) + } + clientOptions.HTTPURLs[u] = c + } + return NewHTTPClient(clientOptions) +} + +func (c httpClient) KeyDelete(ctx context.Context, keyID string) (ok bool, err error) { + ok, err = c.given.KeyDelete(ctx, keyID) + if err != nil && !errors.Is(err, ErrKeyNotFound) { + return false, fmt.Errorf("failed to delete key with ID %q from given storage due to error: %w", keyID, err) + } + if ok { + return true, nil + } + for _, store := range c.httpURLs { + ok, err = store.KeyDelete(ctx, keyID) + if err != nil && !errors.Is(err, ErrKeyNotFound) { + return false, fmt.Errorf("failed to delete key with ID %q from HTTP storage due to error: %w", keyID, err) + } + if ok { + return true, nil + } + } + return false, nil +} +func (c httpClient) KeyRead(ctx context.Context, keyID string) (jwk JWK, err error) { + if !c.prioritizeHTTP { + jwk, err = c.given.KeyRead(ctx, keyID) + switch { + case errors.Is(err, ErrKeyNotFound): + // Do nothing. + case err != nil: + return JWK{}, fmt.Errorf("failed to find JWT key with ID %q in given storage due to error: %w", keyID, err) + default: + return jwk, nil + } + } + for _, store := range c.httpURLs { + jwk, err = store.KeyRead(ctx, keyID) + switch { + case errors.Is(err, ErrKeyNotFound): + continue + case err != nil: + return JWK{}, fmt.Errorf("failed to find JWT key with ID %q in HTTP storage due to error: %w", keyID, err) + default: + return jwk, nil + } + } + if c.prioritizeHTTP { + jwk, err = c.given.KeyRead(ctx, keyID) + switch { + case errors.Is(err, ErrKeyNotFound): + // Do nothing. + case err != nil: + return JWK{}, fmt.Errorf("failed to find JWT key with ID %q in given storage due to error: %w", keyID, err) + default: + return jwk, nil + } + } + if c.refreshUnknownKID != nil { + var cancel context.CancelFunc = func() {} + if c.rateLimitWaitMax > 0 { + ctx, cancel = context.WithTimeout(ctx, c.rateLimitWaitMax) + } + defer cancel() + err = c.refreshUnknownKID.Wait(ctx) + if err != nil { + return JWK{}, fmt.Errorf("failed to wait for JWK Set refresh rate limiter due to error: %w", err) + } + for _, store := range c.httpURLs { + s, ok := store.(httpStorage) + if !ok { + continue + } + err = s.refresh(ctx) + if err != nil { + if s.options.RefreshErrorHandler != nil { + s.options.RefreshErrorHandler(ctx, err) + } + continue + } + jwk, err = store.KeyRead(ctx, keyID) + switch { + case errors.Is(err, ErrKeyNotFound): + // Do nothing. 
+ case err != nil: + return JWK{}, fmt.Errorf("failed to find JWT key with ID %q in HTTP storage due to error: %w", keyID, err) + default: + return jwk, nil + } + } + } + return JWK{}, fmt.Errorf("%w %q", ErrKeyNotFound, keyID) +} +func (c httpClient) KeyReadAll(ctx context.Context) ([]JWK, error) { + jwks, err := c.given.KeyReadAll(ctx) + if err != nil { + return nil, fmt.Errorf("failed to snapshot given keys due to error: %w", err) + } + for u, store := range c.httpURLs { + j, err := store.KeyReadAll(ctx) + if err != nil { + return nil, fmt.Errorf("failed to snapshot HTTP keys from %q due to error: %w", u, err) + } + jwks = append(jwks, j...) + } + return jwks, nil +} +func (c httpClient) KeyWrite(ctx context.Context, jwk JWK) error { + return c.given.KeyWrite(ctx, jwk) +} + +func (c httpClient) JSON(ctx context.Context) (json.RawMessage, error) { + m, err := c.combineStorage(ctx) + if err != nil { + return nil, fmt.Errorf("failed to combine storage due to error: %w", err) + } + return m.JSON(ctx) +} +func (c httpClient) JSONPublic(ctx context.Context) (json.RawMessage, error) { + m, err := c.combineStorage(ctx) + if err != nil { + return nil, fmt.Errorf("failed to combine storage due to error: %w", err) + } + return m.JSONPublic(ctx) +} +func (c httpClient) JSONPrivate(ctx context.Context) (json.RawMessage, error) { + m, err := c.combineStorage(ctx) + if err != nil { + return nil, fmt.Errorf("failed to combine storage due to error: %w", err) + } + return m.JSONPrivate(ctx) +} +func (c httpClient) JSONWithOptions(ctx context.Context, marshalOptions JWKMarshalOptions, validationOptions JWKValidateOptions) (json.RawMessage, error) { + m, err := c.combineStorage(ctx) + if err != nil { + return nil, fmt.Errorf("failed to combine storage due to error: %w", err) + } + return m.JSONWithOptions(ctx, marshalOptions, validationOptions) +} +func (c httpClient) Marshal(ctx context.Context) (JWKSMarshal, error) { + m, err := c.combineStorage(ctx) + if err != nil { + return JWKSMarshal{}, fmt.Errorf("failed to combine storage due to error: %w", err) + } + return m.Marshal(ctx) +} +func (c httpClient) MarshalWithOptions(ctx context.Context, marshalOptions JWKMarshalOptions, validationOptions JWKValidateOptions) (JWKSMarshal, error) { + m, err := c.combineStorage(ctx) + if err != nil { + return JWKSMarshal{}, fmt.Errorf("failed to combine storage due to error: %w", err) + } + return m.MarshalWithOptions(ctx, marshalOptions, validationOptions) +} + +func (c httpClient) combineStorage(ctx context.Context) (Storage, error) { + jwks, err := c.KeyReadAll(ctx) + if err != nil { + return nil, fmt.Errorf("failed to snapshot keys due to error: %w", err) + } + m := NewMemoryStorage() + for _, jwk := range jwks { + err = m.KeyWrite(ctx, jwk) + if err != nil { + return nil, fmt.Errorf("failed to write key to memory storage due to error: %w", err) + } + } + return m, nil +} diff --git a/vendor/github.com/MicahParks/jwkset/jwk.go b/vendor/github.com/MicahParks/jwkset/jwk.go new file mode 100644 index 0000000000..9fd55580d1 --- /dev/null +++ b/vendor/github.com/MicahParks/jwkset/jwk.go @@ -0,0 +1,494 @@ +package jwkset + +import ( + "bytes" + "context" + "crypto/ecdsa" + "crypto/ed25519" + "crypto/rsa" + "crypto/x509" + "encoding/base64" + "encoding/json" + "errors" + "fmt" + "io" + "math/big" + "net/http" + "net/url" + "slices" + "time" +) + +var ( + // ErrPadding indicates that there is invalid padding. + ErrPadding = errors.New("padding error") +) + +// JWK represents a JSON Web Key. 
+type JWK struct { + key any + marshal JWKMarshal + options JWKOptions +} + +// JWKMarshalOptions are used to specify options for JSON marshaling a JWK. +type JWKMarshalOptions struct { + // Private is used to indicate that the JWK's private key material should be JSON marshaled and unmarshalled. This + // includes symmetric and asymmetric keys. Setting this to true is the only way to marshal and unmarshal symmetric + // keys. + Private bool +} + +// JWKX509Options holds the X.509 certificate information for a JWK. This data structure is not used for JSON marshaling. +type JWKX509Options struct { + // X5C contains a chain of one or more PKIX certificates. The PKIX certificate containing the key value MUST be the + // first certificate. + X5C []*x509.Certificate // The PKIX certificate containing the key value MUST be the first certificate. + + // X5T is calculated automatically. + // X5TS256 is calculated automatically. + + // X5U Is a URI that refers to a resource for an X.509 public key certificate or certificate chain. + X5U string // https://www.rfc-editor.org/rfc/rfc7517#section-4.6 +} + +// JWKValidateOptions are used to specify options for validating a JWK. +type JWKValidateOptions struct { + /* + This package intentionally does not confirm if certificate's usage or compare that to the JWK's use parameter. + Please open a GitHub issue if you think this should be an option. + */ + // CheckX509ValidTime is used to indicate that the X.509 certificate's valid time should be checked. + CheckX509ValidTime bool + // GetX5U is used to get and validate the X.509 certificate from the X5U URI. Use DefaultGetX5U for the default + // behavior. + GetX5U func(x5u *url.URL) ([]*x509.Certificate, error) + // SkipAll is used to skip all validation. + SkipAll bool + // SkipKeyOps is used to skip validation of the key operations (key_ops). + SkipKeyOps bool + // SkipMetadata skips checking if the JWKMetadataOptions match the JWKMarshal. + SkipMetadata bool + // SkipUse is used to skip validation of the key use (use). + SkipUse bool + // SkipX5UScheme is used to skip checking if the X5U URI scheme is https. + SkipX5UScheme bool + // StrictPadding is used to indicate that the JWK should be validated with strict padding. + StrictPadding bool +} + +// JWKMetadataOptions are direct passthroughs into the JWKMarshal. +type JWKMetadataOptions struct { + // ALG is the algorithm (alg). + ALG ALG + // KID is the key ID (kid). + KID string + // KEYOPS is the key operations (key_ops). + KEYOPS []KEYOPS + // USE is the key use (use). + USE USE +} + +// JWKOptions are used to specify options for marshaling a JSON Web Key. +type JWKOptions struct { + Marshal JWKMarshalOptions + Metadata JWKMetadataOptions + Validate JWKValidateOptions + X509 JWKX509Options +} + +// NewJWKFromKey uses the given key and options to create a JWK. It is possible to provide a private key with an X.509 +// certificate, which will be validated to contain the correct public key. 
+func NewJWKFromKey(key any, options JWKOptions) (JWK, error) { + marshal, err := keyMarshal(key, options) + if err != nil { + return JWK{}, fmt.Errorf("failed to marshal JSON Web Key: %w", err) + } + switch key.(type) { + case ed25519.PrivateKey, ed25519.PublicKey: + if options.Metadata.ALG == "" { + options.Metadata.ALG = AlgEdDSA + } else if options.Metadata.ALG != AlgEdDSA { + return JWK{}, fmt.Errorf("%w: invalid ALG for Ed25519 key: %q", ErrOptions, options.Metadata.ALG) + } + } + j := JWK{ + key: key, + marshal: marshal, + options: options, + } + err = j.Validate() + if err != nil { + return JWK{}, fmt.Errorf("failed to validate JSON Web Key: %w", err) + } + return j, nil +} + +// NewJWKFromRawJSON uses the given raw JSON to create a JWK. +func NewJWKFromRawJSON(j json.RawMessage, marshalOptions JWKMarshalOptions, validateOptions JWKValidateOptions) (JWK, error) { + marshal := JWKMarshal{} + err := json.Unmarshal(j, &marshal) + if err != nil { + return JWK{}, fmt.Errorf("failed to unmarshal JSON Web Key: %w", err) + } + return NewJWKFromMarshal(marshal, marshalOptions, validateOptions) +} + +// NewJWKFromMarshal transforms a JWKMarshal into a JWK. +func NewJWKFromMarshal(marshal JWKMarshal, marshalOptions JWKMarshalOptions, validateOptions JWKValidateOptions) (JWK, error) { + j, err := keyUnmarshal(marshal, marshalOptions, validateOptions) + if err != nil { + return JWK{}, fmt.Errorf("failed to unmarshal JSON Web Key: %w", err) + } + err = j.Validate() + if err != nil { + return JWK{}, fmt.Errorf("failed to validate JSON Web Key: %w", err) + } + return j, nil +} + +// NewJWKFromX5C uses the X.509 X5C information in the options to create a JWK. +func NewJWKFromX5C(options JWKOptions) (JWK, error) { + if len(options.X509.X5C) == 0 { + return JWK{}, fmt.Errorf("%w: no X.509 certificates provided", ErrOptions) + } + cert := options.X509.X5C[0] + marshal, err := keyMarshal(cert.PublicKey, options) + if err != nil { + return JWK{}, fmt.Errorf("failed to marshal JSON Web Key: %w", err) + } + + if cert.PublicKeyAlgorithm == x509.Ed25519 { + if options.Metadata.ALG != "" && options.Metadata.ALG != AlgEdDSA { + return JWK{}, fmt.Errorf("%w: ALG in metadata does not match ALG in X.509 certificate", errors.Join(ErrOptions, ErrX509Mismatch)) + } + options.Metadata.ALG = AlgEdDSA + } + + j := JWK{ + key: options.X509.X5C[0].PublicKey, + marshal: marshal, + options: options, + } + err = j.Validate() + if err != nil { + return JWK{}, fmt.Errorf("failed to validate JSON Web Key: %w", err) + } + return j, nil +} + +// NewJWKFromX5U uses the X.509 X5U information in the options to create a JWK. 
+func NewJWKFromX5U(options JWKOptions) (JWK, error) { + if options.X509.X5U == "" { + return JWK{}, fmt.Errorf("%w: no X.509 URI provided", ErrOptions) + } + u, err := url.ParseRequestURI(options.X509.X5U) + if err != nil { + return JWK{}, fmt.Errorf("failed to parse X5U URI: %w", errors.Join(ErrOptions, err)) + } + if !options.Validate.SkipX5UScheme && u.Scheme != "https" { + return JWK{}, fmt.Errorf("%w: X5U URI scheme must be https", errors.Join(ErrOptions)) + } + get := options.Validate.GetX5U + if get == nil { + get = DefaultGetX5U + } + certs, err := get(u) + if err != nil { + return JWK{}, fmt.Errorf("failed to get X5U URI: %w", err) + } + options.X509.X5C = certs + jwk, err := NewJWKFromX5C(options) + if err != nil { + return JWK{}, fmt.Errorf("failed to create JWK from fetched X5U assets: %w", err) + } + return jwk, nil +} + +// Key returns the public or private cryptographic key associated with the JWK. +func (j JWK) Key() any { + return j.key +} + +// Marshal returns Go type that can be marshalled into JSON. +func (j JWK) Marshal() JWKMarshal { + return j.marshal +} + +// X509 returns the X.509 certificate information for the JWK. +func (j JWK) X509() JWKX509Options { + return j.options.X509 +} + +// Validate validates the JWK. The JWK is automatically validated when created from a function in this package. +func (j JWK) Validate() error { + if j.options.Validate.SkipAll { + return nil + } + if !j.marshal.KTY.IANARegistered() { + return fmt.Errorf("%w: invalid or unsupported key type %q", ErrJWKValidation, j.marshal.KTY) + } + + if !j.options.Validate.SkipUse && !j.marshal.USE.IANARegistered() { + return fmt.Errorf("%w: invalid or unsupported key use %q", ErrJWKValidation, j.marshal.USE) + } + + if !j.options.Validate.SkipKeyOps { + for _, o := range j.marshal.KEYOPS { + if !o.IANARegistered() { + return fmt.Errorf("%w: invalid or unsupported key_opt %q", ErrJWKValidation, o) + } + } + } + + if !j.options.Validate.SkipMetadata { + if j.marshal.ALG != j.options.Metadata.ALG { + return fmt.Errorf("%w: ALG in marshal does not match ALG in options", errors.Join(ErrJWKValidation, ErrOptions)) + } + if j.marshal.KID != j.options.Metadata.KID { + return fmt.Errorf("%w: KID in marshal does not match KID in options", errors.Join(ErrJWKValidation, ErrOptions)) + } + if !slices.Equal(j.marshal.KEYOPS, j.options.Metadata.KEYOPS) { + return fmt.Errorf("%w: KEYOPS in marshal does not match KEYOPS in options", errors.Join(ErrJWKValidation, ErrOptions)) + } + if j.marshal.USE != j.options.Metadata.USE { + return fmt.Errorf("%w: USE in marshal does not match USE in options", errors.Join(ErrJWKValidation, ErrOptions)) + } + } + + if len(j.options.X509.X5C) > 0 { + cert := j.options.X509.X5C[0] + i := cert.PublicKey + switch k := j.key.(type) { + // ECDH keys are not used to sign certificates. 
+ case *ecdsa.PublicKey: + pub, ok := i.(*ecdsa.PublicKey) + if !ok { + return fmt.Errorf("%w: Golang key is type *ecdsa.Public but X.509 public key was of type %T", errors.Join(ErrJWKValidation, ErrX509Mismatch), i) + } + if !k.Equal(pub) { + return fmt.Errorf("%w: Golang *ecdsa.PublicKey does not match the X.509 public key", errors.Join(ErrJWKValidation, ErrX509Mismatch)) + } + case ed25519.PublicKey: + pub, ok := i.(ed25519.PublicKey) + if !ok { + return fmt.Errorf("%w: Golang key is type ed25519.PublicKey but X.509 public key was of type %T", errors.Join(ErrJWKValidation, ErrX509Mismatch), i) + } + if !bytes.Equal(k, pub) { + return fmt.Errorf("%w: Golang ed25519.PublicKey does not match the X.509 public key", errors.Join(ErrJWKValidation, ErrX509Mismatch)) + } + case *rsa.PublicKey: + pub, ok := i.(*rsa.PublicKey) + if !ok { + return fmt.Errorf("%w: Golang key is type *rsa.PublicKey but X.509 public key was of type %T", errors.Join(ErrJWKValidation, ErrX509Mismatch), i) + } + if !k.Equal(pub) { + return fmt.Errorf("%w: Golang *rsa.PublicKey does not match the X.509 public key", errors.Join(ErrJWKValidation, ErrX509Mismatch)) + } + default: + return fmt.Errorf("%w: Golang key is type %T, which is not supported, so it cannot be compared to given X.509 certificates", errors.Join(ErrJWKValidation, ErrUnsupportedKey, ErrX509Mismatch), j.key) + } + if cert.PublicKeyAlgorithm == x509.Ed25519 { + if j.marshal.ALG != AlgEdDSA { + return fmt.Errorf("%w: ALG in marshal does not match ALG in X.509 certificate", errors.Join(ErrJWKValidation, ErrX509Mismatch)) + } + } + if j.options.Validate.CheckX509ValidTime { + now := time.Now() + if now.Before(cert.NotBefore) { + return fmt.Errorf("%w: X.509 certificate is not yet valid", ErrJWKValidation) + } + if now.After(cert.NotAfter) { + return fmt.Errorf("%w: X.509 certificate is expired", ErrJWKValidation) + } + } + } + + marshalled, err := keyMarshal(j.key, j.options) + if err != nil { + return fmt.Errorf("failed to marshal JSON Web Key: %w", errors.Join(ErrJWKValidation, err)) + } + + // Remove automatically computed thumbprints if not set in given JWK. 
+ if j.marshal.X5T == "" { + marshalled.X5T = "" + } + if j.marshal.X5TS256 == "" { + marshalled.X5TS256 = "" + } + + canComputeThumbprint := len(j.marshal.X5C) > 0 + if j.marshal.X5T != marshalled.X5T && canComputeThumbprint { + return fmt.Errorf("%w: X5T in marshal does not match X5T in marshalled", ErrJWKValidation) + } + if j.marshal.X5TS256 != marshalled.X5TS256 && canComputeThumbprint { + return fmt.Errorf("%w: X5TS256 in marshal does not match X5TS256 in marshalled", ErrJWKValidation) + } + if j.marshal.CRV != marshalled.CRV { + return fmt.Errorf("%w: CRV in marshal does not match CRV in marshalled", ErrJWKValidation) + } + switch j.marshal.KTY { + case KtyEC: + err = cmpBase64Int(j.marshal.X, marshalled.X, j.options.Validate.StrictPadding) + if err != nil { + return fmt.Errorf("%w: X in marshal does not match X in marshalled", errors.Join(ErrJWKValidation, err)) + } + err = cmpBase64Int(j.marshal.Y, marshalled.Y, j.options.Validate.StrictPadding) + if err != nil { + return fmt.Errorf("%w: Y in marshal does not match Y in marshalled", errors.Join(ErrJWKValidation, err)) + } + err = cmpBase64Int(j.marshal.D, marshalled.D, j.options.Validate.StrictPadding) + if err != nil { + return fmt.Errorf("%w: D in marshal does not match D in marshalled", errors.Join(ErrJWKValidation, err)) + } + case KtyOKP: + if j.marshal.X != marshalled.X { + return fmt.Errorf("%w: X in marshal does not match X in marshalled", ErrJWKValidation) + } + if j.marshal.D != marshalled.D { + return fmt.Errorf("%w: D in marshal does not match D in marshalled", ErrJWKValidation) + } + case KtyRSA: + err = cmpBase64Int(j.marshal.D, marshalled.D, j.options.Validate.StrictPadding) + if err != nil { + return fmt.Errorf("%w: D in marshal does not match D in marshalled", errors.Join(ErrJWKValidation, err)) + } + err = cmpBase64Int(j.marshal.N, marshalled.N, j.options.Validate.StrictPadding) + if err != nil { + return fmt.Errorf("%w: N in marshal does not match N in marshalled", errors.Join(ErrJWKValidation, err)) + } + err = cmpBase64Int(j.marshal.E, marshalled.E, j.options.Validate.StrictPadding) + if err != nil { + return fmt.Errorf("%w: E in marshal does not match E in marshalled", errors.Join(ErrJWKValidation, err)) + } + err = cmpBase64Int(j.marshal.P, marshalled.P, j.options.Validate.StrictPadding) + if err != nil { + return fmt.Errorf("%w: P in marshal does not match P in marshalled", errors.Join(ErrJWKValidation, err)) + } + err = cmpBase64Int(j.marshal.Q, marshalled.Q, j.options.Validate.StrictPadding) + if err != nil { + return fmt.Errorf("%w: Q in marshal does not match Q in marshalled", errors.Join(ErrJWKValidation, err)) + } + err = cmpBase64Int(j.marshal.DP, marshalled.DP, j.options.Validate.StrictPadding) + if err != nil { + return fmt.Errorf("%w: DP in marshal does not match DP in marshalled", errors.Join(ErrJWKValidation, err)) + } + err = cmpBase64Int(j.marshal.DQ, marshalled.DQ, j.options.Validate.StrictPadding) + if err != nil { + return fmt.Errorf("%w: DQ in marshal does not match DQ in marshalled", errors.Join(ErrJWKValidation, err)) + } + if len(j.marshal.OTH) != len(marshalled.OTH) { + return fmt.Errorf("%w: OTH in marshal does not match OTH in marshalled", ErrJWKValidation) + } + for i, o := range j.marshal.OTH { + err = cmpBase64Int(o.R, marshalled.OTH[i].R, j.options.Validate.StrictPadding) + if err != nil { + return fmt.Errorf("%w: OTH index %d in marshal does not match OTH in marshalled", errors.Join(ErrJWKValidation, err), i) + } + err = cmpBase64Int(o.D, marshalled.OTH[i].D, 
j.options.Validate.StrictPadding) + if err != nil { + return fmt.Errorf("%w: OTH index %d in marshal does not match OTH in marshalled", errors.Join(ErrJWKValidation, err), i) + } + err = cmpBase64Int(o.T, marshalled.OTH[i].T, j.options.Validate.StrictPadding) + if err != nil { + return fmt.Errorf("%w: OTH index %d in marshal does not match OTH in marshalled", errors.Join(ErrJWKValidation, err), i) + } + } + case KtyOct: + err = cmpBase64Int(j.marshal.K, marshalled.K, j.options.Validate.StrictPadding) + if err != nil { + return fmt.Errorf("%w: K in marshal does not match K in marshalled", errors.Join(ErrJWKValidation, err)) + } + default: + return fmt.Errorf("%w: invalid or unsupported key type %q", ErrJWKValidation, j.marshal.KTY) + } + + // Saved for last because it may involve a network request. + if j.marshal.X5U != "" || j.options.X509.X5U != "" { + if j.marshal.X5U != j.options.X509.X5U { + return fmt.Errorf("%w: X5U in marshal does not match X5U in options", errors.Join(ErrJWKValidation, ErrOptions)) + } + u, err := url.ParseRequestURI(j.marshal.X5U) + if err != nil { + return fmt.Errorf("failed to parse X5U URI: %w", errors.Join(ErrJWKValidation, ErrOptions, err)) + } + if !j.options.Validate.SkipX5UScheme && u.Scheme != "https" { + return fmt.Errorf("%w: X5U URI scheme must be https", errors.Join(ErrJWKValidation, ErrOptions)) + } + if j.options.Validate.GetX5U != nil { + certs, err := j.options.Validate.GetX5U(u) + if err != nil { + return fmt.Errorf("failed to get X5U URI: %w", errors.Join(ErrJWKValidation, ErrOptions, err)) + } + if len(certs) == 0 { + return fmt.Errorf("%w: X5U URI did not return any certificates", errors.Join(ErrJWKValidation, ErrOptions)) + } + larger := certs + smaller := j.options.X509.X5C + if len(j.options.X509.X5C) > len(certs) { + larger = j.options.X509.X5C + smaller = certs + } + for i, c := range smaller { + if !c.Equal(larger[i]) { + return fmt.Errorf("%w: the X5C and X5U (remote resource) parameters are not a full or partial match", errors.Join(ErrJWKValidation, ErrOptions)) + } + } + } + } + + return nil +} + +// DefaultGetX5U is the default implementation of the GetX5U field for JWKValidateOptions. 
+func DefaultGetX5U(u *url.URL) ([]*x509.Certificate, error) { + timeout := time.Minute + ctx, cancel := context.WithTimeoutCause(context.Background(), timeout, fmt.Errorf("%w: timeout of %s reached", ErrGetX5U, timeout.String())) + defer cancel() + req, err := http.NewRequestWithContext(ctx, http.MethodGet, u.String(), nil) + if err != nil { + return nil, fmt.Errorf("failed to create X5U request: %w", errors.Join(ErrGetX5U, err)) + } + resp, err := http.DefaultClient.Do(req) + if err != nil { + return nil, fmt.Errorf("failed to do X5U request: %w", errors.Join(ErrGetX5U, err)) + } + defer resp.Body.Close() + if resp.StatusCode != http.StatusOK { + return nil, fmt.Errorf("%w: X5U request returned status code %d", ErrGetX5U, resp.StatusCode) + } + b, err := io.ReadAll(resp.Body) + if err != nil { + return nil, fmt.Errorf("failed to read X5U response body: %w", errors.Join(ErrGetX5U, err)) + } + certs, err := LoadCertificates(b) + if err != nil { + return nil, fmt.Errorf("failed to parse X5U response body: %w", errors.Join(ErrGetX5U, err)) + } + return certs, nil +} + +func cmpBase64Int(first, second string, strictPadding bool) error { + if first == second { + return nil + } + b, err := base64.RawURLEncoding.DecodeString(first) + if err != nil { + return fmt.Errorf("failed to decode Base64 raw URL decode first string: %w", err) + } + fLen := len(b) + f := new(big.Int).SetBytes(b) + b, err = base64.RawURLEncoding.DecodeString(second) + if err != nil { + return fmt.Errorf("failed to decode Base64 raw URL decode second string: %w", err) + } + sLen := len(b) + s := new(big.Int).SetBytes(b) + if f.Cmp(s) != 0 { + return fmt.Errorf("%w: the parsed integers do not match", ErrJWKValidation) + } + if strictPadding && fLen != sLen { + return fmt.Errorf("%w: the Base64 raw URL inputs do not have matching padding", errors.Join(ErrJWKValidation, ErrPadding)) + } + return nil +} diff --git a/vendor/github.com/MicahParks/jwkset/marshal.go b/vendor/github.com/MicahParks/jwkset/marshal.go new file mode 100644 index 0000000000..c604e22df1 --- /dev/null +++ b/vendor/github.com/MicahParks/jwkset/marshal.go @@ -0,0 +1,511 @@ +package jwkset + +import ( + "context" + "crypto/ecdh" + "crypto/ecdsa" + "crypto/ed25519" + "crypto/elliptic" + "crypto/rsa" + "crypto/sha1" + "crypto/sha256" + "crypto/x509" + "encoding/base64" + "errors" + "fmt" + "math" + "math/big" + "slices" + "strings" +) + +var ( + // ErrGetX5U indicates there was an error getting the X5U remote resource. + ErrGetX5U = errors.New("failed to get X5U via given URI") + // ErrJWKValidation indicates that a JWK failed to validate. + ErrJWKValidation = errors.New("failed to validate JWK") + // ErrKeyUnmarshalParameter indicates that a JWK's attributes are invalid and cannot be unmarshaled. + ErrKeyUnmarshalParameter = errors.New("unable to unmarshal JWK due to invalid attributes") + // ErrOptions indicates that the given options caused an error. + ErrOptions = errors.New("the given options caused an error") + // ErrUnsupportedKey indicates a key is not supported. + ErrUnsupportedKey = errors.New("unsupported key") + // ErrX509Mismatch indicates that the X.509 certificate does not match the key. + ErrX509Mismatch = errors.New("the X.509 certificate does not match Golang key type") +) + +// OtherPrimes is for RSA private keys that have more than 2 primes. 
+// https://www.rfc-editor.org/rfc/rfc7518#section-6.3.2.7 +type OtherPrimes struct { + R string `json:"r,omitempty"` // https://www.rfc-editor.org/rfc/rfc7518#section-6.3.2.7.1 + D string `json:"d,omitempty"` // https://www.rfc-editor.org/rfc/rfc7518#section-6.3.2.7.2 + T string `json:"t,omitempty"` // https://www.rfc-editor.org/rfc/rfc7518#section-6.3.2.7.3 +} + +// JWKMarshal is used to marshal or unmarshal a JSON Web Key. +// https://www.rfc-editor.org/rfc/rfc7517 +// https://www.rfc-editor.org/rfc/rfc7518 +// https://www.rfc-editor.org/rfc/rfc8037 +// +// You can find the full list at https://www.iana.org/assignments/jose/jose.xhtml under "JSON Web Key Parameters". +type JWKMarshal struct { + KTY KTY `json:"kty,omitempty"` // https://www.rfc-editor.org/rfc/rfc7517#section-4.1 + USE USE `json:"use,omitempty"` // https://www.rfc-editor.org/rfc/rfc7517#section-4.2 + KEYOPS []KEYOPS `json:"key_ops,omitempty"` // https://www.rfc-editor.org/rfc/rfc7517#section-4.3 + ALG ALG `json:"alg,omitempty"` // https://www.rfc-editor.org/rfc/rfc7517#section-4.4 and https://www.rfc-editor.org/rfc/rfc7518#section-4.1 + KID string `json:"kid,omitempty"` // https://www.rfc-editor.org/rfc/rfc7517#section-4.5 + X5U string `json:"x5u,omitempty"` // https://www.rfc-editor.org/rfc/rfc7517#section-4.6 + X5C []string `json:"x5c,omitempty"` // https://www.rfc-editor.org/rfc/rfc7517#section-4.7 + X5T string `json:"x5t,omitempty"` // https://www.rfc-editor.org/rfc/rfc7517#section-4.8 + X5TS256 string `json:"x5t#S256,omitempty"` // https://www.rfc-editor.org/rfc/rfc7517#section-4.9 + CRV CRV `json:"crv,omitempty"` // https://www.rfc-editor.org/rfc/rfc7518#section-6.2.1.1 and https://www.rfc-editor.org/rfc/rfc8037.html#section-2 + X string `json:"x,omitempty"` // https://www.rfc-editor.org/rfc/rfc7518#section-6.2.1.2 and https://www.rfc-editor.org/rfc/rfc8037.html#section-2 + Y string `json:"y,omitempty"` // https://www.rfc-editor.org/rfc/rfc7518#section-6.2.1.3 + D string `json:"d,omitempty"` // https://www.rfc-editor.org/rfc/rfc7518#section-6.3.2.1 and https://www.rfc-editor.org/rfc/rfc7518#section-6.2.2.1 and https://www.rfc-editor.org/rfc/rfc8037.html#section-2 + N string `json:"n,omitempty"` // https://www.rfc-editor.org/rfc/rfc7518#section-6.3.1.1 + E string `json:"e,omitempty"` // https://www.rfc-editor.org/rfc/rfc7518#section-6.3.1.2 + P string `json:"p,omitempty"` // https://www.rfc-editor.org/rfc/rfc7518#section-6.3.2.2 + Q string `json:"q,omitempty"` // https://www.rfc-editor.org/rfc/rfc7518#section-6.3.2.3 + DP string `json:"dp,omitempty"` // https://www.rfc-editor.org/rfc/rfc7518#section-6.3.2.4 + DQ string `json:"dq,omitempty"` // https://www.rfc-editor.org/rfc/rfc7518#section-6.3.2.5 + QI string `json:"qi,omitempty"` // https://www.rfc-editor.org/rfc/rfc7518#section-6.3.2.6 + OTH []OtherPrimes `json:"oth,omitempty"` // https://www.rfc-editor.org/rfc/rfc7518#section-6.3.2.7 + K string `json:"k,omitempty"` // https://www.rfc-editor.org/rfc/rfc7518#section-6.4.1 +} + +// JWKSMarshal is used to marshal or unmarshal a JSON Web Key Set. +type JWKSMarshal struct { + Keys []JWKMarshal `json:"keys"` +} + +// JWKSlice converts the JWKSMarshal to a []JWK. 
+func (j JWKSMarshal) JWKSlice() ([]JWK, error) { + slice := make([]JWK, len(j.Keys)) + for i, key := range j.Keys { + marshalOptions := JWKMarshalOptions{ + Private: true, + } + jwk, err := keyUnmarshal(key, marshalOptions, JWKValidateOptions{}) + if err != nil { + return nil, fmt.Errorf("failed to unmarshal JWK: %w", err) + } + slice[i] = jwk + } + return slice, nil +} + +// ToStorage converts the JWKSMarshal to a Storage. +func (j JWKSMarshal) ToStorage() (Storage, error) { + m := NewMemoryStorage() + jwks, err := j.JWKSlice() + if err != nil { + return nil, fmt.Errorf("failed to create a slice of JWK from JWKSMarshal: %w", err) + } + for _, jwk := range jwks { + err = m.KeyWrite(context.Background(), jwk) + if err != nil { + return nil, fmt.Errorf("failed to write JWK to storage: %w", err) + } + } + return m, nil +} + +func keyMarshal(key any, options JWKOptions) (JWKMarshal, error) { + m := JWKMarshal{} + m.ALG = options.Metadata.ALG + switch key := key.(type) { + case *ecdh.PublicKey: + pub := key.Bytes() + m.CRV = CrvX25519 + m.X = base64.RawURLEncoding.EncodeToString(pub) + m.KTY = KtyOKP + case *ecdh.PrivateKey: + pub := key.PublicKey().Bytes() + m.CRV = CrvX25519 + m.X = base64.RawURLEncoding.EncodeToString(pub) + m.KTY = KtyOKP + if options.Marshal.Private { + priv := key.Bytes() + m.D = base64.RawURLEncoding.EncodeToString(priv) + } + case *ecdsa.PrivateKey: + pub := key.PublicKey + m.CRV = CRV(pub.Curve.Params().Name) + l := uint(pub.Curve.Params().BitSize / 8) + if pub.Curve.Params().BitSize%8 != 0 { + l++ + } + m.X = bigIntToBase64RawURL(pub.X, l) + m.Y = bigIntToBase64RawURL(pub.Y, l) + m.KTY = KtyEC + if options.Marshal.Private { + params := key.Curve.Params() + f, _ := params.N.Float64() + l = uint(math.Ceil(math.Log2(f) / 8)) + m.D = bigIntToBase64RawURL(key.D, l) + } + case *ecdsa.PublicKey: + l := uint(key.Curve.Params().BitSize / 8) + if key.Curve.Params().BitSize%8 != 0 { + l++ + } + m.CRV = CRV(key.Curve.Params().Name) + m.X = bigIntToBase64RawURL(key.X, l) + m.Y = bigIntToBase64RawURL(key.Y, l) + m.KTY = KtyEC + case ed25519.PrivateKey: + pub := key.Public().(ed25519.PublicKey) + m.ALG = AlgEdDSA + m.CRV = CrvEd25519 + m.X = base64.RawURLEncoding.EncodeToString(pub) + m.KTY = KtyOKP + if options.Marshal.Private { + m.D = base64.RawURLEncoding.EncodeToString(key[:32]) + } + case ed25519.PublicKey: + m.ALG = AlgEdDSA + m.CRV = CrvEd25519 + m.X = base64.RawURLEncoding.EncodeToString(key) + m.KTY = KtyOKP + case *rsa.PrivateKey: + pub := key.PublicKey + m.E = bigIntToBase64RawURL(big.NewInt(int64(pub.E)), 0) + m.N = bigIntToBase64RawURL(pub.N, 0) + m.KTY = KtyRSA + if options.Marshal.Private { + m.D = bigIntToBase64RawURL(key.D, 0) + m.P = bigIntToBase64RawURL(key.Primes[0], 0) + m.Q = bigIntToBase64RawURL(key.Primes[1], 0) + m.DP = bigIntToBase64RawURL(key.Precomputed.Dp, 0) + m.DQ = bigIntToBase64RawURL(key.Precomputed.Dq, 0) + m.QI = bigIntToBase64RawURL(key.Precomputed.Qinv, 0) + if len(key.Precomputed.CRTValues) > 0 { + m.OTH = make([]OtherPrimes, len(key.Precomputed.CRTValues)) + for i := 0; i < len(key.Precomputed.CRTValues); i++ { + m.OTH[i] = OtherPrimes{ + D: bigIntToBase64RawURL(key.Precomputed.CRTValues[i].Exp, 0), + T: bigIntToBase64RawURL(key.Precomputed.CRTValues[i].Coeff, 0), + R: bigIntToBase64RawURL(key.Primes[i+2], 0), + } + } + } + } + case *rsa.PublicKey: + m.E = bigIntToBase64RawURL(big.NewInt(int64(key.E)), 0) + m.N = bigIntToBase64RawURL(key.N, 0) + m.KTY = KtyRSA + case []byte: + if options.Marshal.Private { + m.KTY = KtyOct + m.K = 
base64.RawURLEncoding.EncodeToString(key) + } else { + return JWKMarshal{}, fmt.Errorf("%w: incorrect options to marshal symmetric key (oct)", ErrOptions) + } + default: + return JWKMarshal{}, fmt.Errorf("%w: %T", ErrUnsupportedKey, key) + } + haveX5C := len(options.X509.X5C) > 0 + if haveX5C { + for i, cert := range options.X509.X5C { + m.X5C = append(m.X5C, base64.StdEncoding.EncodeToString(cert.Raw)) + if i == 0 { + h1 := sha1.Sum(cert.Raw) + m.X5T = base64.RawURLEncoding.EncodeToString(h1[:]) + h256 := sha256.Sum256(cert.Raw) + m.X5TS256 = base64.RawURLEncoding.EncodeToString(h256[:]) + } + } + } + m.KID = options.Metadata.KID + m.KEYOPS = options.Metadata.KEYOPS + m.USE = options.Metadata.USE + m.X5U = options.X509.X5U + return m, nil +} + +func keyUnmarshal(marshal JWKMarshal, options JWKMarshalOptions, validateOptions JWKValidateOptions) (JWK, error) { + marshalCopy := JWKMarshal{} + var key any + switch marshal.KTY { + case KtyEC: + if marshal.CRV == "" || marshal.X == "" || marshal.Y == "" { + return JWK{}, fmt.Errorf(`%w: %s requires parameters "crv", "x", and "y"`, ErrKeyUnmarshalParameter, KtyEC) + } + x, err := base64urlTrailingPadding(marshal.X) + if err != nil { + return JWK{}, fmt.Errorf(`failed to decode %s key parameter "x": %w`, KtyEC, err) + } + y, err := base64urlTrailingPadding(marshal.Y) + if err != nil { + return JWK{}, fmt.Errorf(`failed to decode %s key parameter "y": %w`, KtyEC, err) + } + publicKey := &ecdsa.PublicKey{ + X: new(big.Int).SetBytes(x), + Y: new(big.Int).SetBytes(y), + } + switch marshal.CRV { + case CrvP256: + publicKey.Curve = elliptic.P256() + case CrvP384: + publicKey.Curve = elliptic.P384() + case CrvP521: + publicKey.Curve = elliptic.P521() + default: + return JWK{}, fmt.Errorf("%w: unsupported curve type %q", ErrKeyUnmarshalParameter, marshal.CRV) + } + marshalCopy.CRV = marshal.CRV + marshalCopy.X = marshal.X + marshalCopy.Y = marshal.Y + if options.Private && marshal.D != "" { + d, err := base64urlTrailingPadding(marshal.D) + if err != nil { + return JWK{}, fmt.Errorf(`failed to decode %s key parameter "d": %w`, KtyEC, err) + } + privateKey := &ecdsa.PrivateKey{ + PublicKey: *publicKey, + D: new(big.Int).SetBytes(d), + } + key = privateKey + marshalCopy.D = marshal.D + } else { + key = publicKey + } + case KtyOKP: + if marshal.CRV == "" || marshal.X == "" { + return JWK{}, fmt.Errorf(`%w: %s requires parameters "crv" and "x"`, ErrKeyUnmarshalParameter, KtyOKP) + } + public, err := base64urlTrailingPadding(marshal.X) + if err != nil { + return JWK{}, fmt.Errorf(`failed to decode %s key parameter "x": %w`, KtyOKP, err) + } + marshalCopy.CRV = marshal.CRV + marshalCopy.X = marshal.X + var private []byte + if options.Private && marshal.D != "" { + private, err = base64urlTrailingPadding(marshal.D) + if err != nil { + return JWK{}, fmt.Errorf(`failed to decode %s key parameter "d": %w`, KtyOKP, err) + } + } + switch marshal.CRV { + case CrvEd25519: + if len(public) != ed25519.PublicKeySize { + return JWK{}, fmt.Errorf("%w: %s key should be %d bytes", ErrKeyUnmarshalParameter, KtyOKP, ed25519.PublicKeySize) + } + if options.Private && marshal.D != "" { + private = append(private, public...) 
+ if len(private) != ed25519.PrivateKeySize { + return JWK{}, fmt.Errorf("%w: %s key should be %d bytes", ErrKeyUnmarshalParameter, KtyOKP, ed25519.PrivateKeySize) + } + key = ed25519.PrivateKey(private) + marshalCopy.D = marshal.D + } else { + key = ed25519.PublicKey(public) + } + case CrvX25519: + const x25519PublicKeySize = 32 + if len(public) != x25519PublicKeySize { + return JWK{}, fmt.Errorf("%w: %s with curve %s public key should be %d bytes", ErrKeyUnmarshalParameter, KtyOKP, CrvEd25519, x25519PublicKeySize) + } + if options.Private && marshal.D != "" { + const x25519PrivateKeySize = 32 + if len(private) != x25519PrivateKeySize { + return JWK{}, fmt.Errorf("%w: %s with curve %s private key should be %d bytes", ErrKeyUnmarshalParameter, KtyOKP, CrvEd25519, x25519PrivateKeySize) + } + key, err = ecdh.X25519().NewPrivateKey(private) + if err != nil { + return JWK{}, fmt.Errorf("failed to create X25519 private key: %w", err) + } + marshalCopy.D = marshal.D + } else { + key, err = ecdh.X25519().NewPublicKey(public) + if err != nil { + return JWK{}, fmt.Errorf("failed to create X25519 public key: %w", err) + } + } + default: + return JWK{}, fmt.Errorf("%w: unsupported curve type %q", ErrKeyUnmarshalParameter, marshal.CRV) + } + case KtyRSA: + if marshal.N == "" || marshal.E == "" { + return JWK{}, fmt.Errorf(`%w: %s requires parameters "n" and "e"`, ErrKeyUnmarshalParameter, KtyRSA) + } + n, err := base64urlTrailingPadding(marshal.N) + if err != nil { + return JWK{}, fmt.Errorf(`failed to decode %s key parameter "n": %w`, KtyRSA, err) + } + e, err := base64urlTrailingPadding(marshal.E) + if err != nil { + return JWK{}, fmt.Errorf(`failed to decode %s key parameter "e": %w`, KtyRSA, err) + } + publicKey := rsa.PublicKey{ + N: new(big.Int).SetBytes(n), + E: int(new(big.Int).SetBytes(e).Uint64()), + } + marshalCopy.N = marshal.N + marshalCopy.E = marshal.E + if options.Private && marshal.D != "" && marshal.P != "" && marshal.Q != "" && marshal.DP != "" && marshal.DQ != "" && marshal.QI != "" { // TODO Only "d" is required, but if one of the others is present, they all must be. 
+ d, err := base64urlTrailingPadding(marshal.D) + if err != nil { + return JWK{}, fmt.Errorf(`failed to decode %s key parameter "d": %w`, KtyRSA, err) + } + p, err := base64urlTrailingPadding(marshal.P) + if err != nil { + return JWK{}, fmt.Errorf(`failed to decode %s key parameter "p": %w`, KtyRSA, err) + } + q, err := base64urlTrailingPadding(marshal.Q) + if err != nil { + return JWK{}, fmt.Errorf(`failed to decode %s key parameter "q": %w`, KtyRSA, err) + } + dp, err := base64urlTrailingPadding(marshal.DP) + if err != nil { + return JWK{}, fmt.Errorf(`failed to decode %s key parameter "dp": %w`, KtyRSA, err) + } + dq, err := base64urlTrailingPadding(marshal.DQ) + if err != nil { + return JWK{}, fmt.Errorf(`failed to decode %s key parameter "dq": %w`, KtyRSA, err) + } + qi, err := base64urlTrailingPadding(marshal.QI) + if err != nil { + return JWK{}, fmt.Errorf(`failed to decode %s key parameter "qi": %w`, KtyRSA, err) + } + var oth []rsa.CRTValue + primes := []*big.Int{ + new(big.Int).SetBytes(p), + new(big.Int).SetBytes(q), + } + if len(marshal.OTH) > 0 { + oth = make([]rsa.CRTValue, len(marshal.OTH)) + for i, otherPrimes := range marshal.OTH { + if otherPrimes.R == "" || otherPrimes.D == "" || otherPrimes.T == "" { + return JWK{}, fmt.Errorf(`%w: %s requires parameters "r", "d", and "t" for each "oth"`, ErrKeyUnmarshalParameter, KtyRSA) + } + othD, err := base64urlTrailingPadding(otherPrimes.D) + if err != nil { + return JWK{}, fmt.Errorf(`failed to decode %s key parameter "d": %w`, KtyRSA, err) + } + othT, err := base64urlTrailingPadding(otherPrimes.T) + if err != nil { + return JWK{}, fmt.Errorf(`failed to decode %s key parameter "t": %w`, KtyRSA, err) + } + othR, err := base64urlTrailingPadding(otherPrimes.R) + if err != nil { + return JWK{}, fmt.Errorf(`failed to decode %s key parameter "r": %w`, KtyRSA, err) + } + primes = append(primes, new(big.Int).SetBytes(othR)) + oth[i] = rsa.CRTValue{ + Exp: new(big.Int).SetBytes(othD), + Coeff: new(big.Int).SetBytes(othT), + R: new(big.Int).SetBytes(othR), + } + } + } + privateKey := &rsa.PrivateKey{ + PublicKey: publicKey, + D: new(big.Int).SetBytes(d), + Primes: primes, + Precomputed: rsa.PrecomputedValues{ + Dp: new(big.Int).SetBytes(dp), + Dq: new(big.Int).SetBytes(dq), + Qinv: new(big.Int).SetBytes(qi), + CRTValues: oth, + }, + } + err = privateKey.Validate() + if err != nil { + return JWK{}, fmt.Errorf(`failed to validate %s key: %w`, KtyRSA, err) + } + key = privateKey + marshalCopy.D = marshal.D + marshalCopy.P = marshal.P + marshalCopy.Q = marshal.Q + marshalCopy.DP = marshal.DP + marshalCopy.DQ = marshal.DQ + marshalCopy.QI = marshal.QI + marshalCopy.OTH = slices.Clone(marshal.OTH) + } else { + key = &publicKey + } + case KtyOct: + if options.Private { + if marshal.K == "" { + return JWK{}, fmt.Errorf(`%w: %s requires parameter "k"`, ErrKeyUnmarshalParameter, KtyOct) + } + k, err := base64urlTrailingPadding(marshal.K) + if err != nil { + return JWK{}, fmt.Errorf(`failed to decode %s key parameter "k": %w`, KtyOct, err) + } + key = k + marshalCopy.K = marshal.K + } else { + return JWK{}, fmt.Errorf("%w: incorrect options to unmarshal symmetric key (%s)", ErrOptions, KtyOct) + } + default: + return JWK{}, fmt.Errorf("%w: %s (kty)", ErrUnsupportedKey, marshal.KTY) + } + marshalCopy.KTY = marshal.KTY + x5c := make([]*x509.Certificate, len(marshal.X5C)) + for i, cert := range marshal.X5C { + raw, err := base64.StdEncoding.DecodeString(cert) + if err != nil { + return JWK{}, fmt.Errorf("failed to Base64 decode X.509 certificate: %w", 
err) + } + x5c[i], err = x509.ParseCertificate(raw) + if err != nil { + return JWK{}, fmt.Errorf("failed to parse X.509 certificate: %w", err) + } + } + jwkX509 := JWKX509Options{ + X5C: x5c, + X5U: marshal.X5U, + } + marshalCopy.X5C = slices.Clone(marshal.X5C) + marshalCopy.X5T = marshal.X5T + marshalCopy.X5TS256 = marshal.X5TS256 + marshalCopy.X5U = marshal.X5U + metadata := JWKMetadataOptions{ + ALG: marshal.ALG, + KID: marshal.KID, + KEYOPS: slices.Clone(marshal.KEYOPS), + USE: marshal.USE, + } + marshalCopy.ALG = marshal.ALG + marshalCopy.KID = marshal.KID + marshalCopy.KEYOPS = slices.Clone(marshal.KEYOPS) + marshalCopy.USE = marshal.USE + opts := JWKOptions{ + Metadata: metadata, + Marshal: options, + Validate: validateOptions, + X509: jwkX509, + } + j := JWK{ + key: key, + marshal: marshalCopy, + options: opts, + } + return j, nil +} + +// base64urlTrailingPadding removes trailing padding before decoding a string from base64url. Some non-RFC compliant +// JWKS contain padding at the end values for base64url encoded public keys. +// +// Trailing padding is required to be removed from base64url encoded keys. +// RFC 7517 defines base64url the same as RFC 7515 Section 2: +// https://datatracker.ietf.org/doc/html/rfc7517#section-1.1 +// https://datatracker.ietf.org/doc/html/rfc7515#section-2 +func base64urlTrailingPadding(s string) ([]byte, error) { + s = strings.TrimRight(s, "=") + return base64.RawURLEncoding.DecodeString(s) +} + +func bigIntToBase64RawURL(i *big.Int, l uint) string { + var b []byte + if l != 0 { + b = make([]byte, l) + i.FillBytes(b) + } else { + b = i.Bytes() + } + return base64.RawURLEncoding.EncodeToString(b) +} diff --git a/vendor/github.com/MicahParks/jwkset/storage.go b/vendor/github.com/MicahParks/jwkset/storage.go new file mode 100644 index 0000000000..3a057d2428 --- /dev/null +++ b/vendor/github.com/MicahParks/jwkset/storage.go @@ -0,0 +1,311 @@ +package jwkset + +import ( + "context" + "encoding/json" + "errors" + "fmt" + "net/http" + "net/url" + "slices" + "sync" + "time" +) + +var ( + // ErrKeyNotFound is returned by a Storage implementation when a key is not found. + ErrKeyNotFound = errors.New("key not found") + // ErrInvalidHTTPStatusCode is returned when the HTTP status code is invalid. + ErrInvalidHTTPStatusCode = errors.New("invalid HTTP status code") +) + +// Storage handles storage operations for a JWKSet. +type Storage interface { + // KeyDelete deletes a key from the storage. It will return ok as true if the key was present for deletion. + KeyDelete(ctx context.Context, keyID string) (ok bool, err error) + // KeyRead reads a key from the storage. If the key is not present, it returns ErrKeyNotFound. Any pointers returned + // should be considered read-only. + KeyRead(ctx context.Context, keyID string) (JWK, error) + // KeyReadAll reads a snapshot of all keys from storage. As with ReadKey, any pointers returned should be + // considered read-only. + KeyReadAll(ctx context.Context) ([]JWK, error) + // KeyWrite writes a key to the storage. If the key already exists, it will be overwritten. After writing a key, + // any pointers written should be considered owned by the underlying storage. + KeyWrite(ctx context.Context, jwk JWK) error + + // JSON creates the JSON representation of the JWKSet. + JSON(ctx context.Context) (json.RawMessage, error) + // JSONPublic creates the JSON representation of the public keys in JWKSet. 
+ JSONPublic(ctx context.Context) (json.RawMessage, error) + // JSONPrivate creates the JSON representation of the JWKSet public and private key material. + JSONPrivate(ctx context.Context) (json.RawMessage, error) + // JSONWithOptions creates the JSON representation of the JWKSet with the given options. These options override whatever + // options are set on the individual JWKs. + JSONWithOptions(ctx context.Context, marshalOptions JWKMarshalOptions, validationOptions JWKValidateOptions) (json.RawMessage, error) + // Marshal transforms the JWK Set's current state into a Go type that can be marshaled into JSON. + Marshal(ctx context.Context) (JWKSMarshal, error) + // MarshalWithOptions transforms the JWK Set's current state into a Go type that can be marshaled into JSON with the + // given options. These options override whatever options are set on the individual JWKs. + MarshalWithOptions(ctx context.Context, marshalOptions JWKMarshalOptions, validationOptions JWKValidateOptions) (JWKSMarshal, error) +} + +var _ Storage = &MemoryJWKSet{} + +type MemoryJWKSet struct { + set []JWK + mux sync.RWMutex +} + +// NewMemoryStorage creates a new in-memory Storage implementation. +func NewMemoryStorage() *MemoryJWKSet { + return &MemoryJWKSet{} +} + +func (m *MemoryJWKSet) KeyDelete(_ context.Context, keyID string) (ok bool, err error) { + m.mux.Lock() + defer m.mux.Unlock() + for i, jwk := range m.set { + if jwk.Marshal().KID == keyID { + m.set = append(m.set[:i], m.set[i+1:]...) + return true, nil + } + } + return ok, nil +} +func (m *MemoryJWKSet) KeyRead(_ context.Context, keyID string) (JWK, error) { + m.mux.RLock() + defer m.mux.RUnlock() + for _, jwk := range m.set { + if jwk.Marshal().KID == keyID { + return jwk, nil + } + } + return JWK{}, fmt.Errorf("%w: kid %q", ErrKeyNotFound, keyID) +} +func (m *MemoryJWKSet) KeyReadAll(_ context.Context) ([]JWK, error) { + m.mux.RLock() + defer m.mux.RUnlock() + return slices.Clone(m.set), nil +} +func (m *MemoryJWKSet) KeyWrite(_ context.Context, jwk JWK) error { + m.mux.Lock() + defer m.mux.Unlock() + m.set = append(m.set, jwk) + return nil +} +func (m *MemoryJWKSet) JSON(ctx context.Context) (json.RawMessage, error) { + jwks, err := m.Marshal(ctx) + if err != nil { + return nil, fmt.Errorf("failed to marshal JWK Set: %w", err) + } + return json.Marshal(jwks) +} +func (m *MemoryJWKSet) JSONPublic(ctx context.Context) (json.RawMessage, error) { + return m.JSONWithOptions(ctx, JWKMarshalOptions{}, JWKValidateOptions{}) +} +func (m *MemoryJWKSet) JSONPrivate(ctx context.Context) (json.RawMessage, error) { + marshalOptions := JWKMarshalOptions{ + Private: true, + } + return m.JSONWithOptions(ctx, marshalOptions, JWKValidateOptions{}) +} +func (m *MemoryJWKSet) JSONWithOptions(ctx context.Context, marshalOptions JWKMarshalOptions, validationOptions JWKValidateOptions) (json.RawMessage, error) { + jwks, err := m.MarshalWithOptions(ctx, marshalOptions, validationOptions) + if err != nil { + return nil, fmt.Errorf("failed to marshal JWK Set with options: %w", err) + } + return json.Marshal(jwks) +} +func (m *MemoryJWKSet) Marshal(ctx context.Context) (JWKSMarshal, error) { + keys, err := m.KeyReadAll(ctx) + if err != nil { + return JWKSMarshal{}, fmt.Errorf("failed to read snapshot of all keys from storage: %w", err) + } + jwks := JWKSMarshal{} + for _, key := range keys { + jwks.Keys = append(jwks.Keys, key.Marshal()) + } + return jwks, nil +} +func (m *MemoryJWKSet) MarshalWithOptions(ctx context.Context, marshalOptions JWKMarshalOptions, 
validationOptions JWKValidateOptions) (JWKSMarshal, error) { + jwks := JWKSMarshal{} + + keys, err := m.KeyReadAll(ctx) + if err != nil { + return JWKSMarshal{}, fmt.Errorf("failed to read snapshot of all keys from storage: %w", err) + } + + for _, key := range keys { + options := key.options + options.Marshal = marshalOptions + options.Validate = validationOptions + marshal, err := keyMarshal(key.Key(), options) + if err != nil { + if errors.Is(err, ErrOptions) { + continue + } + return JWKSMarshal{}, fmt.Errorf("failed to marshal key: %w", err) + } + jwks.Keys = append(jwks.Keys, marshal) + } + + return jwks, nil +} + +// HTTPClientStorageOptions are used to configure the behavior of NewStorageFromHTTP. +type HTTPClientStorageOptions struct { + // Client is the HTTP client to use for requests. + // + // This defaults to http.DefaultClient. + Client *http.Client + + // Ctx is used when performing HTTP requests. It is also used to end the refresh goroutine when it's no longer + // needed. + // + // This defaults to context.Background(). + Ctx context.Context + + // HTTPExpectedStatus is the expected HTTP status code for the HTTP request. + // + // This defaults to http.StatusOK. + HTTPExpectedStatus int + + // HTTPMethod is the HTTP method to use for the HTTP request. + // + // This defaults to http.MethodGet. + HTTPMethod string + + // HTTPTimeout is the timeout for the HTTP request. When the Ctx option is also provided, this value is used for a + // child context. + // + // This defaults to time.Minute. + HTTPTimeout time.Duration + + // NoErrorReturnFirstHTTPReq will create the Storage without error if the first HTTP request fails. + NoErrorReturnFirstHTTPReq bool + + // RefreshErrorHandler is a function that consumes errors that happen during an HTTP refresh. This is only effectual + // if RefreshInterval is set. + // + // If NoErrorReturnFirstHTTPReq is set, this function will be called when if the first HTTP request fails. + RefreshErrorHandler func(ctx context.Context, err error) + + // RefreshInterval is the interval at which the HTTP URL is refreshed and the JWK Set is processed. This option will + // launch a "refresh goroutine" to refresh the remote HTTP resource at the given interval. + // + // Provide the Ctx option to end the goroutine when it's no longer needed. + RefreshInterval time.Duration + + // ValidateOptions are the options to use when validating the JWKs. + ValidateOptions JWKValidateOptions +} + +type httpStorage struct { + options HTTPClientStorageOptions + refresh func(ctx context.Context) error + Storage +} + +// NewStorageFromHTTP creates a new Storage implementation that processes a remote HTTP resource for a JWK Set. If +// the RefreshInterval option is not set, the remote HTTP resource will be requested and processed before returning. If +// the RefreshInterval option is set, a background goroutine will be launched to refresh the remote HTTP resource and +// not block the return of this function. 
+func NewStorageFromHTTP(remoteJWKSetURL string, options HTTPClientStorageOptions) (Storage, error) { + if options.Client == nil { + options.Client = http.DefaultClient + } + if options.Ctx == nil { + options.Ctx = context.Background() + } + if options.HTTPExpectedStatus == 0 { + options.HTTPExpectedStatus = http.StatusOK + } + if options.HTTPTimeout == 0 { + options.HTTPTimeout = time.Minute + } + if options.HTTPMethod == "" { + options.HTTPMethod = http.MethodGet + } + store := NewMemoryStorage() + _, err := url.ParseRequestURI(remoteJWKSetURL) + if err != nil { + return nil, fmt.Errorf("failed to parse given URL %q: %w", remoteJWKSetURL, err) + } + + refresh := func(ctx context.Context) error { + req, err := http.NewRequestWithContext(ctx, options.HTTPMethod, remoteJWKSetURL, nil) + if err != nil { + return fmt.Errorf("failed to create HTTP request for JWK Set refresh: %w", err) + } + resp, err := options.Client.Do(req) + if err != nil { + return fmt.Errorf("failed to perform HTTP request for JWK Set refresh: %w", err) + } + //goland:noinspection GoUnhandledErrorResult + defer resp.Body.Close() + if resp.StatusCode != options.HTTPExpectedStatus { + return fmt.Errorf("%w: %d", ErrInvalidHTTPStatusCode, resp.StatusCode) + } + var jwks JWKSMarshal + err = json.NewDecoder(resp.Body).Decode(&jwks) + if err != nil { + return fmt.Errorf("failed to decode JWK Set response: %w", err) + } + store.mux.Lock() + defer store.mux.Unlock() + store.set = make([]JWK, len(jwks.Keys)) // Clear local cache in case of key revocation. + for i, marshal := range jwks.Keys { + marshalOptions := JWKMarshalOptions{ + Private: true, + } + jwk, err := NewJWKFromMarshal(marshal, marshalOptions, options.ValidateOptions) + if err != nil { + return fmt.Errorf("failed to create JWK from JWK Marshal: %w", err) + } + store.set[i] = jwk + } + return nil + } + + if options.RefreshInterval != 0 { + go func() { // Refresh goroutine. + ticker := time.NewTicker(options.RefreshInterval) + defer ticker.Stop() + for { + select { + case <-options.Ctx.Done(): + return + case <-ticker.C: + ctx, cancel := context.WithTimeout(options.Ctx, options.HTTPTimeout) + err := refresh(ctx) + cancel() + if err != nil && options.RefreshErrorHandler != nil { + options.RefreshErrorHandler(ctx, err) + } + } + } + }() + } + + s := httpStorage{ + options: options, + refresh: refresh, + Storage: store, + } + + ctx, cancel := context.WithTimeout(options.Ctx, options.HTTPTimeout) + defer cancel() + err = refresh(ctx) + cancel() + if err != nil { + if options.NoErrorReturnFirstHTTPReq { + if options.RefreshErrorHandler != nil { + options.RefreshErrorHandler(ctx, err) + } + return s, nil + } + return nil, fmt.Errorf("failed to perform first HTTP request for JWK Set: %w", err) + } + + return s, nil +} diff --git a/vendor/github.com/MicahParks/jwkset/x509.go b/vendor/github.com/MicahParks/jwkset/x509.go new file mode 100644 index 0000000000..b89a3a6ea9 --- /dev/null +++ b/vendor/github.com/MicahParks/jwkset/x509.go @@ -0,0 +1,125 @@ +package jwkset + +import ( + "crypto/ecdh" + "crypto/ecdsa" + "crypto/ed25519" + "crypto/rsa" + "crypto/x509" + "encoding/pem" + "errors" + "fmt" +) + +var ( + // ErrX509Infer is returned when the key type cannot be inferred from the PEM block type. + ErrX509Infer = errors.New("failed to infer X509 key type") +) + +// LoadCertificate loads an X509 certificate from a PEM block. 
+func LoadCertificate(pemBlock []byte) (*x509.Certificate, error) { + cert, err := x509.ParseCertificate(pemBlock) + if err != nil { + return nil, fmt.Errorf("failed to parse certificates: %w", err) + } + switch cert.PublicKey.(type) { + case *ecdsa.PublicKey, ed25519.PublicKey, *rsa.PublicKey: + default: + return nil, fmt.Errorf("%w: %T", ErrUnsupportedKey, cert.PublicKey) + } + return cert, nil +} + +// LoadCertificates loads X509 certificates from raw PEM data. It can be useful in loading X5U remote resources. +func LoadCertificates(rawPEM []byte) ([]*x509.Certificate, error) { + b := make([]byte, 0) + for { + block, rest := pem.Decode(rawPEM) + if block == nil { + break + } + rawPEM = rest + if block.Type == "CERTIFICATE" { + b = append(b, block.Bytes...) + } + } + certs, err := x509.ParseCertificates(b) + if err != nil { + return nil, fmt.Errorf("failed to parse certificates: %w", err) + } + for _, cert := range certs { + switch cert.PublicKey.(type) { + case *ecdsa.PublicKey, ed25519.PublicKey, *rsa.PublicKey: + default: + return nil, fmt.Errorf("%w: %T", ErrUnsupportedKey, cert.PublicKey) + } + } + return certs, nil +} + +// LoadX509KeyInfer loads an X509 key from a PEM block. +func LoadX509KeyInfer(pemBlock *pem.Block) (key any, err error) { + switch pemBlock.Type { + case "EC PRIVATE KEY": + key, err = loadECPrivate(pemBlock) + case "RSA PRIVATE KEY": + key, err = loadPKCS1Private(pemBlock) + case "RSA PUBLIC KEY": + key, err = loadPKCS1Public(pemBlock) + case "PRIVATE KEY": + key, err = loadPKCS8Private(pemBlock) + case "PUBLIC KEY": + key, err = loadPKIXPublic(pemBlock) + default: + return nil, ErrX509Infer + } + if err != nil { + return nil, fmt.Errorf("failed to load key from inferred format %q: %w", key, err) + } + return key, nil +} +func loadECPrivate(pemBlock *pem.Block) (priv *ecdsa.PrivateKey, err error) { + priv, err = x509.ParseECPrivateKey(pemBlock.Bytes) + if err != nil { + return nil, fmt.Errorf("failed to parse EC private key: %w", err) + } + return priv, nil +} +func loadPKCS1Public(pemBlock *pem.Block) (pub *rsa.PublicKey, err error) { + pub, err = x509.ParsePKCS1PublicKey(pemBlock.Bytes) + if err != nil { + return nil, fmt.Errorf("failed to parse PKCS1 public key: %w", err) + } + return pub, nil +} +func loadPKCS1Private(pemBlock *pem.Block) (priv *rsa.PrivateKey, err error) { + priv, err = x509.ParsePKCS1PrivateKey(pemBlock.Bytes) + if err != nil { + return nil, fmt.Errorf("failed to parse PKCS1 private key: %w", err) + } + return priv, nil +} +func loadPKCS8Private(pemBlock *pem.Block) (priv any, err error) { + priv, err = x509.ParsePKCS8PrivateKey(pemBlock.Bytes) + if err != nil { + return nil, fmt.Errorf("failed to parse PKCS8 private key: %w", err) + } + switch priv.(type) { + case *ecdh.PrivateKey, *ecdsa.PrivateKey, ed25519.PrivateKey, *rsa.PrivateKey: + default: + return nil, fmt.Errorf("%w: %T", ErrUnsupportedKey, priv) + } + return priv, nil +} +func loadPKIXPublic(pemBlock *pem.Block) (pub any, err error) { + pub, err = x509.ParsePKIXPublicKey(pemBlock.Bytes) + if err != nil { + return nil, fmt.Errorf("failed to parse PKIX public key: %w", err) + } + switch pub.(type) { + case *ecdh.PublicKey, *ecdsa.PublicKey, ed25519.PublicKey, *rsa.PublicKey: + default: + return nil, fmt.Errorf("%w: %T", ErrUnsupportedKey, pub) + } + return pub, nil +} diff --git a/vendor/github.com/MicahParks/jwkset/x509_gen.sh b/vendor/github.com/MicahParks/jwkset/x509_gen.sh new file mode 100644 index 0000000000..79cc315c14 --- /dev/null +++ 
b/vendor/github.com/MicahParks/jwkset/x509_gen.sh @@ -0,0 +1,15 @@ +# OpenSSL 3.0.10 1 Aug 2023 (Library: OpenSSL 3.0.10 1 Aug 2023) +openssl req -newkey EC -pkeyopt ec_paramgen_curve:P-521 -noenc -keyout ec521.pem -x509 -out ec521.crt -subj "/C=US/ST=Virginia/L=Richmond/O=Micah Parks/OU=Self/CN=example.com" +openssl req -newkey ED25519 -noenc -keyout ed25519.pem -x509 -out ed25519.crt -subj "/C=US/ST=Virginia/L=Richmond/O=Micah Parks/OU=Self/CN=example.com" +openssl req -newkey RSA:4096 -noenc -keyout rsa4096.pem -x509 -out rsa4096.crt -subj "/C=US/ST=Virginia/L=Richmond/O=Micah Parks/OU=Self/CN=example.com" + +openssl pkey -in ec521.pem -pubout -out ec521pub.pem +openssl pkey -in ed25519.pem -pubout -out ed25519pub.pem +openssl pkey -in rsa4096.pem -pubout -out rsa4096pub.pem + +# For the "RSA PRIVATE KEY" (PKCS#1) and "EC PRIVATE KEY" (SEC1) formats, the PEM files are generated using the +# cmd/gen_pkcs1 and cmd/gen_ec Golang programs, respectively. + +openssl dsaparam -out dsaparam.pem 2048 +openssl gendsa -out dsa.pem dsaparam.pem +openssl dsa -in dsa.pem -pubout -out dsa_pub.pem diff --git a/vendor/github.com/MicahParks/keyfunc/v3/LICENSE b/vendor/github.com/MicahParks/keyfunc/v3/LICENSE new file mode 100644 index 0000000000..06dd4f2104 --- /dev/null +++ b/vendor/github.com/MicahParks/keyfunc/v3/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2021 Micah Parks + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/vendor/github.com/MicahParks/keyfunc/v3/README.md b/vendor/github.com/MicahParks/keyfunc/v3/README.md new file mode 100644 index 0000000000..e6e304dc39 --- /dev/null +++ b/vendor/github.com/MicahParks/keyfunc/v3/README.md @@ -0,0 +1,81 @@ +[![Go Reference](https://pkg.go.dev/badge/github.com/MicahParks/keyfunc/v3.svg)](https://pkg.go.dev/github.com/MicahParks/keyfunc/v3) + +# keyfunc + +The purpose of this package is to provide a +[`jwt.Keyfunc`](https://pkg.go.dev/github.com/golang-jwt/jwt/v5#Keyfunc) for the +[github.com/golang-jwt/jwt/v5](https://github.com/golang-jwt/jwt) package using a JSON Web Key Set (JWK Set) for parsing +and verifying JSON Web Tokens (JWTs). 
+ +It's common for an identity providers, particularly those +using [OAuth 2.0](https://datatracker.ietf.org/doc/html/rfc6749) +or [OpenID Connect](https://openid.net/developers/how-connect-works/), such +as [Keycloak](https://github.com/MicahParks/keyfunc/blob/master/examples/keycloak/main.go) +or [Amazon Cognito (AWS)](https://github.com/MicahParks/keyfunc/blob/master/examples/aws_cognito/main.go) to expose a +JWK Set via an HTTPS endpoint. This package has the ability to consume that JWK Set and produce a +[`jwt.Keyfunc`](https://pkg.go.dev/github.com/golang-jwt/jwt/v5#Keyfunc). It is important that a JWK Set endpoint is +using HTTPS to ensure the keys are from the correct trusted source. + +## Basic usage + +For complete examples, please see the `examples` directory. + +```go +import "github.com/MicahParks/keyfunc/v3" +``` + +### Step 1: Create the `keyfunc.Keyfunc` + +The below example is for a remote HTTP resource. +See [`examples/json/main.go`](https://github.com/MicahParks/keyfunc/blob/master/examples/json/main.go) for a JSON +example. + +```go +// Create the keyfunc.Keyfunc. +k, err := keyfunc.NewDefaultCtx(ctx, []string{server.URL}) // Context is used to end the refresh goroutine. +if err != nil { + log.Fatalf("Failed to create a keyfunc.Keyfunc from the server's URL.\nError: %s", err) +} +``` + +When using the `keyfunc.NewDefault` function, the JWK Set will be automatically refreshed using +[`jwkset.NewDefaultHTTPClient`](https://pkg.go.dev/github.com/MicahParks/jwkset#NewHTTPClient). This does launch a " +refresh goroutine". If you want the ability to end this goroutine, use the `keyfunc.NewDefaultCtx` function. + +It is also possible to create a `keyfunc.Keyfunc` from given keys like HMAC shared secrets. See `examples/hmac/main.go`. + +### Step 2: Use the `keyfunc.Keyfunc` to parse and verify JWTs + +```go +// Parse the JWT. +parsed, err := jwt.Parse(signed, k.Keyfunc) +if err != nil { + log.Fatalf("Failed to parse the JWT.\nError: %s", err) +} +``` + +## Additional features + +This project's primary purpose is to provide a [`jwt.Keyfunc`](https://pkg.go.dev/github.com/golang-jwt/jwt/v5#Keyfunc) +implementation for JWK Sets. + +Since version `3.X.X`, this project has become a thin wrapper +around [github.com/MicahParks/jwkset](https://github.com/MicahParks/jwkset). Newer versions contain a superset of +features available in versions `2.X.X` and earlier, but some of the deep customization has been moved to the `jwkset` +project. The intention behind this is to make `keyfunc` easier to use for most use cases. + +Access the [`jwkset.Storage`](https://pkg.go.dev/github.com/MicahParks/jwkset#Storage) from a `keyfunc.Keyfunc` via +the `.Storage()` method. Using the [github.com/MicahParks/jwkset](https://github.com/MicahParks/jwkset) package +provides the below features, and more: + +* An HTTP client that automatically updates one or more remote JWK Set resources. +* An automatic refresh of remote HTTP resources when an unknown key ID (`kid`) is encountered. +* X.509 URIs or embedded [certificate chains](https://pkg.go.dev/crypto/x509#Certificate), when a JWK contains them. +* Support for private asymmetric keys. +* Specified key operations and usage. + +## Related projects + +### [`github.com/MicahParks/jwkset`](https://github.com/MicahParks/jwkset): + +A JWK Set implementation. The `keyfunc` project is a wrapper around this project. 
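+
+For orientation, the two steps from "Basic usage" above can be combined into a single program. The following is a minimal, hedged sketch rather than an example shipped with this package: the JWK Set URL and the signed token are placeholders that would be replaced with values from your identity provider, and the program assumes that endpoint is reachable over HTTPS.
+
+```go
+package main
+
+import (
+	"context"
+	"log"
+
+	"github.com/MicahParks/keyfunc/v3"
+	"github.com/golang-jwt/jwt/v5"
+)
+
+func main() {
+	// Placeholder JWK Set URL; point this at your identity provider's HTTPS endpoint.
+	jwksURL := "https://your-idp.example.com/.well-known/jwks.json"
+
+	// The context ends the background refresh goroutine when main returns.
+	ctx, cancel := context.WithCancel(context.Background())
+	defer cancel()
+
+	// Step 1: create the keyfunc.Keyfunc (this performs the first JWK Set fetch).
+	k, err := keyfunc.NewDefaultCtx(ctx, []string{jwksURL})
+	if err != nil {
+		log.Fatalf("Failed to create a keyfunc.Keyfunc from the server's URL.\nError: %s", err)
+	}
+
+	// Step 2: parse and verify a JWT. signedJWT is a placeholder for a token issued
+	// by the identity provider configured above.
+	signedJWT := "<compact JWS from your identity provider>"
+	parsed, err := jwt.Parse(signedJWT, k.Keyfunc)
+	if err != nil {
+		log.Fatalf("Failed to parse the JWT.\nError: %s", err)
+	}
+	log.Printf("Token valid: %v", parsed.Valid)
+}
+```
+
+With placeholders filled in, the keyfunc.Keyfunc selects the verification key by the token's `kid` header and checks it against the JWK's `alg` and `use` parameters, as implemented in `keyfunc.go` below.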
diff --git a/vendor/github.com/MicahParks/keyfunc/v3/keyfunc.go b/vendor/github.com/MicahParks/keyfunc/v3/keyfunc.go new file mode 100644 index 0000000000..1725731d6d --- /dev/null +++ b/vendor/github.com/MicahParks/keyfunc/v3/keyfunc.go @@ -0,0 +1,177 @@ +package keyfunc + +import ( + "context" + "crypto" + "encoding/json" + "errors" + "fmt" + + "github.com/MicahParks/jwkset" + "github.com/golang-jwt/jwt/v5" +) + +var ( + // ErrKeyfunc is returned when a keyfunc error occurs. + ErrKeyfunc = errors.New("failed keyfunc") +) + +// Keyfunc is meant to be used as the jwt.Keyfunc function for github.com/golang-jwt/jwt/v5. It uses +// github.com/MicahParks/jwkset as a JWK Set storage. +type Keyfunc interface { + Keyfunc(token *jwt.Token) (any, error) + KeyfuncCtx(ctx context.Context) jwt.Keyfunc + Storage() jwkset.Storage +} + +// Options are used to create a new Keyfunc. +type Options struct { + Ctx context.Context + Storage jwkset.Storage + UseWhitelist []jwkset.USE +} + +type keyfunc struct { + ctx context.Context + storage jwkset.Storage + useWhitelist []jwkset.USE +} + +// New creates a new Keyfunc. +func New(options Options) (Keyfunc, error) { + ctx := options.Ctx + if ctx == nil { + ctx = context.Background() + } + if options.Storage == nil { + return nil, fmt.Errorf("%w: no JWK Set storage given in options", ErrKeyfunc) + } + k := keyfunc{ + ctx: ctx, + storage: options.Storage, + useWhitelist: options.UseWhitelist, + } + return k, nil +} + +// NewDefault creates a new Keyfunc with a default JWK Set storage and options. +// +// This will launch "refresh goroutine" to automatically refresh the remote HTTP resources. +func NewDefault(urls []string) (Keyfunc, error) { + return NewDefaultCtx(context.Background(), urls) +} + +// NewDefaultCtx creates a new Keyfunc with a default JWK Set storage and options. The context is used to end the +// "refresh goroutine". +// +// This will launch "refresh goroutine" to automatically refresh the remote HTTP resources. +func NewDefaultCtx(ctx context.Context, urls []string) (Keyfunc, error) { + client, err := jwkset.NewDefaultHTTPClientCtx(ctx, urls) + if err != nil { + return nil, err + } + options := Options{ + Storage: client, + } + return New(options) +} + +// NewJWKJSON creates a new Keyfunc from raw JWK JSON. +func NewJWKJSON(raw json.RawMessage) (Keyfunc, error) { + marshalOptions := jwkset.JWKMarshalOptions{ + Private: true, + } + jwk, err := jwkset.NewJWKFromRawJSON(raw, marshalOptions, jwkset.JWKValidateOptions{}) + if err != nil { + return nil, fmt.Errorf("%w: could not create JWK from raw JSON", errors.Join(err, ErrKeyfunc)) + } + store := jwkset.NewMemoryStorage() + err = store.KeyWrite(context.Background(), jwk) + if err != nil { + return nil, fmt.Errorf("%w: could not write JWK to storage", errors.Join(err, ErrKeyfunc)) + } + options := Options{ + Storage: store, + } + return New(options) +} + +// NewJWKSetJSON creates a new Keyfunc from raw JWK Set JSON. 
+func NewJWKSetJSON(raw json.RawMessage) (Keyfunc, error) { + var jwks jwkset.JWKSMarshal + err := json.Unmarshal(raw, &jwks) + if err != nil { + return nil, fmt.Errorf("%w: could not unmarshal raw JWK Set JSON", errors.Join(err, ErrKeyfunc)) + } + store, err := jwks.ToStorage() + if err != nil { + return nil, fmt.Errorf("%w: could not create JWK Set storage", errors.Join(err, ErrKeyfunc)) + } + options := Options{ + Storage: store, + } + return New(options) +} + +func (k keyfunc) KeyfuncCtx(ctx context.Context) jwt.Keyfunc { + return func(token *jwt.Token) (any, error) { + kidInter, ok := token.Header[jwkset.HeaderKID] + if !ok { + return nil, fmt.Errorf("%w: could not find kid in JWT header", ErrKeyfunc) + } + kid, ok := kidInter.(string) + if !ok { + return nil, fmt.Errorf("%w: could not convert kid in JWT header to string", ErrKeyfunc) + } + algInter, ok := token.Header["alg"] + if !ok { + return nil, fmt.Errorf("%w: could not find alg in JWT header", ErrKeyfunc) + } + alg, ok := algInter.(string) + if !ok { + // For test coverage purposes, this should be impossible to reach because the JWT package rejects a token + // without an alg parameter in the header before calling jwt.Keyfunc. + return nil, fmt.Errorf(`%w: the JWT header did not contain the "alg" parameter, which is required by RFC 7515 section 4.1.1`, ErrKeyfunc) + } + + jwk, err := k.storage.KeyRead(ctx, kid) + if err != nil { + return nil, fmt.Errorf("%w: could not read JWK from storage", errors.Join(err, ErrKeyfunc)) + } + + if a := jwk.Marshal().ALG.String(); a != "" && a != alg { + return nil, fmt.Errorf(`%w: JWK "alg" parameter value %q does not match token "alg" parameter value %q`, ErrKeyfunc, a, alg) + } + if len(k.useWhitelist) > 0 { + found := false + for _, u := range k.useWhitelist { + if jwk.Marshal().USE == u { + found = true + break + } + } + if !found { + return nil, fmt.Errorf(`%w: JWK "use" parameter value %q is not in whitelist`, ErrKeyfunc, jwk.Marshal().USE) + } + } + + type publicKeyer interface { + Public() crypto.PublicKey + } + + key := jwk.Key() + pk, ok := key.(publicKeyer) + if ok { + key = pk.Public() + } + + return key, nil + } +} +func (k keyfunc) Keyfunc(token *jwt.Token) (any, error) { + keyF := k.KeyfuncCtx(k.ctx) + return keyF(token) +} +func (k keyfunc) Storage() jwkset.Storage { + return k.storage +} diff --git a/vendor/github.com/aymerick/douceur/LICENSE b/vendor/github.com/aymerick/douceur/LICENSE new file mode 100644 index 0000000000..6ce87cd374 --- /dev/null +++ b/vendor/github.com/aymerick/douceur/LICENSE @@ -0,0 +1,22 @@ +The MIT License (MIT) + +Copyright (c) 2015 Aymerick JEHANNE + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + diff --git a/vendor/github.com/aymerick/douceur/css/declaration.go b/vendor/github.com/aymerick/douceur/css/declaration.go new file mode 100644 index 0000000000..61d29d3359 --- /dev/null +++ b/vendor/github.com/aymerick/douceur/css/declaration.go @@ -0,0 +1,60 @@ +package css + +import "fmt" + +// Declaration represents a parsed style property +type Declaration struct { + Property string + Value string + Important bool +} + +// NewDeclaration instanciates a new Declaration +func NewDeclaration() *Declaration { + return &Declaration{} +} + +// Returns string representation of the Declaration +func (decl *Declaration) String() string { + return decl.StringWithImportant(true) +} + +// StringWithImportant returns string representation with optional !important part +func (decl *Declaration) StringWithImportant(option bool) string { + result := fmt.Sprintf("%s: %s", decl.Property, decl.Value) + + if option && decl.Important { + result += " !important" + } + + result += ";" + + return result +} + +// Equal returns true if both Declarations are equals +func (decl *Declaration) Equal(other *Declaration) bool { + return (decl.Property == other.Property) && (decl.Value == other.Value) && (decl.Important == other.Important) +} + +// +// DeclarationsByProperty +// + +// DeclarationsByProperty represents sortable style declarations +type DeclarationsByProperty []*Declaration + +// Implements sort.Interface +func (declarations DeclarationsByProperty) Len() int { + return len(declarations) +} + +// Implements sort.Interface +func (declarations DeclarationsByProperty) Swap(i, j int) { + declarations[i], declarations[j] = declarations[j], declarations[i] +} + +// Implements sort.Interface +func (declarations DeclarationsByProperty) Less(i, j int) bool { + return declarations[i].Property < declarations[j].Property +} diff --git a/vendor/github.com/aymerick/douceur/css/rule.go b/vendor/github.com/aymerick/douceur/css/rule.go new file mode 100644 index 0000000000..b5a44b5429 --- /dev/null +++ b/vendor/github.com/aymerick/douceur/css/rule.go @@ -0,0 +1,230 @@ +package css + +import ( + "fmt" + "strings" +) + +const ( + indentSpace = 2 +) + +// RuleKind represents a Rule kind +type RuleKind int + +// Rule kinds +const ( + QualifiedRule RuleKind = iota + AtRule +) + +// At Rules than have Rules inside their block instead of Declarations +var atRulesWithRulesBlock = []string{ + "@document", "@font-feature-values", "@keyframes", "@media", "@supports", +} + +// Rule represents a parsed CSS rule +type Rule struct { + Kind RuleKind + + // At Rule name (eg: "@media") + Name string + + // Raw prelude + Prelude string + + // Qualified Rule selectors parsed from prelude + Selectors []string + + // Style properties + Declarations []*Declaration + + // At Rule embedded rules + Rules []*Rule + + // Current rule embedding level + EmbedLevel int +} + +// NewRule instanciates a new Rule +func NewRule(kind RuleKind) *Rule { + return &Rule{ + Kind: kind, + } +} + +// Returns string representation of rule kind +func (kind RuleKind) String() string { + switch kind { + case QualifiedRule: + return "Qualified Rule" + case AtRule: + return "At Rule" + default: + return "WAT" + } +} + +// EmbedsRules returns true if this rule embeds another rules +func (rule *Rule) 
EmbedsRules() bool { + if rule.Kind == AtRule { + for _, atRuleName := range atRulesWithRulesBlock { + if rule.Name == atRuleName { + return true + } + } + } + + return false +} + +// Equal returns true if both rules are equals +func (rule *Rule) Equal(other *Rule) bool { + if (rule.Kind != other.Kind) || + (rule.Prelude != other.Prelude) || + (rule.Name != other.Name) { + return false + } + + if (len(rule.Selectors) != len(other.Selectors)) || + (len(rule.Declarations) != len(other.Declarations)) || + (len(rule.Rules) != len(other.Rules)) { + return false + } + + for i, sel := range rule.Selectors { + if sel != other.Selectors[i] { + return false + } + } + + for i, decl := range rule.Declarations { + if !decl.Equal(other.Declarations[i]) { + return false + } + } + + for i, rule := range rule.Rules { + if !rule.Equal(other.Rules[i]) { + return false + } + } + + return true +} + +// Diff returns a string representation of rules differences +func (rule *Rule) Diff(other *Rule) []string { + result := []string{} + + if rule.Kind != other.Kind { + result = append(result, fmt.Sprintf("Kind: %s | %s", rule.Kind.String(), other.Kind.String())) + } + + if rule.Prelude != other.Prelude { + result = append(result, fmt.Sprintf("Prelude: \"%s\" | \"%s\"", rule.Prelude, other.Prelude)) + } + + if rule.Name != other.Name { + result = append(result, fmt.Sprintf("Name: \"%s\" | \"%s\"", rule.Name, other.Name)) + } + + if len(rule.Selectors) != len(other.Selectors) { + result = append(result, fmt.Sprintf("Selectors: %v | %v", strings.Join(rule.Selectors, ", "), strings.Join(other.Selectors, ", "))) + } else { + for i, sel := range rule.Selectors { + if sel != other.Selectors[i] { + result = append(result, fmt.Sprintf("Selector: \"%s\" | \"%s\"", sel, other.Selectors[i])) + } + } + } + + if len(rule.Declarations) != len(other.Declarations) { + result = append(result, fmt.Sprintf("Declarations Nb: %d | %d", len(rule.Declarations), len(other.Declarations))) + } else { + for i, decl := range rule.Declarations { + if !decl.Equal(other.Declarations[i]) { + result = append(result, fmt.Sprintf("Declaration: \"%s\" | \"%s\"", decl.String(), other.Declarations[i].String())) + } + } + } + + if len(rule.Rules) != len(other.Rules) { + result = append(result, fmt.Sprintf("Rules Nb: %d | %d", len(rule.Rules), len(other.Rules))) + } else { + + for i, rule := range rule.Rules { + if !rule.Equal(other.Rules[i]) { + result = append(result, fmt.Sprintf("Rule: \"%s\" | \"%s\"", rule.String(), other.Rules[i].String())) + } + } + } + + return result +} + +// Returns the string representation of a rule +func (rule *Rule) String() string { + result := "" + + if rule.Kind == QualifiedRule { + for i, sel := range rule.Selectors { + if i != 0 { + result += ", " + } + result += sel + } + } else { + // AtRule + result += fmt.Sprintf("%s", rule.Name) + + if rule.Prelude != "" { + if result != "" { + result += " " + } + result += fmt.Sprintf("%s", rule.Prelude) + } + } + + if (len(rule.Declarations) == 0) && (len(rule.Rules) == 0) { + result += ";" + } else { + result += " {\n" + + if rule.EmbedsRules() { + for _, subRule := range rule.Rules { + result += fmt.Sprintf("%s%s\n", rule.indent(), subRule.String()) + } + } else { + for _, decl := range rule.Declarations { + result += fmt.Sprintf("%s%s\n", rule.indent(), decl.String()) + } + } + + result += fmt.Sprintf("%s}", rule.indentEndBlock()) + } + + return result +} + +// Returns identation spaces for declarations and rules +func (rule *Rule) indent() string { + result := "" + + for i := 
0; i < ((rule.EmbedLevel + 1) * indentSpace); i++ { + result += " " + } + + return result +} + +// Returns identation spaces for end of block character +func (rule *Rule) indentEndBlock() string { + result := "" + + for i := 0; i < (rule.EmbedLevel * indentSpace); i++ { + result += " " + } + + return result +} diff --git a/vendor/github.com/aymerick/douceur/css/stylesheet.go b/vendor/github.com/aymerick/douceur/css/stylesheet.go new file mode 100644 index 0000000000..6b32c2ec90 --- /dev/null +++ b/vendor/github.com/aymerick/douceur/css/stylesheet.go @@ -0,0 +1,25 @@ +package css + +// Stylesheet represents a parsed stylesheet +type Stylesheet struct { + Rules []*Rule +} + +// NewStylesheet instanciate a new Stylesheet +func NewStylesheet() *Stylesheet { + return &Stylesheet{} +} + +// Returns string representation of the Stylesheet +func (sheet *Stylesheet) String() string { + result := "" + + for _, rule := range sheet.Rules { + if result != "" { + result += "\n" + } + result += rule.String() + } + + return result +} diff --git a/vendor/github.com/aymerick/douceur/parser/parser.go b/vendor/github.com/aymerick/douceur/parser/parser.go new file mode 100644 index 0000000000..6c4917ccf9 --- /dev/null +++ b/vendor/github.com/aymerick/douceur/parser/parser.go @@ -0,0 +1,409 @@ +package parser + +import ( + "errors" + "fmt" + "regexp" + "strings" + + "github.com/gorilla/css/scanner" + + "github.com/aymerick/douceur/css" +) + +const ( + importantSuffixRegexp = `(?i)\s*!important\s*$` +) + +var ( + importantRegexp *regexp.Regexp +) + +// Parser represents a CSS parser +type Parser struct { + scan *scanner.Scanner // Tokenizer + + // Tokens parsed but not consumed yet + tokens []*scanner.Token + + // Rule embedding level + embedLevel int +} + +func init() { + importantRegexp = regexp.MustCompile(importantSuffixRegexp) +} + +// NewParser instanciates a new parser +func NewParser(txt string) *Parser { + return &Parser{ + scan: scanner.New(txt), + } +} + +// Parse parses a whole stylesheet +func Parse(text string) (*css.Stylesheet, error) { + result, err := NewParser(text).ParseStylesheet() + if err != nil { + return nil, err + } + + return result, nil +} + +// ParseDeclarations parses CSS declarations +func ParseDeclarations(text string) ([]*css.Declaration, error) { + result, err := NewParser(text).ParseDeclarations() + if err != nil { + return nil, err + } + + return result, nil +} + +// ParseStylesheet parses a stylesheet +func (parser *Parser) ParseStylesheet() (*css.Stylesheet, error) { + result := css.NewStylesheet() + + // Parse BOM + if _, err := parser.parseBOM(); err != nil { + return result, err + } + + // Parse list of rules + rules, err := parser.ParseRules() + if err != nil { + return result, err + } + + result.Rules = rules + + return result, nil +} + +// ParseRules parses a list of rules +func (parser *Parser) ParseRules() ([]*css.Rule, error) { + result := []*css.Rule{} + + inBlock := false + if parser.tokenChar("{") { + // parsing a block of rules + inBlock = true + parser.embedLevel++ + + parser.shiftToken() + } + + for parser.tokenParsable() { + if parser.tokenIgnorable() { + parser.shiftToken() + } else if parser.tokenChar("}") { + if !inBlock { + errMsg := fmt.Sprintf("Unexpected } character: %s", parser.nextToken().String()) + return result, errors.New(errMsg) + } + + parser.shiftToken() + parser.embedLevel-- + + // finished + break + } else { + rule, err := parser.ParseRule() + if err != nil { + return result, err + } + + rule.EmbedLevel = parser.embedLevel + result = 
append(result, rule) + } + } + + return result, parser.err() +} + +// ParseRule parses a rule +func (parser *Parser) ParseRule() (*css.Rule, error) { + if parser.tokenAtKeyword() { + return parser.parseAtRule() + } + + return parser.parseQualifiedRule() +} + +// ParseDeclarations parses a list of declarations +func (parser *Parser) ParseDeclarations() ([]*css.Declaration, error) { + result := []*css.Declaration{} + + if parser.tokenChar("{") { + parser.shiftToken() + } + + for parser.tokenParsable() { + if parser.tokenIgnorable() { + parser.shiftToken() + } else if parser.tokenChar("}") { + // end of block + parser.shiftToken() + break + } else { + declaration, err := parser.ParseDeclaration() + if err != nil { + return result, err + } + + result = append(result, declaration) + } + } + + return result, parser.err() +} + +// ParseDeclaration parses a declaration +func (parser *Parser) ParseDeclaration() (*css.Declaration, error) { + result := css.NewDeclaration() + curValue := "" + + for parser.tokenParsable() { + if parser.tokenChar(":") { + result.Property = strings.TrimSpace(curValue) + curValue = "" + + parser.shiftToken() + } else if parser.tokenChar(";") || parser.tokenChar("}") { + if result.Property == "" { + errMsg := fmt.Sprintf("Unexpected ; character: %s", parser.nextToken().String()) + return result, errors.New(errMsg) + } + + if importantRegexp.MatchString(curValue) { + result.Important = true + curValue = importantRegexp.ReplaceAllString(curValue, "") + } + + result.Value = strings.TrimSpace(curValue) + + if parser.tokenChar(";") { + parser.shiftToken() + } + + // finished + break + } else { + token := parser.shiftToken() + curValue += token.Value + } + } + + // log.Printf("[parsed] Declaration: %s", result.String()) + + return result, parser.err() +} + +// Parse an At Rule +func (parser *Parser) parseAtRule() (*css.Rule, error) { + // parse rule name (eg: "@import") + token := parser.shiftToken() + + result := css.NewRule(css.AtRule) + result.Name = token.Value + + for parser.tokenParsable() { + if parser.tokenChar(";") { + parser.shiftToken() + + // finished + break + } else if parser.tokenChar("{") { + if result.EmbedsRules() { + // parse rules block + rules, err := parser.ParseRules() + if err != nil { + return result, err + } + + result.Rules = rules + } else { + // parse declarations block + declarations, err := parser.ParseDeclarations() + if err != nil { + return result, err + } + + result.Declarations = declarations + } + + // finished + break + } else { + // parse prelude + prelude, err := parser.parsePrelude() + if err != nil { + return result, err + } + + result.Prelude = prelude + } + } + + // log.Printf("[parsed] Rule: %s", result.String()) + + return result, parser.err() +} + +// Parse a Qualified Rule +func (parser *Parser) parseQualifiedRule() (*css.Rule, error) { + result := css.NewRule(css.QualifiedRule) + + for parser.tokenParsable() { + if parser.tokenChar("{") { + if result.Prelude == "" { + errMsg := fmt.Sprintf("Unexpected { character: %s", parser.nextToken().String()) + return result, errors.New(errMsg) + } + + // parse declarations block + declarations, err := parser.ParseDeclarations() + if err != nil { + return result, err + } + + result.Declarations = declarations + + // finished + break + } else { + // parse prelude + prelude, err := parser.parsePrelude() + if err != nil { + return result, err + } + + result.Prelude = prelude + } + } + + result.Selectors = strings.Split(result.Prelude, ",") + for i, sel := range result.Selectors { + 
result.Selectors[i] = strings.TrimSpace(sel) + } + + // log.Printf("[parsed] Rule: %s", result.String()) + + return result, parser.err() +} + +// Parse Rule prelude +func (parser *Parser) parsePrelude() (string, error) { + result := "" + + for parser.tokenParsable() && !parser.tokenEndOfPrelude() { + token := parser.shiftToken() + result += token.Value + } + + result = strings.TrimSpace(result) + + // log.Printf("[parsed] prelude: %s", result) + + return result, parser.err() +} + +// Parse BOM +func (parser *Parser) parseBOM() (bool, error) { + if parser.nextToken().Type == scanner.TokenBOM { + parser.shiftToken() + return true, nil + } + + return false, parser.err() +} + +// Returns next token without removing it from tokens buffer +func (parser *Parser) nextToken() *scanner.Token { + if len(parser.tokens) == 0 { + // fetch next token + nextToken := parser.scan.Next() + + // log.Printf("[token] %s => %v", nextToken.Type.String(), nextToken.Value) + + // queue it + parser.tokens = append(parser.tokens, nextToken) + } + + return parser.tokens[0] +} + +// Returns next token and remove it from the tokens buffer +func (parser *Parser) shiftToken() *scanner.Token { + var result *scanner.Token + + result, parser.tokens = parser.tokens[0], parser.tokens[1:] + return result +} + +// Returns tokenizer error, or nil if no error +func (parser *Parser) err() error { + if parser.tokenError() { + token := parser.nextToken() + return fmt.Errorf("Tokenizer error: %s", token.String()) + } + + return nil +} + +// Returns true if next token is Error +func (parser *Parser) tokenError() bool { + return parser.nextToken().Type == scanner.TokenError +} + +// Returns true if next token is EOF +func (parser *Parser) tokenEOF() bool { + return parser.nextToken().Type == scanner.TokenEOF +} + +// Returns true if next token is a whitespace +func (parser *Parser) tokenWS() bool { + return parser.nextToken().Type == scanner.TokenS +} + +// Returns true if next token is a comment +func (parser *Parser) tokenComment() bool { + return parser.nextToken().Type == scanner.TokenComment +} + +// Returns true if next token is a CDO or a CDC +func (parser *Parser) tokenCDOorCDC() bool { + switch parser.nextToken().Type { + case scanner.TokenCDO, scanner.TokenCDC: + return true + default: + return false + } +} + +// Returns true if next token is ignorable +func (parser *Parser) tokenIgnorable() bool { + return parser.tokenWS() || parser.tokenComment() || parser.tokenCDOorCDC() +} + +// Returns true if next token is parsable +func (parser *Parser) tokenParsable() bool { + return !parser.tokenEOF() && !parser.tokenError() +} + +// Returns true if next token is an At Rule keyword +func (parser *Parser) tokenAtKeyword() bool { + return parser.nextToken().Type == scanner.TokenAtKeyword +} + +// Returns true if next token is given character +func (parser *Parser) tokenChar(value string) bool { + token := parser.nextToken() + return (token.Type == scanner.TokenChar) && (token.Value == value) +} + +// Returns true if next token marks the end of a prelude +func (parser *Parser) tokenEndOfPrelude() bool { + return parser.tokenChar(";") || parser.tokenChar("{") +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/.gitignore b/vendor/github.com/brianvoe/gofakeit/v7/.gitignore new file mode 100644 index 0000000000..e69de29bb2 diff --git a/vendor/github.com/brianvoe/gofakeit/v7/BENCHMARKS.md b/vendor/github.com/brianvoe/gofakeit/v7/BENCHMARKS.md new file mode 100644 index 0000000000..0f6776c58f --- /dev/null +++ 
b/vendor/github.com/brianvoe/gofakeit/v7/BENCHMARKS.md @@ -0,0 +1,323 @@ +go test -bench=. -benchmem \ +goos: darwin \ +goarch: amd64 \ +pkg: github.com/brianvoe/gofakeit/v7 \ +cpu: Apple M1 Max \ +Table generated with tablesgenerator.com/markdown_tables File->Paste table data + +| Benchmark | Ops | CPU | MEM | MEM alloc | +|---------------------------------------|----------|----------------|--------------|------------------| +| BenchmarkAddress-10 | 1369538 | 874.7 ns/op | 195 B/op | 5 allocs/op | +| BenchmarkStreet-10 | 3438403 | 347.9 ns/op | 25 B/op | 2 allocs/op | +| BenchmarkStreetNumber-10 | 8601847 | 138.2 ns/op | 4 B/op | 1 allocs/op | +| BenchmarkStreetPrefix-10 | 19814623 | 60.26 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkStreetName-10 | 19633909 | 60.78 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkStreetSuffix-10 | 19717612 | 60.19 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkCity-10 | 20219280 | 58.88 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkState-10 | 19526760 | 60.85 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkStateAbr-10 | 19634631 | 60.79 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkZip-10 | 7521580 | 157.7 ns/op | 5 B/op | 1 allocs/op | +| BenchmarkCountry-10 | 19451166 | 61.29 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkCountryAbr-10 | 19585867 | 60.82 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkLatitude-10 | 72309668 | 16.22 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkLongitude-10 | 72334910 | 16.23 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkLatitudeInRange-10 | 65830375 | 17.77 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkLongitudeInRange-10 | 66400602 | 17.77 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkPetName-10 | 30391768 | 39.19 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkAnimal-10 | 28761544 | 41.22 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkAnimalType-10 | 26955640 | 44.13 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkFarmAnimal-10 | 22307872 | 53.39 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkCat-10 | 24226416 | 49.13 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkDog-10 | 19702195 | 60.53 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkBird-10 | 17095884 | 70.22 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkAppName-10 | 3805696 | 314.4 ns/op | 25 B/op | 1 allocs/op | +| BenchmarkAppVersion-10 | 10250247 | 116.4 ns/op | 7 B/op | 1 allocs/op | +| BenchmarkAppAuthor-10 | 11592895 | 101.2 ns/op | 8 B/op | 0 allocs/op | +| BenchmarkUsername-10 | 8975020 | 132.9 ns/op | 16 B/op | 2 allocs/op | +| BenchmarkPassword-10 | 322147 | 3647 ns/op | 1656 B/op | 60 allocs/op | +| BenchmarkBeerName-10 | 27986706 | 42.27 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkBeerStyle-10 | 19460616 | 60.99 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkBeerHop-10 | 26915132 | 44.35 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkBeerYeast-10 | 24840991 | 47.98 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkBeerMalt-10 | 20806075 | 57.18 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkBeerIbu-10 | 41349307 | 28.99 ns/op | 8 B/op | 1 allocs/op | +| BenchmarkBeerAlcohol-10 | 6054163 | 197.8 ns/op | 28 B/op | 2 allocs/op | +| BenchmarkBeerBlg-10 | 5825622 | 205.6 ns/op | 37 B/op | 2 allocs/op | +| BenchmarkBook-10 | 6927696 | 171.9 ns/op | 48 B/op | 1 allocs/op | +| BenchmarkBookTitle-10 | 31594431 | 37.36 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkBookAuthor-10 | 29969000 | 39.91 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkBookGenre-10 | 24269676 | 48.77 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkCar-10 | 3795943 | 316.3 ns/op | 96 B/op | 1 allocs/op | +| BenchmarkCarType-10 | 26309082 | 43.81 ns/op | 0 B/op | 0 allocs/op | +| 
BenchmarkCarFuelType-10 | 26414821 | 45.18 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkCarTransmissionType-10 | 24309171 | 48.83 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkCarMaker-10 | 23505099 | 51.01 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkCarModel-10 | 19055469 | 62.82 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkCelebrityActor-10 | 19915483 | 57.84 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkCelebrityBusiness-10 | 20186090 | 67.55 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkCelebritySport-10 | 14223360 | 84.47 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkColor-10 | 21535978 | 54.16 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkNiceColors-10 | 71414755 | 16.16 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkSafeColor-10 | 24683570 | 46.53 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkHexColor-10 | 4815675 | 250.3 ns/op | 24 B/op | 3 allocs/op | +| BenchmarkRGBColor-10 | 19453399 | 61.67 ns/op | 24 B/op | 1 allocs/op | +| BenchmarkCompany-10 | 25604892 | 46.66 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkCompanySuffix-10 | 24647574 | 48.83 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkBlurb-10 | 20634126 | 58.88 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkBuzzWord-10 | 23034157 | 51.84 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkBS-10 | 21803314 | 55.08 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkJob-10 | 4121804 | 292.0 ns/op | 64 B/op | 1 allocs/op | +| BenchmarkJobTitle-10 | 24344308 | 47.51 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkJobDescriptor-10 | 24049240 | 50.12 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkJobLevel-10 | 19349389 | 62.45 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkSlogan-10 | 4499653 | 263.1 ns/op | 41 B/op | 1 allocs/op | +| BenchmarkCSVLookup100-10 | 1184 | 1014597 ns/op | 713620 B/op | 9923 allocs/op | +| BenchmarkEmoji-10 | 24200866 | 49.72 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkEmojiDescription-10 | 22978600 | 52.18 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkEmojiCategory-10 | 21228057 | 56.57 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkEmojiAlias-10 | 17616240 | 68.45 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkEmojiTag-10 | 19253190 | 62.21 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkError-10 | 1637725 | 736.5 ns/op | 288 B/op | 8 allocs/op | +| BenchmarkErrorObject-10 | 6755540 | 177.7 ns/op | 32 B/op | 3 allocs/op | +| BenchmarkErrorDatabase-10 | 5794706 | 200.2 ns/op | 63 B/op | 3 allocs/op | +| BenchmarkErrorGRPC-10 | 6063802 | 196.8 ns/op | 64 B/op | 3 allocs/op | +| BenchmarkErrorHTTP-10 | 3956130 | 302.2 ns/op | 158 B/op | 4 allocs/op | +| BenchmarkErrorHTTPClient-10 | 6025258 | 196.4 ns/op | 52 B/op | 3 allocs/op | +| BenchmarkErrorHTTPServer-10 | 5969395 | 202.1 ns/op | 59 B/op | 3 allocs/op | +| BenchmarkErrorRuntime-10 | 4786108 | 248.3 ns/op | 150 B/op | 3 allocs/op | +| BenchmarkErrorValidation-10 | 1811821 | 667.8 ns/op | 277 B/op | 7 allocs/op | +| BenchmarkFileMimeType-10 | 26273320 | 45.47 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkFileExtension-10 | 22216770 | 53.94 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkCusip-10 | 6778542 | 176.4 ns/op | 16 B/op | 1 allocs/op | +| BenchmarkIsin-10 | 1844566 | 652.1 ns/op | 525 B/op | 7 allocs/op | +| BenchmarkFruit-10 | 20381516 | 58.81 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkVegetable-10 | 19638996 | 61.11 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkBreakfast-10 | 9425649 | 127.2 ns/op | 32 B/op | 1 allocs/op | +| BenchmarkLunch-10 | 8996594 | 133.6 ns/op | 34 B/op | 1 allocs/op | +| BenchmarkDinner-10 | 9427389 | 126.6 ns/op | 36 B/op | 1 allocs/op | +| BenchmarkDrink-10 | 8552294 | 140.4 ns/op | 7 B/op | 2 allocs/op | 
+| BenchmarkSnack-10 | 7678719 | 156.7 ns/op | 32 B/op | 1 allocs/op | +| BenchmarkDessert-10 | 8907098 | 134.0 ns/op | 31 B/op | 2 allocs/op | +| BenchmarkGamertag-10 | 2474312 | 483.9 ns/op | 83 B/op | 5 allocs/op | +| BenchmarkDice-10 | 47727080 | 25.22 ns/op | 8 B/op | 1 allocs/op | +| BenchmarkGenerate/package-10 | 423741 | 2822 ns/op | 1187 B/op | 29 allocs/op | +| BenchmarkGenerate/Complex-10 | 138112 | 8653 ns/op | 4553 B/op | 80 allocs/op | +| BenchmarkFixedWidthLookup100-10 | 2072 | 583512 ns/op | 489975 B/op | 8701 allocs/op | +| BenchmarkRegex-10 | 633699 | 1914 ns/op | 1632 B/op | 27 allocs/op | +| BenchmarkRegexEmail-10 | 205447 | 5893 ns/op | 4084 B/op | 90 allocs/op | +| BenchmarkMap-10 | 337576 | 3596 ns/op | 1111 B/op | 16 allocs/op | +| BenchmarkHackerPhrase-10 | 166683 | 7209 ns/op | 3107 B/op | 50 allocs/op | +| BenchmarkHackerAbbreviation-10 | 25295019 | 47.33 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkHackerAdjective-10 | 24022460 | 49.76 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkHackerNoun-10 | 22496308 | 53.31 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkHackerVerb-10 | 18546052 | 64.53 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkHackeringVerb-10 | 20298242 | 59.05 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkReplaceWithNumbers-10 | 1402717 | 852.8 ns/op | 296 B/op | 10 allocs/op | +| BenchmarkHipsterWord-10 | 25151432 | 47.83 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkHipsterSentence-10 | 1314279 | 907.8 ns/op | 288 B/op | 3 allocs/op | +| BenchmarkHipsterParagraph-10 | 67437 | 17682 ns/op | 10521 B/op | 48 allocs/op | +| BenchmarkInputName-10 | 20759898 | 57.98 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkSvg-10 | 225738 | 5181 ns/op | 8876 B/op | 52 allocs/op | +| BenchmarkImageURL-10 | 15524359 | 77.15 ns/op | 38 B/op | 3 allocs/op | +| BenchmarkImage-10 | 63 | 18773091 ns/op | 2457691 B/op | 307202 allocs/op | +| BenchmarkImageJpeg-10 | 39 | 29498291 ns/op | 2982478 B/op | 307217 allocs/op | +| BenchmarkImagePng-10 | 16 | 68552771 ns/op | 5899010 B/op | 307270 allocs/op | +| BenchmarkDomainName-10 | 3001479 | 402.2 ns/op | 95 B/op | 5 allocs/op | +| BenchmarkDomainSuffix-10 | 21476332 | 56.03 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkURL-10 | 1289262 | 934.6 ns/op | 277 B/op | 10 allocs/op | +| BenchmarkHTTPMethod-10 | 19895946 | 60.56 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkIPv4Address-10 | 6088518 | 196.5 ns/op | 16 B/op | 1 allocs/op | +| BenchmarkIPv6Address-10 | 2580320 | 462.0 ns/op | 111 B/op | 8 allocs/op | +| BenchmarkMacAddress-10 | 3281300 | 364.7 ns/op | 24 B/op | 1 allocs/op | +| BenchmarkHTTPStatusCode-10 | 16597051 | 72.18 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkHTTPStatusCodeSimple-10 | 17250238 | 69.52 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkLogLevel-10 | 20608036 | 58.20 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkUserAgent-10 | 1946059 | 615.5 ns/op | 298 B/op | 5 allocs/op | +| BenchmarkChromeUserAgent-10 | 2619324 | 458.2 ns/op | 184 B/op | 5 allocs/op | +| BenchmarkFirefoxUserAgent-10 | 1601706 | 753.8 ns/op | 362 B/op | 6 allocs/op | +| BenchmarkSafariUserAgent-10 | 1569805 | 764.4 ns/op | 551 B/op | 7 allocs/op | +| BenchmarkOperaUserAgent-10 | 2378972 | 504.7 ns/op | 212 B/op | 5 allocs/op | +| BenchmarkJSONLookup100-10 | 928 | 1276230 ns/op | 813725 B/op | 12134 allocs/op | +| BenchmarkLanguage-10 | 23873984 | 50.34 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkLanguageAbbreviation-10 | 25025524 | 47.93 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkLanguageBCP-10 | 21895112 | 54.74 ns/op | 0 B/op | 0 allocs/op | +| 
BenchmarkProgrammingLanguage-10 | 21169636 | 56.70 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkLoremIpsumWord-10 | 23980356 | 49.92 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkLoremIpsumSentence-10 | 1344384 | 894.8 ns/op | 219 B/op | 2 allocs/op | +| BenchmarkLoremIpsumParagraph-10 | 66643 | 17916 ns/op | 8483 B/op | 40 allocs/op | +| BenchmarkMinecraftOre-10 | 15077451 | 79.85 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkMinecraftWood-10 | 14422303 | 83.44 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkMinecraftArmorTier-10 | 15262417 | 78.70 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkMinecraftArmorPart-10 | 15340200 | 78.11 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkMinecraftWeapon-10 | 15107792 | 79.78 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkMinecraftTool-10 | 14428170 | 83.15 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkMinecraftDye-10 | 14657188 | 81.95 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkMinecraftFood-10 | 14860236 | 81.01 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkMinecraftAnimal-10 | 15281302 | 78.35 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkMinecraftVillagerJob-10 | 14586627 | 82.14 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkMinecraftVillagerStation-10 | 14678592 | 81.82 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkMinecraftVillagerLevel-10 | 14314164 | 83.76 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkMinecraftMobPassive-10 | 15132750 | 79.32 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkMinecraftMobNeutral-10 | 13802880 | 87.23 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkMinecraftMobHostile-10 | 13141233 | 91.06 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkMinecraftMobBoss-10 | 15245322 | 78.79 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkMinecraftBiome-10 | 14943789 | 79.86 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkMinecraftWeather-10 | 12681386 | 94.55 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkBool-10 | 73596490 | 16.18 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkUUID-10 | 4128735 | 288.7 ns/op | 48 B/op | 1 allocs/op | +| BenchmarkShuffleAnySlice-10 | 3149857 | 380.0 ns/op | 24 B/op | 1 allocs/op | +| BenchmarkFlipACoin-10 | 74457853 | 16.17 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkMovie-10 | 9234234 | 129.3 ns/op | 32 B/op | 1 allocs/op | +| BenchmarkMovieName-10 | 25314163 | 47.82 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkMovieGenre-10 | 24570799 | 48.81 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkNumber-10 | 74087221 | 16.21 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkUint8-10 | 73790145 | 16.35 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkUint16-10 | 74334474 | 16.27 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkUint32-10 | 71804154 | 16.72 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkUint64-10 | 71385904 | 16.64 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkUintRange-10 | 73982353 | 16.13 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkInt8-10 | 73927286 | 16.14 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkInt16-10 | 74022668 | 16.19 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkInt32-10 | 72009002 | 16.64 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkInt64-10 | 72375081 | 16.59 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkIntRange-10 | 74396306 | 16.20 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkFloat32-10 | 73950822 | 16.20 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkFloat32Range-10 | 73622833 | 16.18 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkFloat64-10 | 73076970 | 16.31 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkFloat64Range-10 | 73385329 | 16.33 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkShuffleInts-10 | 9151563 | 131.8 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkRandomInt-10 | 72188592 | 16.63 
ns/op | 0 B/op | 0 allocs/op | +| BenchmarkRandomUint-10 | 72293332 | 16.64 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkHexUint-10 | 14888452 | 80.93 ns/op | 16 B/op | 2 allocs/op | +| BenchmarkCurrency-10 | 14366668 | 83.15 ns/op | 32 B/op | 1 allocs/op | +| BenchmarkCurrencyShort-10 | 24445954 | 48.68 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkCurrencyLong-10 | 23560556 | 50.65 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkPrice-10 | 73693664 | 16.33 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkCreditCard-10 | 1000000 | 1153 ns/op | 264 B/op | 15 allocs/op | +| BenchmarkCreditCardType-10 | 32410167 | 36.93 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkCreditCardNumber-10 | 1511084 | 799.1 ns/op | 183 B/op | 10 allocs/op | +| BenchmarkCreditCardExp-10 | 11014600 | 108.5 ns/op | 5 B/op | 1 allocs/op | +| BenchmarkCreditCardCvv-10 | 20325733 | 59.31 ns/op | 3 B/op | 1 allocs/op | +| BenchmarkAchRouting-10 | 7338657 | 164.0 ns/op | 16 B/op | 1 allocs/op | +| BenchmarkAchAccount-10 | 5646235 | 212.0 ns/op | 16 B/op | 1 allocs/op | +| BenchmarkBitcoinAddress-10 | 517399 | 2306 ns/op | 715 B/op | 37 allocs/op | +| BenchmarkBitcoinPrivateKey-10 | 1276884 | 943.2 ns/op | 184 B/op | 5 allocs/op | +| BenchmarkName-10 | 7771977 | 152.6 ns/op | 16 B/op | 1 allocs/op | +| BenchmarkFirstName-10 | 23523357 | 50.98 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkMiddleName-10 | 17589612 | 68.17 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkLastName-10 | 20825980 | 57.63 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkNamePrefix-10 | 25542308 | 46.65 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkNameSuffix-10 | 21972974 | 54.56 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkSSN-10 | 31829850 | 37.71 ns/op | 16 B/op | 1 allocs/op | +| BenchmarkGender-10 | 73621140 | 16.25 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkHobby-10 | 17347129 | 69.06 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkPerson-10 | 317911 | 3693 ns/op | 837 B/op | 33 allocs/op | +| BenchmarkContact-10 | 1843221 | 650.8 ns/op | 136 B/op | 6 allocs/op | +| BenchmarkPhone-10 | 6786794 | 176.2 ns/op | 16 B/op | 1 allocs/op | +| BenchmarkPhoneFormatted-10 | 4674930 | 256.2 ns/op | 16 B/op | 1 allocs/op | +| BenchmarkEmail-10 | 2794358 | 431.1 ns/op | 88 B/op | 4 allocs/op | +| BenchmarkTeams-10 | 1576238 | 763.8 ns/op | 672 B/op | 10 allocs/op | +| BenchmarkProduct-10 | 206918 | 5813 ns/op | 1069 B/op | 31 allocs/op | +| BenchmarkProductName-10 | 2313364 | 517.4 ns/op | 103 B/op | 5 allocs/op | +| BenchmarkProductDescription-10 | 348346 | 3434 ns/op | 549 B/op | 8 allocs/op | +| BenchmarkProductCategory-10 | 25139860 | 47.73 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkProductFeature-10 | 21264922 | 56.46 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkProductMaterial-10 | 18142828 | 66.24 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkProductUPC-10 | 1399148 | 859.1 ns/op | 96 B/op | 11 allocs/op | +| BenchmarkSchool-10 | 4161710 | 287.6 ns/op | 34 B/op | 1 allocs/op | +| BenchmarkLetter-10 | 73457020 | 16.29 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkLetterN-10 | 5060271 | 238.8 ns/op | 64 B/op | 2 allocs/op | +| BenchmarkVowel-10 | 58685206 | 20.87 ns/op | 4 B/op | 1 allocs/op | +| BenchmarkDigit-10 | 73944177 | 16.20 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkDigitN-10 | 5051070 | 236.6 ns/op | 64 B/op | 2 allocs/op | +| BenchmarkNumerify-10 | 6794545 | 176.4 ns/op | 16 B/op | 1 allocs/op | +| BenchmarkLexify-10 | 11113212 | 108.3 ns/op | 8 B/op | 1 allocs/op | +| BenchmarkShuffleStrings-10 | 9924429 | 121.0 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkRandomString-10 | 73420688 | 
16.34 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkTemplate-10 | 2488 | 477349 ns/op | 280877 B/op | 4611 allocs/op | +| BenchmarkDate-10 | 10292476 | 116.2 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkPastDate-10 | 18285830 | 65.48 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkFutureDate-10 | 18399240 | 65.13 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkDateRange-10 | 8406979 | 142.7 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkMonth-10 | 74105902 | 16.26 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkMonthString-10 | 73647870 | 16.26 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkWeekDay-10 | 73990911 | 16.24 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkDay-10 | 73435291 | 16.21 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkYear-10 | 73950066 | 16.21 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkHour-10 | 74219916 | 16.21 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkMinute-10 | 74349634 | 16.21 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkSecond-10 | 73787313 | 16.29 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkNanoSecond-10 | 74299380 | 16.15 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkTimeZone-10 | 19105237 | 62.83 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkTimeZoneFull-10 | 16170054 | 74.27 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkTimeZoneAbv-10 | 20725029 | 58.23 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkTimeZoneOffset-10 | 14597666 | 81.13 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkTimeZoneRegion-10 | 15733551 | 76.25 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkWeighted-10 | 28507484 | 40.42 ns/op | 16 B/op | 1 allocs/op | +| BenchmarkAdjective-10 | 6726474 | 178.3 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkAdjectiveDescriptive-10 | 16486224 | 73.39 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkAdjectiveQuantitative-10 | 15290762 | 78.51 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkAdjectiveProper-10 | 16535046 | 72.42 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkAdjectiveDemonstrative-10 | 16448917 | 73.41 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkAdjectivePossessive-10 | 15715839 | 73.22 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkAdjectiveInterrogative-10 | 15543478 | 77.43 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkAdjectiveIndefinite-10 | 16306894 | 73.50 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkAdverb-10 | 7139924 | 168.7 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkAdverbManner-10 | 17139112 | 70.37 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkAdverbDegree-10 | 16213138 | 73.70 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkAdverbPlace-10 | 17268267 | 69.67 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkAdverbTimeDefinite-10 | 16273309 | 73.70 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkAdverbTimeIndefinite-10 | 15822297 | 74.26 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkAdverbFrequencyDefinite-10 | 16344181 | 73.30 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkAdverbFrequencyIndefinite-10 | 16118569 | 74.27 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkComment-10 | 1000000 | 1146 ns/op | 258 B/op | 6 allocs/op | +| BenchmarkConnective-10 | 7132710 | 168.3 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkConnectiveTime-10 | 15339457 | 78.08 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkConnectiveComparative-10 | 16188842 | 74.04 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkConnectiveComplaint-10 | 14127903 | 85.00 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkConnectiveListing-10 | 16073437 | 74.65 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkConnectiveCasual-10 | 13771904 | 87.06 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkConnectiveExamplify-10 | 15763296 | 76.03 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkWord-10 | 8047610 | 148.5 ns/op | 3 B/op 
| 0 allocs/op | +| BenchmarkSentenceSimple-10 | 682924 | 1707 ns/op | 590 B/op | 11 allocs/op | +| BenchmarkInterjection-10 | 16295702 | 73.50 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkNoun-10 | 6711976 | 179.3 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkNounCommon-10 | 17117466 | 69.83 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkNounConcrete-10 | 17144979 | 69.81 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkNounAbstract-10 | 16839790 | 71.16 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkNounCollectivePeople-10 | 16360652 | 73.24 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkNounCollectiveAnimal-10 | 16453287 | 72.79 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkNounCollectiveThing-10 | 16397232 | 72.97 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkNounCountable-10 | 17171895 | 69.78 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkNounUncountable-10 | 17193412 | 69.75 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkNounProper-10 | 10644372 | 112.0 ns/op | 7 B/op | 0 allocs/op | +| BenchmarkNounDeterminer-10 | 17003730 | 70.44 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkPhrase-10 | 23481584 | 51.12 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkPhraseNoun-10 | 2961691 | 405.1 ns/op | 104 B/op | 2 allocs/op | +| BenchmarkPhraseVerb-10 | 1422132 | 845.1 ns/op | 232 B/op | 6 allocs/op | +| BenchmarkPhraseAdverb-10 | 7617193 | 153.3 ns/op | 9 B/op | 0 allocs/op | +| BenchmarkPhrasePreposition-10 | 2336155 | 513.3 ns/op | 123 B/op | 3 allocs/op | +| BenchmarkPreposition-10 | 9244665 | 129.9 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkPrepositionSimple-10 | 16397623 | 73.11 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkPrepositionDouble-10 | 16107751 | 74.19 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkPrepositionCompound-10 | 16364900 | 73.10 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkPronoun-10 | 6436707 | 186.4 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkPronounPersonal-10 | 16997427 | 70.53 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkPronounObject-10 | 15303380 | 78.27 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkPronounPossessive-10 | 15323908 | 78.10 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkPronounReflective-10 | 15258552 | 78.45 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkPronounIndefinite-10 | 16053780 | 74.69 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkPronounDemonstrative-10 | 16476726 | 72.73 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkPronounInterrogative-10 | 15526576 | 77.15 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkPronounRelative-10 | 14159284 | 84.64 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkSentence-10 | 721934 | 1642 ns/op | 219 B/op | 3 allocs/op | +| BenchmarkParagraph-10 | 39356 | 30481 ns/op | 6687 B/op | 53 allocs/op | +| BenchmarkQuestion-10 | 1757269 | 683.1 ns/op | 243 B/op | 3 allocs/op | +| BenchmarkQuote-10 | 1522988 | 787.2 ns/op | 261 B/op | 3 allocs/op | +| BenchmarkVerb-10 | 8924802 | 127.6 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkVerbAction-10 | 17150564 | 69.83 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkVerbTransitive-10 | 17328488 | 69.21 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkVerbIntransitive-10 | 16427985 | 72.98 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkVerbLinking-10 | 17754280 | 67.52 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkVerbHelping-10 | 17118238 | 70.31 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkXMLLookup100-10 | 937 | 1279022 ns/op | 862536 B/op | 11370 allocs/op | \ No newline at end of file diff --git a/vendor/github.com/brianvoe/gofakeit/v7/CODE_OF_CONDUCT.md b/vendor/github.com/brianvoe/gofakeit/v7/CODE_OF_CONDUCT.md new file mode 100644 index 0000000000..99d12c90fe --- 
/dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/CODE_OF_CONDUCT.md @@ -0,0 +1,46 @@ +# Contributor Covenant Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers. + +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at brian@webiswhatido.com. The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately. + +Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership. 
+ +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [http://contributor-covenant.org/version/1/4][version] + +[homepage]: http://contributor-covenant.org +[version]: http://contributor-covenant.org/version/1/4/ diff --git a/vendor/github.com/brianvoe/gofakeit/v7/CONTRIBUTING.md b/vendor/github.com/brianvoe/gofakeit/v7/CONTRIBUTING.md new file mode 100644 index 0000000000..5a4812c28e --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/CONTRIBUTING.md @@ -0,0 +1 @@ +# Make a pull request and submit it and ill take a look at it. Thanks! diff --git a/vendor/github.com/brianvoe/gofakeit/v7/LICENSE.txt b/vendor/github.com/brianvoe/gofakeit/v7/LICENSE.txt new file mode 100644 index 0000000000..21984c9d5e --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/LICENSE.txt @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Copyright (c) [year] [fullname] + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
\ No newline at end of file diff --git a/vendor/github.com/brianvoe/gofakeit/v7/README.md b/vendor/github.com/brianvoe/gofakeit/v7/README.md new file mode 100644 index 0000000000..23d39b2e2a --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/README.md @@ -0,0 +1,886 @@ +![Gofakeit](https://raw.githubusercontent.com/brianvoe/gofakeit/master/logo.png) + +# Gofakeit [![Go Report Card](https://goreportcard.com/badge/github.com/brianvoe/gofakeit)](https://goreportcard.com/report/github.com/brianvoe/gofakeit) ![Test](https://github.com/brianvoe/gofakeit/workflows/Test/badge.svg?branch=master) [![GoDoc](https://godoc.org/github.com/brianvoe/gofakeit/v7?status.svg)](https://godoc.org/github.com/brianvoe/gofakeit/v7) [![license](http://img.shields.io/badge/license-MIT-green.svg?style=flat)](https://raw.githubusercontent.com/brianvoe/gofakeit/master/LICENSE.txt) + +Random data generator written in go + +## Support + +[![ko-fi](https://ko-fi.com/img/githubbutton_sm.svg)](https://ko-fi.com/G2G0R5EJT) Buy Me A Coffee + +## Merch + +[![Merch](https://raw.githubusercontent.com/brianvoe/gofakeit/master/merch.png)](https://gofakeit-buy-shop.fourthwall.com) + +## Features + +- [310+ Functions!!!](#functions) +- [Random Sources](#random-sources) +- [Global Rand](#global-rand-set) +- [Struct Generator](#struct) +- [Custom Functions](#custom-functions) +- [Templates](#templates) +- [Http Server](https://github.com/brianvoe/gofakeit/tree/master/cmd/gofakeitserver) +- [Command Line Tool](https://github.com/brianvoe/gofakeit/tree/master/cmd/gofakeit) +- Zero dependencies +- [Benchmarks](https://github.com/brianvoe/gofakeit/blob/master/BENCHMARKS.md) +- [Issue](https://github.com/brianvoe/gofakeit/issues) + +## Contributors + +Thank you to all our Gofakeit contributors! + + + + + +## Installation + +```go +go get github.com/brianvoe/gofakeit/v7 +``` + +## Simple Usage + +```go +import "github.com/brianvoe/gofakeit/v7" + +gofakeit.Name() // Markus Moen +gofakeit.Email() // alaynawuckert@kozey.biz +gofakeit.Phone() // (570)245-7485 +gofakeit.BS() // front-end +gofakeit.BeerName() // Duvel +gofakeit.Color() // MediumOrchid +gofakeit.Company() // Moen, Pagac and Wuckert +gofakeit.CreditCardNumber(nil) // 4287271570245748 +gofakeit.HackerPhrase() // Connecting the array won't do anything, we need to generate the haptic COM driver! +gofakeit.JobTitle() // Director +gofakeit.CurrencyShort() // USD +``` + +[See full list of functions](#functions) + +## Seed + +If you are using the default global usage and dont care about seeding no need to set anything. +Gofakeit will seed it with a cryptographically secure number. + +If you need a reproducible outcome you can set it via the Seed function call. Every example in +this repo sets it for testing purposes. + +```go +import "github.com/brianvoe/gofakeit/v7" + +gofakeit.Seed(0) // If 0 will use crypto/rand to generate a number + +// or + +gofakeit.Seed(8675309) // Set it to whatever number you want +``` + +## Random Sources + +Gofakeit has a few rand sources, by default it uses math/rand/v2 PCG which is a pseudo random number generator and is thread locked. + +If you want to see other potential sources you can see the sub package [Source](https://github.com/brianvoe/gofakeit/tree/master/source) for more information. 
+ +```go +import ( + "github.com/brianvoe/gofakeit/v7" + "github.com/brianvoe/gofakeit/v7/source" + "math/rand/v2" +) + +// Uses math/rand/v2(PCG Pseudo) with mutex locking +faker := gofakeit.New(0) + +// NewFaker takes in a source and whether or not it should be thread safe +faker := gofakeit.NewFaker(src rand.Source, lock bool) + +// PCG Pseudo +faker := gofakeit.NewFaker(rand.NewPCG(11, 11), true) + +// ChaCha8 +faker := gofakeit.NewFaker(rand.NewChaCha8([32]byte{0, 1, 2, 3, 4, 5}), true) + + +// Additional from Gofakeit sub package source + +// JSF(Jenkins Small Fast) +faker := gofakeit.NewFaker(source.NewJSF(11), true) + +// SFC(Simple Fast Counter) +faker := gofakeit.NewFaker(source.NewSFC(11), true) + +// Crypto - Uses crypto/rand +faker := gofakeit.NewFaker(source.NewCrypto(), true) + +// Dumb - simple incrementing number +faker := gofakeit.NewFaker(source.NewDumb(11), true) +``` + +## Global Rand Set + +If you would like to use the simple function calls but need to use something like +crypto/rand you can override the default global with the random source that you want. + +```go +import "github.com/brianvoe/gofakeit/v7" + +gofakeit.GlobalFaker = gofakeit.New(0) +``` + +## Struct + +Gofakeit can generate random data for struct fields. For the most part it covers all the basic type +as well as some non-basic like time.Time. + +Struct fields can also use tags to more specifically generate data for that field type. + +```go +import "github.com/brianvoe/gofakeit/v7" + +// Create structs with random injected data +type Foo struct { + Str string + Int int + Pointer *int + Name string `fake:"{firstname}"` // Any available function all lowercase + Sentence string `fake:"{sentence:3}"` // Can call with parameters + RandStr string `fake:"{randomstring:[hello,world]}"` + Number string `fake:"{number:1,10}"` // Comma separated for multiple values + Regex string `fake:"{regex:[abcdef]{5}}"` // Generate string from regex + Map map[string]int `fakesize:"2"` + Array []string `fakesize:"2"` + ArrayRange []string `fakesize:"2,6"` + Bar Bar + Skip *string `fake:"skip"` // Set to "skip" to not generate data for + SkipAlt *string `fake:"-"` // Set to "-" to not generate data for + Created time.Time // Can take in a fake tag as well as a format tag + CreatedFormat time.Time `fake:"{year}-{month}-{day}" format:"2006-01-02"` +} + +type Bar struct { + Name string + Number int + Float float32 +} + +// Pass your struct as a pointer +var f Foo +err := gofakeit.Struct(&f) + +fmt.Println(f.Str) // hrukpttuezptneuvunh +fmt.Println(f.Int) // -7825289004089916589 +fmt.Println(*f.Pointer) // -343806609094473732 +fmt.Println(f.Name) // fred +fmt.Println(f.Sentence) // Record river mind. +fmt.Println(f.RandStr) // world +fmt.Println(f.Number) // 4 +fmt.Println(f.Regex) // cbdfc +fmt.Println(f.Map) // map[PxLIo:52 lxwnqhqc:846] +fmt.Println(f.Array) // cbdfc +fmt.Printf("%+v", f.Bar) // {Name:QFpZ Number:-2882647639396178786 Float:1.7636692e+37} +fmt.Println(f.Skip) // +fmt.Println(f.Created.String()) // 1908-12-07 04:14:25.685339029 +0000 UTC + +// Supported formats +// int, int8, int16, int32, int64, +// uint, uint8, uint16, uint32, uint64, +// float32, float64, +// bool, string, +// array, pointers, map +// time.Time // If setting time you can also set a format tag +// Nested Struct Fields and Embedded Fields +``` + +## Fakeable types + +It is possible to extend a struct by implementing the `Fakeable` interface +in order to control the generation. 
+ +For example, this is useful when it is not possible to modify the struct that you want to fake by adding struct tags to a field but you still need to be able to control the generation process. + +```go +// Custom string that you want to generate your own data for +type Friend string + +func (c *Friend) Fake(f *gofakeit.Faker) (any, error) { + // Can call any other faker methods + return f.RandomString([]string{"billy", "fred", "susan"}), nil +} + +// Custom time that you want to generate your own data for +type Age time.Time + +func (c *Age) Fake(f *gofakeit.Faker) (any, error) { + return Age(f.DateRange(time.Now().AddDate(-100, 0, 0), time.Now().AddDate(-18, 0, 0))), nil +} + +// This is the struct that we cannot modify to add struct tags +type User struct { + Name Friend + Age *Age +} + +var u User +gofakeit.Struct(&u) +fmt.Println(u.Name) // billy +fmt.Println(time.Time(*u.Age)) // 1990-12-07 04:14:25.685339029 +0000 UTC +``` + +## Custom Functions + +In a lot of situations you may need to use your own random function usage for your specific needs. + +If you would like to extend the usage of struct tags, generate function, available usages in the gofakeit server +or gofakeit command sub packages. You can do so via the AddFuncLookup. Each function has their own lookup, if +you need more reference examples you can look at each files lookups. + +```go +// Simple +gofakeit.AddFuncLookup("friendname", gofakeit.Info{ + Category: "custom", + Description: "Random friend name", + Example: "bill", + Output: "string", + Generate: func(f *gofakeit.Faker, m *gofakeit.MapParams, info *gofakeit.Info) (any, error) { + return f.RandomString([]string{"bill", "bob", "sally"}), nil + }, +}) + +// With Params +gofakeit.AddFuncLookup("jumbleword", gofakeit.Info{ + Category: "jumbleword", + Description: "Take a word and jumble it up", + Example: "loredlowlh", + Output: "string", + Params: []gofakeit.Param{ + {Field: "word", Type: "string", Description: "Word you want to jumble"}, + }, + Generate: func(f *gofakeit.Faker, m *gofakeit.MapParams, info *gofakeit.Info) (any, error) { + word, err := info.GetString(m, "word") + if err != nil { + return nil, err + } + + split := strings.Split(word, "") + f.ShuffleStrings(split) + return strings.Join(split, ""), nil + }, +}) + +type Foo struct { + FriendName string `fake:"{friendname}"` + JumbleWord string `fake:"{jumbleword:helloworld}"` +} + +var f Foo +gofakeit.Struct(&f) +fmt.Println(f.FriendName) // bill +fmt.Println(f.JumbleWord) // loredlowlh +``` + +## Templates + +Generate custom outputs using golang's template engine [https://pkg.go.dev/text/template](https://pkg.go.dev/text/template). + +We have added all the available functions to the template engine as well as some additional ones that are useful for template building. + +Additional Available Functions +```go +- ToUpper(s string) string // Make string upper case +- ToLower(s string) string // Make string lower case +- ToString(s any) // Convert to string +- ToDate(s string) time.Time // Convert string to date +- SpliceAny(args ...any) []any // Build a slice of anys, used with Weighted +- SpliceString(args ...string) []string // Build a slice of strings, used with Teams and RandomString +- SpliceUInt(args ...uint) []uint // Build a slice of uint, used with Dice and RandomUint +- SpliceInt(args ...int) []int // Build a slice of int, used with RandomInt +``` + +
+ Unavailable Gofakeit functions + +```go +// Any functions that don't have a return value +- AnythingThatReturnsVoid(): void + +// Not available to use in templates +- Template(co *TemplateOptions) ([]byte, error) +- RandomMapKey(mapI any) any +```
+ + +### Example Usages + +```go +import "github.com/brianvoe/gofakeit/v7" + +func main() { + // Accessing the Lines variable from within the template. + template := ` + Subject: {{RandomString (SliceString "Greetings" "Hello" "Hi")}} + + Dear {{LastName}}, + + {{RandomString (SliceString "Greetings!" "Hello there!" "Hi, how are you?")}} + + {{Paragraph 1 5 10 "\n\n"}} + + {{RandomString (SliceString "Warm regards" "Best wishes" "Sincerely")}} + {{$person:=Person}} + {{$person.FirstName}} {{$person.LastName}} + {{$person.Contact.Email}} + {{$person.Contact.Phone}} + ` + + value, err := gofakeit.Template(template, &gofakeit.TemplateOptions{Data: 5}) + + if err != nil { + fmt.Println(err) + } + + fmt.Println(value) +} +``` + +Output: +```text +Subject: Hello + +Dear Krajcik, + +Greetings! + +Quia voluptatem voluptatem voluptatem. Quia voluptatem voluptatem voluptatem. Quia voluptatem voluptatem voluptatem. + +Warm regards + +Kaitlyn Krajcik +kaitlynkrajcik@krajcik +570-245-7485 +``` + +## Functions + +All functions also exist as methods on the Faker struct + +### File + +Passing `nil` to `CSV`, `JSON` or `XML` will auto generate data using default values. + +```go +CSV(co *CSVOptions) ([]byte, error) +JSON(jo *JSONOptions) ([]byte, error) +XML(xo *XMLOptions) ([]byte, error) +FileExtension() string +FileMimeType() string +``` + +### Template + +Passing `nil` will auto generate data using default values. + +```go +Template(co *TemplateOptions) (string, error) +Markdown(co *MarkdownOptions) (string, error) +EmailText(co *EmailOptions) (string, error) +FixedWidth(co *FixedWidthOptions) (string, error) +``` + +### Product + +```go +Product() *ProductInfo +ProductName() string +ProductDescription() string +ProductCategory() string +ProductFeature() string +ProductMaterial() string +ProductUPC() string +ProductAudience() string +ProductDimension() string +ProductUseCase() string +ProductBenefit() string +ProductSuffix() string +ProductISBN(opts *ISBNOptions) string + +``` + +### Person + +```go +Person() *PersonInfo +Name() string +NamePrefix() string +NameSuffix() string +FirstName() string +MiddleName() string +LastName() string +Gender() string +SSN() string +EIN() string +Hobby() string +Contact() *ContactInfo +Email() string +Phone() string +PhoneFormatted() string +Teams(peopleArray []string, teamsArray []string) map[string][]string +``` + +### Generate + +```go +Struct(v any) +Slice(v any) +Map() map[string]any +Generate(value string) string +Regex(value string) string +``` + +### Auth + +```go +Username() string +Password(lower bool, upper bool, numeric bool, special bool, space bool, num int) string +``` + +### Address + +```go +Address() *AddressInfo +City() string +Country() string +CountryAbr() string +State() string +StateAbr() string +Street() string +StreetName() string +StreetNumber() string +StreetPrefix() string +StreetSuffix() string +Unit() string +Zip() string +Latitude() float64 +LatitudeInRange(min, max float64) (float64, error) +Longitude() float64 +LongitudeInRange(min, max float64) (float64, error) +``` + +### Game + +```go +Gamertag() string +Dice(numDice uint, sides []uint) []uint +``` + +### Beer + +```go +BeerAlcohol() string +BeerBlg() string +BeerHop() string +BeerIbu() string +BeerMalt() string +BeerName() string +BeerStyle() string +BeerYeast() string +``` + +### Car + +```go +Car() *CarInfo +CarMaker() string +CarModel() string +CarType() string +CarFuelType() string +CarTransmissionType() string +``` + +### Words + +```go +// Nouns +Noun() string +NounCommon() 
string +NounConcrete() string +NounAbstract() string +NounCollectivePeople() string +NounCollectiveAnimal() string +NounCollectiveThing() string +NounCountable() string +NounUncountable() string + +// Verbs +Verb() string +VerbAction() string +VerbLinking() string +VerbHelping() string + +// Adverbs +Adverb() string +AdverbManner() string +AdverbDegree() string +AdverbPlace() string +AdverbTimeDefinite() string +AdverbTimeIndefinite() string +AdverbFrequencyDefinite() string +AdverbFrequencyIndefinite() string + +// Propositions +Preposition() string +PrepositionSimple() string +PrepositionDouble() string +PrepositionCompound() string + +// Adjectives +Adjective() string +AdjectiveDescriptive() string +AdjectiveQuantitative() string +AdjectiveProper() string +AdjectiveDemonstrative() string +AdjectivePossessive() string +AdjectiveInterrogative() string +AdjectiveIndefinite() string + +// Pronouns +Pronoun() string +PronounPersonal() string +PronounObject() string +PronounPossessive() string +PronounReflective() string +PronounDemonstrative() string +PronounInterrogative() string +PronounRelative() string + +// Connectives +Connective() string +ConnectiveTime() string +ConnectiveComparative() string +ConnectiveComplaint() string +ConnectiveListing() string +ConnectiveCasual() string +ConnectiveExamplify() string + +// Words +Word() string + +// Sentences +Sentence(wordCount int) string +Paragraph(paragraphCount int, sentenceCount int, wordCount int, separator string) string +LoremIpsumWord() string +LoremIpsumSentence(wordCount int) string +LoremIpsumParagraph(paragraphCount int, sentenceCount int, wordCount int, separator string) string +Question() string +Quote() string +Phrase() string +``` + +### Foods + +```go +Fruit() string +Vegetable() string +Breakfast() string +Lunch() string +Dinner() string +Snack() string +Dessert() string +``` + +### Misc + +```go +Bool() bool +UUID() string +Weighted(options []any, weights []float32) (any, error) +FlipACoin() string +RandomMapKey(mapI any) any +ShuffleAnySlice(v any) +``` + +### Colors + +```go +Color() string +HexColor() string +RGBColor() []int +SafeColor() string +NiceColors() string +``` + +### Images + +```go +Image(width int, height int) *img.RGBA +ImageJpeg(width int, height int) []byte +ImagePng(width int, height int) []byte +``` + +### Internet + +```go +URL() string +DomainName() string +DomainSuffix() string +IPv4Address() string +IPv6Address() string +MacAddress() string +HTTPStatusCode() string +HTTPStatusCodeSimple() int +LogLevel(logType string) string +HTTPMethod() string +HTTPVersion() string +UserAgent() string +ChromeUserAgent() string +FirefoxUserAgent() string +OperaUserAgent() string +SafariUserAgent() string +``` + +### HTML + +```go +InputName() string +Svg(options *SVGOptions) string +``` + +### Date/Time + +```go +Date() time.Time +PastDate() time.Time +FutureDate() time.Time +DateRange(start, end time.Time) time.Time +NanoSecond() int +Second() int +Minute() int +Hour() int +Month() int +MonthString() string +Day() int +WeekDay() string +Year() int +TimeZone() string +TimeZoneAbv() string +TimeZoneFull() string +TimeZoneOffset() float32 +TimeZoneRegion() string +``` + +### Payment + +```go +Price(min, max float64) float64 +CreditCard() *CreditCardInfo +CreditCardCvv() string +CreditCardExp() string +CreditCardNumber(*CreditCardOptions) string +CreditCardType() string +Currency() *CurrencyInfo +CurrencyLong() string +CurrencyShort() string +AchRouting() string +AchAccount() string +BitcoinAddress() string 
+BitcoinPrivateKey() string +BankName() string +BankType() string +``` + +### Finance + +```go +Cusip() string +Isin() string +``` + +### Company + +```go +BS() string +Blurb() string +BuzzWord() string +Company() string +CompanySuffix() string +Job() *JobInfo +JobDescriptor() string +JobLevel() string +JobTitle() string +Slogan() string +``` + +### Hacker + +```go +HackerAbbreviation() string +HackerAdjective() string +Hackeringverb() string +HackerNoun() string +HackerPhrase() string +HackerVerb() string +``` + +### Hipster + +```go +HipsterWord() string +HipsterSentence(wordCount int) string +HipsterParagraph(paragraphCount int, sentenceCount int, wordCount int, separator string) string +``` + +### App + +```go +AppName() string +AppVersion() string +AppAuthor() string +``` + +### Animal + +```go +PetName() string +Animal() string +AnimalType() string +FarmAnimal() string +Cat() string +Dog() string +Bird() string +``` + +### Emoji + +```go +Emoji() string +EmojiDescription() string +EmojiCategory() string +EmojiAlias() string +EmojiTag() string +``` + +### Language + +```go +Language() string +LanguageAbbreviation() string +ProgrammingLanguage() string +ProgrammingLanguageBest() string +``` + +### Number + +```go +Number(min int, max int) int +Int() int +IntN(n int) int +Int8() int8 +Int16() int16 +Int32() int32 +Int64() int64 +Uint() uint +UintN(n uint) uint +Uint8() uint8 +Uint16() uint16 +Uint32() uint32 +Uint64() uint64 +Float32() float32 +Float32Range(min, max float32) float32 +Float64() float64 +Float64Range(min, max float64) float64 +ShuffleInts(a []int) +RandomInt(i []int) int +HexUint(bitsize int) string +``` + +### String + +```go +Digit() string +DigitN(n uint) string +Letter() string +LetterN(n uint) string +Lexify(str string) string +Numerify(str string) string +ShuffleStrings(a []string) +RandomString(a []string) string +``` + +### Celebrity + +```go +CelebrityActor() string +CelebrityBusiness() string +CelebritySport() string +``` + +### Minecraft + +```go +MinecraftOre() string +MinecraftWood() string +MinecraftArmorTier() string +MinecraftArmorPart() string +MinecraftWeapon() string +MinecraftTool() string +MinecraftDye() string +MinecraftFood() string +MinecraftAnimal() string +MinecraftVillagerJob() string +MinecraftVillagerStation() string +MinecraftVillagerLevel() string +MinecraftMobPassive() string +MinecraftMobNeutral() string +MinecraftMobHostile() string +MinecraftMobBoss() string +MinecraftBiome() string +MinecraftWeather() string +``` + +### Book + +```go +Book() *BookInfo +BookTitle() string +BookAuthor() string +BookGenre() string +``` + +### Movie + +```go +Movie() *MovieInfo +MovieName() string +MovieGenre() string +``` + +### Error + +```go +Error() error +ErrorDatabase() error +ErrorGRPC() error +ErrorHTTP() error +ErrorHTTPClient() error +ErrorHTTPServer() error +ErrorInput() error +ErrorRuntime() error +``` + +### School + +```go +School() string +``` + +### Song + +```go +Song() *SongInfo +SongName() string +SongArtist() string +SongGenre() string +``` \ No newline at end of file diff --git a/vendor/github.com/brianvoe/gofakeit/v7/address.go b/vendor/github.com/brianvoe/gofakeit/v7/address.go new file mode 100644 index 0000000000..7638841338 --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/address.go @@ -0,0 +1,477 @@ +package gofakeit + +import ( + "errors" + "strings" +) + +// AddressInfo is a struct full of address information +type AddressInfo struct { + Address string `json:"address" xml:"address"` + Street string `json:"street" 
xml:"street"` + Unit string `json:"unit" xml:"unit"` + City string `json:"city" xml:"city"` + State string `json:"state" xml:"state"` + Zip string `json:"zip" xml:"zip"` + Country string `json:"country" xml:"country"` + Latitude float64 `json:"latitude" xml:"latitude"` + Longitude float64 `json:"longitude" xml:"longitude"` +} + +// Address will generate a struct of address information +func Address() *AddressInfo { return address(GlobalFaker) } + +// Address will generate a struct of address information +func (f *Faker) Address() *AddressInfo { return address(f) } + +func address(f *Faker) *AddressInfo { + street := street(f) + city := city(f) + state := state(f) + zip := zip(f) + + // 30% chance to include a unit in the address + var unitStr string + var unitField string + if randIntRange(f, 1, 10) <= 3 { + unitStr = ", " + unit(f) + unitField = unit(f) + } + + addressStr := street + unitStr + ", " + city + ", " + state + " " + zip + + return &AddressInfo{ + Address: addressStr, + Street: street, + Unit: unitField, + City: city, + State: state, + Zip: zip, + Country: country(f), + Latitude: latitude(f), + Longitude: longitude(f), + } +} + +// Street will generate a random address street string +func Street() string { return street(GlobalFaker) } + +// Street will generate a random address street string +func (f *Faker) Street() string { return street(f) } + +func street(f *Faker) string { + var street = "" + switch randInt := randIntRange(f, 1, 2); randInt { + case 1: + street = streetNumber(f) + " " + streetPrefix(f) + " " + streetName(f) + streetSuffix(f) + case 2: + street = streetNumber(f) + " " + streetName(f) + streetSuffix(f) + } + + return street +} + +// StreetNumber will generate a random address street number string +func StreetNumber() string { return streetNumber(GlobalFaker) } + +// StreetNumber will generate a random address street number string +func (f *Faker) StreetNumber() string { return streetNumber(f) } + +func streetNumber(f *Faker) string { + return strings.TrimLeft(replaceWithNumbers(f, getRandValue(f, []string{"address", "number"})), "0") +} + +// StreetPrefix will generate a random address street prefix string +func StreetPrefix() string { return streetPrefix(GlobalFaker) } + +// StreetPrefix will generate a random address street prefix string +func (f *Faker) StreetPrefix() string { return streetPrefix(f) } + +func streetPrefix(f *Faker) string { return getRandValue(f, []string{"address", "street_prefix"}) } + +// StreetName will generate a random address street name string +func StreetName() string { return streetName(GlobalFaker) } + +// StreetName will generate a random address street name string +func (f *Faker) StreetName() string { return streetName(f) } + +func streetName(f *Faker) string { return getRandValue(f, []string{"address", "street_name"}) } + +// StreetSuffix will generate a random address street suffix string +func StreetSuffix() string { return streetSuffix(GlobalFaker) } + +// StreetSuffix will generate a random address street suffix string +func (f *Faker) StreetSuffix() string { return streetSuffix(f) } + +func streetSuffix(f *Faker) string { return getRandValue(f, []string{"address", "street_suffix"}) } + +// Unit will generate a random unit string +func Unit() string { return unit(GlobalFaker) } + +// Unit will generate a random unit string +func (f *Faker) Unit() string { return unit(f) } + +func unit(f *Faker) string { + unitType := getRandValue(f, []string{"address", "unit"}) + unitNumber := replaceWithNumbers(f, "###") + return 
unitType + " " + unitNumber +} + +// City will generate a random city string +func City() string { return city(GlobalFaker) } + +// City will generate a random city string +func (f *Faker) City() string { return city(f) } + +func city(f *Faker) string { return getRandValue(f, []string{"address", "city"}) } + +// State will generate a random state string +func State() string { return state(GlobalFaker) } + +// State will generate a random state string +func (f *Faker) State() string { return state(f) } + +func state(f *Faker) string { return getRandValue(f, []string{"address", "state"}) } + +// StateAbr will generate a random abbreviated state string +func StateAbr() string { return stateAbr(GlobalFaker) } + +// StateAbr will generate a random abbreviated state string +func (f *Faker) StateAbr() string { return stateAbr(f) } + +func stateAbr(f *Faker) string { return getRandValue(f, []string{"address", "state_abr"}) } + +// Zip will generate a random Zip code string +func Zip() string { return zip(GlobalFaker) } + +// Zip will generate a random Zip code string +func (f *Faker) Zip() string { return zip(f) } + +func zip(f *Faker) string { + return replaceWithNumbers(f, getRandValue(f, []string{"address", "zip"})) +} + +// Country will generate a random country string +func Country() string { return country(GlobalFaker) } + +// Country will generate a random country string +func (f *Faker) Country() string { return country(f) } + +func country(f *Faker) string { return getRandValue(f, []string{"address", "country"}) } + +// CountryAbr will generate a random abbreviated country string +func CountryAbr() string { return countryAbr(GlobalFaker) } + +// CountryAbr will generate a random abbreviated country string +func (f *Faker) CountryAbr() string { return countryAbr(f) } + +func countryAbr(f *Faker) string { return getRandValue(f, []string{"address", "country_abr"}) } + +// Latitude will generate a random latitude float64 +func Latitude() float64 { return latitude(GlobalFaker) } + +// Latitude will generate a random latitude float64 +func (f *Faker) Latitude() float64 { return latitude(f) } + +func latitude(f *Faker) float64 { return toFixed((f.Float64()*180)-90, 6) } + +// LatitudeInRange will generate a random latitude within the input range +func LatitudeInRange(min, max float64) (float64, error) { + return latitudeInRange(GlobalFaker, min, max) +} + +// LatitudeInRange will generate a random latitude within the input range +func (f *Faker) LatitudeInRange(min, max float64) (float64, error) { + return latitudeInRange(f, min, max) +} + +func latitudeInRange(f *Faker, min, max float64) (float64, error) { + if min > max || min < -90 || min > 90 || max < -90 || max > 90 { + return 0, errors.New("invalid min or max range, must be valid floats and between -90 and 90") + } + return toFixed(float64Range(f, min, max), 6), nil +} + +// Longitude will generate a random longitude float64 +func Longitude() float64 { return longitude(GlobalFaker) } + +// Longitude will generate a random longitude float64 +func (f *Faker) Longitude() float64 { return longitude(f) } + +func longitude(f *Faker) float64 { return toFixed((f.Float64()*360)-180, 6) } + +// LongitudeInRange will generate a random longitude within the input range +func LongitudeInRange(min, max float64) (float64, error) { + return longitudeInRange(GlobalFaker, min, max) +} + +// LongitudeInRange will generate a random longitude within the input range +func (f *Faker) LongitudeInRange(min, max float64) (float64, error) { + return 
longitudeInRange(f, min, max) +} + +func longitudeInRange(f *Faker, min, max float64) (float64, error) { + if min > max || min < -180 || min > 180 || max < -180 || max > 180 { + return 0, errors.New("invalid min or max range, must be valid floats and between -180 and 180") + } + return toFixed(float64Range(f, min, max), 6), nil +} + +func addAddressLookup() { + AddFuncLookup("address", Info{ + Display: "Address", + Category: "address", + Description: "Residential location including street, city, state, country and postal code", + Example: `{ + "address": "364 Unionsville, Apt 123, Norfolk, Ohio 99536", + "street": "364 Unionsville", + "apartment": "Apt 123", + "city": "Norfolk", + "state": "Ohio", + "zip": "99536", + "country": "Lesotho", + "latitude": 88.792592, + "longitude": 174.504681 +}`, + Output: "map[string]any", + ContentType: "application/json", + Aliases: []string{"full address", "residential address", "mailing address", "street address", "home address"}, + Keywords: []string{"address", "residential", "location", "street", "city", "state", "country", "postal", "code", "mailing", "home", "house", "apartment", "zipcode", "coordinates"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return address(f), nil + }, + }) + + AddFuncLookup("city", Info{ + Display: "City", + Category: "address", + Description: "Part of a country with significant population, often a central hub for culture and commerce", + Example: "Marcelside", + Output: "string", + Aliases: []string{"city name", "urban area", "municipality name", "town name", "metropolitan area"}, + Keywords: []string{"city", "town", "municipality", "urban", "area", "population", "hub", "culture", "commerce", "metropolitan", "settlement", "community", "district"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return city(f), nil + }, + }) + + AddFuncLookup("country", Info{ + Display: "Country", + Category: "address", + Description: "Nation with its own government and defined territory", + Example: "United States of America", + Output: "string", + Aliases: []string{"country name", "nation name", "sovereign state", "national territory", "independent country"}, + Keywords: []string{"country", "nation", "government", "territory", "sovereign", "independent", "state", "republic", "kingdom", "empire", "federation", "commonwealth"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return country(f), nil + }, + }) + + AddFuncLookup("countryabr", Info{ + Display: "Country Abbreviation", + Category: "address", + Description: "Shortened 2-letter form of a country's name", + Example: "US", + Output: "string", + Aliases: []string{"country code", "iso alpha-2", "iso3166-1 alpha-2", "two-letter country", "country short code"}, + Keywords: []string{"country", "abbreviation", "shortened", "2-letter", "nation", "iso", "code", "alpha-2", "iso3166-1", "standard", "international", "identifier"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return countryAbr(f), nil + }, + }) + + AddFuncLookup("state", Info{ + Display: "State", + Category: "address", + Description: "Governmental division within a country, often having its own laws and government", + Example: "Illinois", + Output: "string", + Aliases: []string{"state name", "province name", "region name", "administrative division", "territory name"}, + Keywords: []string{"state", "province", "region", "division", "governmental", "territory", "area", "laws", "government", "administrative", "subdivision", "district", 
"county"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return state(f), nil + }, + }) + + AddFuncLookup("stateabr", Info{ + Display: "State Abbreviation", + Category: "address", + Description: "Shortened 2-letter form of a state or province", + Example: "IL", + Output: "string", + Aliases: []string{"state code", "province code", "region code", "usps code", "iso3166-2 code"}, + Keywords: []string{"state", "abbreviation", "shortened", "2-letter", "region", "province", "country", "code", "usps", "iso3166-2", "identifier"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return stateAbr(f), nil + }, + }) + + AddFuncLookup("street", Info{ + Display: "Street", + Category: "address", + Description: "Public road in a city or town, typically with houses and buildings on each side", + Example: "364 East Rapidsborough", + Output: "string", + Aliases: []string{"street address", "shipping address", "billing address", "mailing address", "address line 1", "line 1", "road address", "avenue address", "drive address", "thoroughfare address"}, + Keywords: []string{"address", "street", "road", "avenue", "drive", "lane", "way", "public", "thoroughfare", "boulevard", "court", "place", "circle", "terrace", "highway"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return street(f), nil + }, + }) + + AddFuncLookup("streetname", Info{ + Display: "Street Name", + Category: "address", + Description: "Name given to a specific road or street", + Example: "View", + Output: "string", + Aliases: []string{"street title", "road name", "avenue name", "drive name", "thoroughfare name"}, + Keywords: []string{"street", "name", "road", "avenue", "drive", "lane", "way", "thoroughfare", "specific", "title", "designation", "label", "identifier"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return streetName(f), nil + }, + }) + + AddFuncLookup("streetnumber", Info{ + Display: "Street Number", + Category: "address", + Description: "Numerical identifier assigned to a street", + Example: "13645", + Output: "string", + Aliases: []string{"house number", "building number", "address number", "street identifier", "numerical address"}, + Keywords: []string{"street", "number", "identifier", "numerical", "address", "location", "building", "assigned", "house", "digit", "numeric", "sequence", "position"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return streetNumber(f), nil + }, + }) + + AddFuncLookup("streetprefix", Info{ + Display: "Street Prefix", + Category: "address", + Description: "Directional or descriptive term preceding a street name (e.g., 'East', 'N')", + Example: "East", + Output: "string", + Aliases: []string{"directional prefix", "street prefix", "name prefix", "road prefix", "thoroughfare prefix"}, + Keywords: []string{"street", "prefix", "directional", "north", "south", "east", "west", "n", "s", "e", "w"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return streetPrefix(f), nil + }, + }) + + AddFuncLookup("streetsuffix", Info{ + Display: "Street Suffix", + Category: "address", + Description: "Designation at the end of a street name indicating type (e.g., 'Ave', 'St')", + Example: "Ave", + Output: "string", + Aliases: []string{"street type", "road type", "avenue suffix", "thoroughfare suffix", "street ending"}, + Keywords: []string{"street", "suffix", "designation", "type", "ave", "st", "rd", "dr", "ln", "blvd", "ct", "pl", "cir", "ter", "hwy"}, + Generate: func(f *Faker, m 
*MapParams, info *Info) (any, error) { + return streetSuffix(f), nil + }, + }) + + AddFuncLookup("unit", Info{ + Display: "Unit", + Category: "address", + Description: "Unit identifier within a building, such as apartment number, suite, or office", + Example: "Apt 123", + Output: "string", + Aliases: []string{"apartment unit", "suite number", "office number", "building unit", "room number", "address line 2", "line 2"}, + Keywords: []string{"unit", "apartment", "suite", "office", "identifier", "building", "number", "within", "room", "floor", "level", "section", "compartment"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return unit(f), nil + }, + }) + + AddFuncLookup("zip", Info{ + Display: "Zip", + Category: "address", + Description: "Numerical code for postal address sorting, specific to a geographic area", + Example: "13645", + Output: "string", + Aliases: []string{"zip code", "postal code", "mail code", "delivery code"}, + Keywords: []string{"zip", "postal", "postcode", "code", "address", "sorting", "geographic", "area", "numerical", "mailing", "delivery", "zone", "district", "region", "identifier"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return zip(f), nil + }, + }) + + AddFuncLookup("latitude", Info{ + Display: "Latitude", + Category: "address", + Description: "Geographic coordinate specifying north-south position on Earth's surface", + Example: "-73.534056", + Output: "float", + Aliases: []string{"lat coordinate", "north-south coordinate", "geographic latitude", "earth latitude", "position latitude"}, + Keywords: []string{"latitude", "lat", "coordinate", "north-south", "degrees", "gps", "wgs84", "geodesy", "parallel", "equator", "pole"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return latitude(f), nil + }, + }) + + AddFuncLookup("latituderange", Info{ + Display: "Latitude Range", + Category: "address", + Description: "Latitude number between the given range (default min=0, max=90)", + Example: "22.921026", + Output: "float", + Aliases: []string{"latitude bounds", "lat range", "north-south range", "geographic bounds", "coordinate range"}, + Keywords: []string{"latitude", "lat", "range", "min", "max", "degrees", "gps", "wgs84", "bounds", "interval"}, + Params: []Param{ + {Field: "min", Display: "Min", Type: "float", Default: "0", Description: "Minimum range"}, + {Field: "max", Display: "Max", Type: "float", Default: "90", Description: "Maximum range"}, + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + min, err := info.GetFloat64(m, "min") + if err != nil { + return nil, err + } + max, err := info.GetFloat64(m, "max") + if err != nil { + return nil, err + } + return latitudeInRange(f, min, max) + }, + }) + + AddFuncLookup("longitude", Info{ + Display: "Longitude", + Category: "address", + Description: "Geographic coordinate indicating east-west position on Earth's surface", + Example: "-147.068112", + Output: "float", + Aliases: []string{"long coordinate", "east-west coordinate", "geographic longitude", "earth longitude", "position longitude"}, + Keywords: []string{"longitude", "lon", "coordinate", "east-west", "degrees", "gps", "wgs84", "geodesy", "meridian", "idl"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return longitude(f), nil + }, + }) + + AddFuncLookup("longituderange", Info{ + Display: "Longitude Range", + Category: "address", + Description: "Longitude number between the given range (default min=0, max=180)", + Example: "-8.170450", + 
Output: "float", + Aliases: []string{"longitude bounds", "long range", "east-west range", "geographic bounds", "coordinate range"}, + Keywords: []string{"longitude", "lon", "range", "min", "max", "degrees", "gps", "wgs84", "bounds", "interval"}, + Params: []Param{ + {Field: "min", Display: "Min", Type: "float", Default: "0", Description: "Minimum range"}, + {Field: "max", Display: "Max", Type: "float", Default: "180", Description: "Maximum range"}, + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + min, err := info.GetFloat64(m, "min") + if err != nil { + return nil, err + } + max, err := info.GetFloat64(m, "max") + if err != nil { + return nil, err + } + return longitudeInRange(f, min, max) + }, + }) + +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/animal.go b/vendor/github.com/brianvoe/gofakeit/v7/animal.go new file mode 100644 index 0000000000..afd69f3391 --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/animal.go @@ -0,0 +1,192 @@ +package gofakeit + +// PetName will return a random fun pet name +func PetName() string { + return petName(GlobalFaker) +} + +// PetName will return a random fun pet name +func (f *Faker) PetName() string { + return petName(f) +} + +func petName(f *Faker) string { + return getRandValue(f, []string{"animal", "petname"}) +} + +// Animal will return a random animal +func Animal() string { + return animal(GlobalFaker) +} + +// Animal will return a random animal +func (f *Faker) Animal() string { + return animal(f) +} + +func animal(f *Faker) string { + return getRandValue(f, []string{"animal", "animal"}) +} + +// AnimalType will return a random animal type +func AnimalType() string { + return animalType(GlobalFaker) +} + +// AnimalType will return a random animal type +func (f *Faker) AnimalType() string { + return animalType(f) +} + +func animalType(f *Faker) string { + return getRandValue(f, []string{"animal", "type"}) +} + +// FarmAnimal will return a random animal that usually lives on a farm +func FarmAnimal() string { + return farmAnimal(GlobalFaker) +} + +// FarmAnimal will return a random animal that usually lives on a farm +func (f *Faker) FarmAnimal() string { + return farmAnimal(f) +} + +func farmAnimal(f *Faker) string { + return getRandValue(f, []string{"animal", "farm"}) +} + +// Cat will return a random cat breed +func Cat() string { + return cat(GlobalFaker) +} + +// Cat will return a random cat breed +func (f *Faker) Cat() string { + return cat(f) +} + +func cat(f *Faker) string { + return getRandValue(f, []string{"animal", "cat"}) +} + +// Dog will return a random dog breed +func Dog() string { + return dog(GlobalFaker) +} + +// Dog will return a random dog breed +func (f *Faker) Dog() string { + return dog(f) +} + +func dog(f *Faker) string { + return getRandValue(f, []string{"animal", "dog"}) +} + +// Bird will return a random bird species +func Bird() string { + return bird(GlobalFaker) +} + +// Bird will return a random bird species +func (f *Faker) Bird() string { + return bird(f) +} + +func bird(f *Faker) string { + return getRandValue(f, []string{"animal", "bird"}) +} + +func addAnimalLookup() { + AddFuncLookup("petname", Info{ + Display: "Pet Name", + Category: "animal", + Description: "Affectionate nickname given to a pet", + Example: "Ozzy Pawsborne", + Output: "string", + Aliases: []string{"pet nickname", "animal name", "companion name", "friendly name", "affectionate name"}, + Keywords: []string{"pet", "name", "nickname", "affectionate", "animal", "companion", "friendly", "cute", "funny", 
"playful", "loving", "adorable"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return petName(f), nil + }, + }) + + AddFuncLookup("animal", Info{ + Display: "Animal", + Category: "animal", + Description: "Living creature with the ability to move, eat, and interact with its environment", + Example: "elk", + Output: "string", + Aliases: []string{"wild animal", "living creature", "wildlife species", "animal species", "creature name"}, + Keywords: []string{"animal", "creature", "living", "move", "eat", "environment", "wildlife", "species", "fauna", "beast", "organism", "vertebrate", "invertebrate"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return animal(f), nil + }, + }) + + AddFuncLookup("animaltype", Info{ + Display: "Animal Type", + Category: "animal", + Description: "Type of animal, such as mammals, birds, reptiles, etc.", + Example: "amphibians", + Output: "string", + Aliases: []string{"animal classification", "species type", "taxonomic group", "animal category", "biological class"}, + Keywords: []string{"animal", "type", "mammals", "birds", "reptiles", "amphibians", "classification", "taxonomy", "phylum", "class", "order", "family", "genus", "species"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return animalType(f), nil + }, + }) + + AddFuncLookup("farmanimal", Info{ + Display: "Farm Animal", + Category: "animal", + Description: "Animal name commonly found on a farm", + Example: "Chicken", + Output: "string", + Aliases: []string{"livestock animal", "barnyard animal", "agricultural animal", "domestic animal", "farm livestock"}, + Keywords: []string{"farm", "animal", "livestock", "domestic", "agriculture", "commonly", "cattle", "barnyard", "herd", "poultry", "swine", "sheep", "goat", "horse", "pig"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return farmAnimal(f), nil + }, + }) + + AddFuncLookup("cat", Info{ + Display: "Cat", + Category: "animal", + Description: "Various breeds that define different cats", + Example: "Chausie", + Output: "string", + Aliases: []string{"cat breed", "feline breed", "domestic cat", "pet cat", "kitty breed"}, + Keywords: []string{"cat", "breed", "feline", "pet", "domestic", "various", "persian", "siamese", "maine", "coon", "tabby", "calico", "tuxedo", "kitten"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return cat(f), nil + }, + }) + + AddFuncLookup("dog", Info{ + Display: "Dog", + Category: "animal", + Description: "Various breeds that define different dogs", + Example: "Norwich Terrier", + Output: "string", + Aliases: []string{"dog breed", "canine breed", "domestic dog", "pet dog", "fido breed"}, + Keywords: []string{"dog", "breed", "canine", "pet", "domestic", "various", "labrador", "retriever", "terrier", "shepherd", "bulldog", "poodle", "puppy", "hound"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return dog(f), nil + }, + }) + + AddFuncLookup("bird", Info{ + Display: "Bird", + Category: "animal", + Description: "Distinct species of birds", + Example: "goose", + Output: "string", + Aliases: []string{"bird species", "avian species", "feathered animal", "winged creature", "bird type"}, + Keywords: []string{"bird", "species", "avian", "feather", "wing", "distinct", "sparrow", "eagle", "hawk", "owl", "duck", "goose", "parrot", "finch", "robin"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return bird(f), nil + }, + }) +} diff --git 
a/vendor/github.com/brianvoe/gofakeit/v7/app.go b/vendor/github.com/brianvoe/gofakeit/v7/app.go new file mode 100644 index 0000000000..44c414f9ca --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/app.go @@ -0,0 +1,102 @@ +package gofakeit + +import ( + "fmt" +) + +// AppName will generate a random app name +func AppName() string { + return appName(GlobalFaker) +} + +// AppName will generate a random app name +func (f *Faker) AppName() string { + return appName(f) +} + +func appName(f *Faker) string { + name := "" + switch number(f, 1, 3) { + case 1: + name = noun(f) + verb(f) + case 2: + name = color(f) + noun(f) + case 3: + name = animal(f) + verb(f) + } + + return title(name) +} + +// AppVersion will generate a random app version +func AppVersion() string { + return appVersion(GlobalFaker) +} + +// AppVersion will generate a random app version +func (f *Faker) AppVersion() string { + return appVersion(f) +} + +func appVersion(f *Faker) string { + return fmt.Sprintf("%d.%d.%d", number(f, 1, 5), number(f, 1, 20), number(f, 1, 20)) +} + +// AppAuthor will generate a random company or person name +func AppAuthor() string { + return appAuthor(GlobalFaker) +} + +// AppAuthor will generate a random company or person name +func (f *Faker) AppAuthor() string { + return appAuthor(f) +} + +func appAuthor(f *Faker) string { + if boolFunc(f) { + return name(f) + } + + return company(f) +} + +func addAppLookup() { + AddFuncLookup("appname", Info{ + Display: "App Name", + Category: "app", + Description: "Software program designed for a specific purpose or task on a computer or mobile device", + Example: "Parkrespond", + Output: "string", + Aliases: []string{"software name", "application name", "mobile app name", "program title", "app title"}, + Keywords: []string{"app", "name", "software", "program", "application", "mobile", "device", "computer", "ios", "android", "desktop", "web", "platform", "title"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return appName(f), nil + }, + }) + + AddFuncLookup("appversion", Info{ + Display: "App Version", + Category: "app", + Description: "Particular release of an application in Semantic Versioning format", + Example: "1.12.14", + Output: "string", + Aliases: []string{"semantic version", "app release", "software version", "version number", "release version"}, + Keywords: []string{"app", "version", "release", "semantic", "versioning", "application", "major", "minor", "patch", "build", "number", "format", "tag"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return appVersion(f), nil + }, + }) + + AddFuncLookup("appauthor", Info{ + Display: "App Author", + Category: "app", + Description: "Person or group creating and developing an application", + Example: "Qado Energy, Inc.", + Output: "string", + Aliases: []string{"app developer", "software author", "application creator", "program developer", "app creator"}, + Keywords: []string{"app", "author", "developer", "creator", "person", "company", "group", "creating", "programmer", "coder", "engineer", "team", "organization", "studio", "publisher"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return appAuthor(f), nil + }, + }) +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/auth.go b/vendor/github.com/brianvoe/gofakeit/v7/auth.go new file mode 100644 index 0000000000..a8db91d59e --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/auth.go @@ -0,0 +1,188 @@ +package gofakeit + +// Username will generate a random username based 
upon picking a random lastname and random numbers at the end +func Username() string { + return username(GlobalFaker) +} + +// Username will generate a random username based upon picking a random lastname and random numbers at the end +func (f *Faker) Username() string { + return username(f) +} + +func username(f *Faker) string { + return getRandValue(f, []string{"person", "last"}) + replaceWithNumbers(f, "####") +} + +// Password will generate a random password. +// Minimum number length of 5 if less than. +func Password(lower bool, upper bool, numeric bool, special bool, space bool, num int) string { + return password(GlobalFaker, lower, upper, numeric, special, space, num) +} + +// Password will generate a random password. +// Minimum number length of 5 if less than. +func (f *Faker) Password(lower bool, upper bool, numeric bool, special bool, space bool, num int) string { + return password(f, lower, upper, numeric, special, space, num) +} + +func password(f *Faker, lower bool, upper bool, numeric bool, special bool, space bool, num int) string { + // Make sure the num minimum is at least 5 + if num < 5 { + num = 5 + } + + // Setup weights + items := make([]any, 0) + weights := make([]float32, 0) + if lower { + items = append(items, "l") + weights = append(weights, 4) + } + if upper { + items = append(items, "u") + weights = append(weights, 4) + } + if numeric { + items = append(items, "n") + weights = append(weights, 3) + } + if special { + items = append(items, "e") + weights = append(weights, 2) + } + if space { + items = append(items, "a") + weights = append(weights, 1) + } + + // If no items are selected then default to lower, upper, numeric + if len(items) == 0 { + items = append(items, "l", "u", "n") + weights = append(weights, 4, 4, 3) + } + + // Create byte slice + b := make([]byte, num) + + for i := 0; i <= num-1; i++ { + // Run weighted + weight, _ := weighted(f, items, weights) + + switch weight.(string) { + case "l": + b[i] = lowerStr[f.Int64()%int64(len(lowerStr))] + case "u": + b[i] = upperStr[f.Int64()%int64(len(upperStr))] + case "n": + b[i] = numericStr[f.Int64()%int64(len(numericStr))] + case "e": + b[i] = specialSafeStr[f.Int64()%int64(len(specialSafeStr))] + case "a": + b[i] = spaceStr[f.Int64()%int64(len(spaceStr))] + } + } + + // Shuffle bytes + for i := range b { + j := f.IntN(i + 1) + b[i], b[j] = b[j], b[i] + } + + // Replace first or last character if it's a space, and other options are available + if b[0] == ' ' { + b[0] = password(f, lower, upper, numeric, special, false, 1)[0] + } + if b[len(b)-1] == ' ' { + b[len(b)-1] = password(f, lower, upper, numeric, special, false, 1)[0] + } + + return string(b) +} + +func addAuthLookup() { + AddFuncLookup("username", Info{ + Display: "Username", + Category: "auth", + Description: "Unique identifier assigned to a user for accessing an account or system", + Example: "Daniel1364", + Output: "string", + Aliases: []string{ + "user name", + "login name", + "account username", + "account login", + "screen name", + "user handle", + }, + Keywords: []string{ + "username", "login", "handle", "userid", "screenname", + "user", "account", "credential", "signin", "alias", "profile", "uid", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return username(f), nil + }, + }) + + AddFuncLookup("password", Info{ + Display: "Password", + Category: "auth", + Description: "Secret word or phrase used to authenticate access to a system or account", + Example: "EEP+wwpk 4lU-eHNXlJZ4n K9%v&TZ9e", + Output: "string", + 
Aliases: []string{ + "user password", + "account password", + "login password", + "secret phrase", + "auth secret", + }, + Keywords: []string{ + "password", "passphrase", "pwd", "secret", + "credential", "authentication", "auth", + "security", "signin", "login", + "access", "key", "token", "hash", "encryption", + }, + Params: []Param{ + {Field: "lower", Display: "Lower", Type: "bool", Default: "true", Description: "Whether or not to add lower case characters"}, + {Field: "upper", Display: "Upper", Type: "bool", Default: "true", Description: "Whether or not to add upper case characters"}, + {Field: "numeric", Display: "Numeric", Type: "bool", Default: "true", Description: "Whether or not to add numeric characters"}, + {Field: "special", Display: "Special", Type: "bool", Default: "true", Description: "Whether or not to add special characters"}, + {Field: "space", Display: "Space", Type: "bool", Default: "false", Description: "Whether or not to add spaces"}, + {Field: "length", Display: "Length", Type: "int", Default: "12", Description: "Number of characters in password"}, + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + lower, err := info.GetBool(m, "lower") + if err != nil { + return nil, err + } + + upper, err := info.GetBool(m, "upper") + if err != nil { + return nil, err + } + + numeric, err := info.GetBool(m, "numeric") + if err != nil { + return nil, err + } + + special, err := info.GetBool(m, "special") + if err != nil { + return nil, err + } + + space, err := info.GetBool(m, "space") + if err != nil { + return nil, err + } + + length, err := info.GetInt(m, "length") + if err != nil { + return nil, err + } + + return password(f, lower, upper, numeric, special, space, length), nil + }, + }) +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/beer.go b/vendor/github.com/brianvoe/gofakeit/v7/beer.go new file mode 100644 index 0000000000..9b407d7082 --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/beer.go @@ -0,0 +1,223 @@ +package gofakeit + +import ( + "strconv" +) + +// BeerName will return a random beer name +func BeerName() string { + return beerName(GlobalFaker) +} + +// BeerName will return a random beer name +func (f *Faker) BeerName() string { + return beerName(f) +} + +func beerName(f *Faker) string { + return getRandValue(f, []string{"beer", "name"}) +} + +// BeerStyle will return a random beer style +func BeerStyle() string { + return beerStyle(GlobalFaker) +} + +// BeerStyle will return a random beer style +func (f *Faker) BeerStyle() string { + return beerStyle(f) +} + +func beerStyle(f *Faker) string { + return getRandValue(f, []string{"beer", "style"}) +} + +// BeerHop will return a random beer hop +func BeerHop() string { + return beerHop(GlobalFaker) +} + +// BeerHop will return a random beer hop +func (f *Faker) BeerHop() string { + return beerHop(f) +} + +func beerHop(f *Faker) string { + return getRandValue(f, []string{"beer", "hop"}) +} + +// BeerYeast will return a random beer yeast +func BeerYeast() string { + return beerYeast(GlobalFaker) +} + +// BeerYeast will return a random beer yeast +func (f *Faker) BeerYeast() string { + return beerYeast(f) +} + +func beerYeast(f *Faker) string { + return getRandValue(f, []string{"beer", "yeast"}) +} + +// BeerMalt will return a random beer malt +func BeerMalt() string { + return beerMalt(GlobalFaker) +} + +// BeerMalt will return a random beer malt +func (f *Faker) BeerMalt() string { + return beerMalt(f) +} + +func beerMalt(f *Faker) string { + return getRandValue(f, []string{"beer", 
"malt"}) +} + +// BeerAlcohol will return a random beer alcohol level between 2.0 and 10.0 +func BeerAlcohol() string { + return beerAlcohol(GlobalFaker) +} + +// BeerAlcohol will return a random beer alcohol level between 2.0 and 10.0 +func (f *Faker) BeerAlcohol() string { + return beerAlcohol(f) +} + +func beerAlcohol(f *Faker) string { + return strconv.FormatFloat(float64Range(f, 2.0, 10.0), 'f', 1, 64) + "%" +} + +// BeerIbu will return a random beer ibu value between 10 and 100 +func BeerIbu() string { + return beerIbu(GlobalFaker) +} + +// BeerIbu will return a random beer ibu value between 10 and 100 +func (f *Faker) BeerIbu() string { + return beerIbu(f) +} + +func beerIbu(f *Faker) string { + return strconv.Itoa(randIntRange(f, 10, 100)) + " IBU" +} + +// BeerBlg will return a random beer blg between 5.0 and 20.0 +func BeerBlg() string { + return beerBlg(GlobalFaker) +} + +// BeerBlg will return a random beer blg between 5.0 and 20.0 +func (f *Faker) BeerBlg() string { + return beerBlg(f) +} + +func beerBlg(f *Faker) string { + return strconv.FormatFloat(float64Range(f, 5.0, 20.0), 'f', 1, 64) + "°Blg" +} + +func addBeerLookup() { + AddFuncLookup("beername", Info{ + Display: "Beer Name", + Category: "beer", + Description: "Specific brand or variety of beer", + Example: "Duvel", + Output: "string", + Aliases: []string{"brand", "brewery", "label", "craft", "microbrew"}, + Keywords: []string{"beer", "name", "variety", "specific", "alcoholic", "beverage", "lager", "ale", "stout", "pilsner", "ipa"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return beerName(f), nil + }, + }) + + AddFuncLookup("beerstyle", Info{ + Display: "Beer Style", + Category: "beer", + Description: "Distinct characteristics and flavors of beer", + Example: "European Amber Lager", + Output: "string", + Aliases: []string{"style", "type", "category", "classification", "variety"}, + Keywords: []string{"beer", "characteristics", "flavors", "distinct", "lager", "ale", "stout", "pilsner", "porter", "wheat", "amber"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return beerStyle(f), nil + }, + }) + + AddFuncLookup("beerhop", Info{ + Display: "Beer Hop", + Category: "beer", + Description: "The flower used in brewing to add flavor, aroma, and bitterness to beer", + Example: "Glacier", + Output: "string", + Aliases: []string{"hop", "flower", "plant", "cone", "vine"}, + Keywords: []string{"beer", "brewing", "flavor", "aroma", "bitterness", "ingredient", "humulus", "lupulus", "cascade", "citra", "mosaic"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return beerHop(f), nil + }, + }) + + AddFuncLookup("beeryeast", Info{ + Display: "Beer Yeast", + Category: "beer", + Description: "Microorganism used in brewing to ferment sugars, producing alcohol and carbonation in beer", + Example: "1388 - Belgian Strong Ale", + Output: "string", + Aliases: []string{"yeast", "fungus", "microorganism", "culture", "strain"}, + Keywords: []string{"beer", "brewing", "ferment", "sugars", "alcohol", "carbonation", "ingredient", "saccharomyces", "cerevisiae", "belgian", "ale"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return beerYeast(f), nil + }, + }) + + AddFuncLookup("beermalt", Info{ + Display: "Beer Malt", + Category: "beer", + Description: "Processed barley or other grains, provides sugars for fermentation and flavor to beer", + Example: "Munich", + Output: "string", + Aliases: []string{"malt", "barley", "grain", "cereal", "kernel"}, + 
Keywords: []string{"beer", "sugars", "fermentation", "flavor", "processed", "ingredient", "munich", "pale", "crystal", "roasted", "wheat", "rye"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return beerMalt(f), nil + }, + }) + + AddFuncLookup("beeralcohol", Info{ + Display: "Beer Alcohol", + Category: "beer", + Description: "Measures the alcohol content in beer", + Example: "2.7%", + Output: "string", + Aliases: []string{"alcohol", "abv", "strength", "proof", "percentage"}, + Keywords: []string{"beer", "content", "measure", "volume", "concentration", "level", "degree", "potency"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return beerAlcohol(f), nil + }, + }) + + AddFuncLookup("beeribu", Info{ + Display: "Beer IBU", + Category: "beer", + Description: "Scale measuring bitterness of beer from hops", + Example: "29 IBU", + Output: "string", + Aliases: []string{"ibu", "bitterness", "scale", "units", "measurement"}, + Keywords: []string{"beer", "hops", "measuring", "international", "bittering", "alpha", "acid", "level", "intensity"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return beerIbu(f), nil + }, + }) + + AddFuncLookup("beerblg", Info{ + Display: "Beer BLG", + Category: "beer", + Description: "Scale indicating the concentration of extract in worts", + Example: "6.4°Blg", + Output: "string", + Aliases: []string{"blg", "density", "gravity", "extract", "concentration"}, + Keywords: []string{"beer", "worts", "scale", "indicating", "balling", "plato", "sugar", "soluble", "solids", "degree"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return beerBlg(f), nil + }, + }) +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/book.go b/vendor/github.com/brianvoe/gofakeit/v7/book.go new file mode 100644 index 0000000000..aa870e68e8 --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/book.go @@ -0,0 +1,96 @@ +package gofakeit + +func BookTitle() string { return bookTitle(GlobalFaker) } + +func (f *Faker) BookTitle() string { return bookTitle(f) } + +func bookTitle(f *Faker) string { return getRandValue(f, []string{"book", "title"}) } + +func BookAuthor() string { return bookAuthor(GlobalFaker) } + +func (f *Faker) BookAuthor() string { return bookAuthor(f) } + +func bookAuthor(f *Faker) string { return getRandValue(f, []string{"book", "author"}) } + +func BookGenre() string { return bookGenre(GlobalFaker) } + +func (f *Faker) BookGenre() string { return bookGenre(f) } + +func bookGenre(f *Faker) string { return getRandValue(f, []string{"book", "genre"}) } + +type BookInfo struct { + Title string `json:"title" xml:"name"` + Author string `json:"author" xml:"author"` + Genre string `json:"genre" xml:"genre"` +} + +func Book() *BookInfo { return book(GlobalFaker) } + +func (f *Faker) Book() *BookInfo { return book(f) } + +func book(f *Faker) *BookInfo { + return &BookInfo{ + Title: bookTitle(f), + Author: bookAuthor(f), + Genre: bookGenre(f), + } +} + +func addBookLookup() { + AddFuncLookup("book", Info{ + Display: "Book", + Category: "book", + Description: "Written or printed work consisting of pages bound together, covering various subjects or stories", + Example: `{ + "title": "Anna Karenina", + "author": "Toni Morrison", + "genre": "Thriller" +}`, + Output: "map[string]string", + ContentType: "application/json", + Aliases: []string{"printed", "pages", "bound", "subjects", "stories", "literature", "text"}, + Keywords: []string{"book", "written", "work", "consisting", "anna", "karenina", 
"toni", "morrison", "thriller"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return book(f), nil + }, + }) + + AddFuncLookup("booktitle", Info{ + Display: "Title", + Category: "book", + Description: "The specific name given to a book", + Example: "Hamlet", + Output: "string", + Aliases: []string{"title", "name", "specific", "given", "heading"}, + Keywords: []string{"book", "identification", "hamlet", "naming", "designation", "label", "caption"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return bookTitle(f), nil + }, + }) + + AddFuncLookup("bookauthor", Info{ + Display: "Author", + Category: "book", + Description: "The individual who wrote or created the content of a book", + Example: "Mark Twain", + Output: "string", + Aliases: []string{"author", "writer", "creator", "individual", "content", "literary"}, + Keywords: []string{"book", "wrote", "created", "mark", "twain", "composer", "originator", "penned"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return bookAuthor(f), nil + }, + }) + + AddFuncLookup("bookgenre", Info{ + Display: "Genre", + Category: "book", + Description: "Category or type of book defined by its content, style, or form", + Example: "Adventure", + Output: "string", + Aliases: []string{"type", "content", "style", "form", "literature", "classification"}, + Keywords: []string{"book", "category", "defined", "adventure", "fiction", "non-fiction", "mystery", "romance", "sci-fi"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return bookGenre(f), nil + }, + }) +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/car.go b/vendor/github.com/brianvoe/gofakeit/v7/car.go new file mode 100644 index 0000000000..c48d71f0ea --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/car.go @@ -0,0 +1,169 @@ +package gofakeit + +// CarInfo is a struct dataset of all car information +type CarInfo struct { + Type string `json:"type" xml:"type"` + Fuel string `json:"fuel" xml:"fuel"` + Transmission string `json:"transmission" xml:"transmission"` + Brand string `json:"brand" xml:"brand"` + Model string `json:"model" xml:"model"` + Year int `json:"year" xml:"year"` +} + +// Car will generate a struct with car information +func Car() *CarInfo { return car(GlobalFaker) } + +// Car will generate a struct with car information +func (f *Faker) Car() *CarInfo { return car(f) } + +func car(f *Faker) *CarInfo { + return &CarInfo{ + Type: carType(f), + Fuel: carFuelType(f), + Transmission: carTransmissionType(f), + Brand: carMaker(f), + Model: carModel(f), + Year: year(f), + } +} + +// CarType will generate a random car type string +func CarType() string { return carType(GlobalFaker) } + +// CarType will generate a random car type string +func (f *Faker) CarType() string { return carType(f) } + +func carType(f *Faker) string { return getRandValue(f, []string{"car", "type"}) } + +// CarFuelType will return a random fuel type +func CarFuelType() string { return carFuelType(GlobalFaker) } + +// CarFuelType will return a random fuel type +func (f *Faker) CarFuelType() string { return carFuelType(f) } + +func carFuelType(f *Faker) string { return getRandValue(f, []string{"car", "fuel_type"}) } + +// CarTransmissionType will return a random transmission type +func CarTransmissionType() string { return carTransmissionType(GlobalFaker) } + +// CarTransmissionType will return a random transmission type +func (f *Faker) CarTransmissionType() string { return carTransmissionType(f) } + +func 
carTransmissionType(f *Faker) string { + return getRandValue(f, []string{"car", "transmission_type"}) +} + +// CarMaker will return a random car maker +func CarMaker() string { return carMaker(GlobalFaker) } + +// CarMaker will return a random car maker +func (f *Faker) CarMaker() string { return carMaker(f) } + +func carMaker(f *Faker) string { return getRandValue(f, []string{"car", "maker"}) } + +// CarModel will return a random car model +func CarModel() string { return carModel(GlobalFaker) } + +// CarModel will return a random car model +func (f *Faker) CarModel() string { return carModel(f) } + +func carModel(f *Faker) string { return getRandValue(f, []string{"car", "model"}) } + +func addCarLookup() { + AddFuncLookup("car", Info{ + Display: "Car", + Category: "car", + Description: "Wheeled motor vehicle used for transportation", + Example: `{ + "type": "Passenger car mini", + "fuel": "Gasoline", + "transmission": "Automatic", + "brand": "Fiat", + "model": "Freestyle Fwd", + "year": 1991 +}`, + Output: "map[string]any", + ContentType: "application/json", + Aliases: []string{"vehicle", "automobile", "transportation", "motor", "wheeled"}, + Keywords: []string{"car", "used", "passenger", "mini", "gasoline", "automatic", "fiat", "freestyle", "fwd"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return car(f), nil + }, + }) + + AddFuncLookup("cartype", Info{ + Display: "Car Type", + Category: "car", + Description: "Classification of cars based on size, use, or body style", + Example: "Passenger car mini", + Output: "string", + Aliases: []string{"classification", "size", "body", "style", "vehicle", "category"}, + Keywords: []string{"car", "based", "passenger", "mini", "suv", "sedan", "hatchback", "convertible", "coupe"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return carType(f), nil + }, + }) + + AddFuncLookup("carfueltype", Info{ + Display: "Car Fuel Type", + Category: "car", + Description: "Type of energy source a car uses", + Example: "CNG", + Output: "string", + Aliases: []string{"energy", "source", "power", "vehicle"}, + Keywords: []string{"car", "fuel", "uses", "cng", "gasoline", "diesel", "electric", "hybrid", "hydrogen", "ethanol"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return carFuelType(f), nil + }, + }) + + AddFuncLookup("cartransmissiontype", Info{ + Display: "Car Transmission Type", + Category: "car", + Description: "Mechanism a car uses to transmit power from the engine to the wheels", + Example: "Manual", + Output: "string", + Aliases: []string{"mechanism", "power", "engine", "wheels", "vehicle"}, + Keywords: []string{"car", "transmission", "transmit", "manual", "automatic", "cvt", "semi-automatic", "gearbox", "clutch"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return carTransmissionType(f), nil + }, + }) + + AddFuncLookup("carmaker", Info{ + Display: "Car Maker", + Category: "car", + Description: "Company or brand that manufactures and designs cars", + Example: "Nissan", + Output: "string", + Aliases: []string{"company", "brand", "manufacturer", "designer", "vehicle", "producer"}, + Keywords: []string{"car", "maker", "manufactures", "nissan", "toyota", "honda", "ford", "bmw", "mercedes", "audi"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return carMaker(f), nil + }, + }) + + AddFuncLookup("carmodel", Info{ + Display: "Car Model", + Category: "car", + Description: "Specific design or version of a car produced by a manufacturer", + 
Example: "Aveo", + Output: "string", + Aliases: []string{ + "vehicle model", + "auto model", + "car type", + "car version", + "automobile model", + }, + Keywords: []string{ + "car", "model", "vehicle", "auto", "automobile", + "type", "edition", "variant", "series", + "sedan", "suv", "hatchback", "coupe", "convertible", + "civic", "camry", "accord", "corolla", "mustang", "prius", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return carModel(f), nil + }, + }) +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/celebrity.go b/vendor/github.com/brianvoe/gofakeit/v7/celebrity.go new file mode 100644 index 0000000000..74b525abb5 --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/celebrity.go @@ -0,0 +1,68 @@ +package gofakeit + +// CelebrityActor will generate a random celebrity actor +func CelebrityActor() string { return celebrityActor(GlobalFaker) } + +// CelebrityActor will generate a random celebrity actor +func (f *Faker) CelebrityActor() string { return celebrityActor(f) } + +func celebrityActor(f *Faker) string { return getRandValue(f, []string{"celebrity", "actor"}) } + +// CelebrityBusiness will generate a random celebrity business person +func CelebrityBusiness() string { return celebrityBusiness(GlobalFaker) } + +// CelebrityBusiness will generate a random celebrity business person +func (f *Faker) CelebrityBusiness() string { return celebrityBusiness(f) } + +func celebrityBusiness(f *Faker) string { + return getRandValue(f, []string{"celebrity", "business"}) +} + +// CelebritySport will generate a random celebrity sport person +func CelebritySport() string { return celebritySport(GlobalFaker) } + +// CelebritySport will generate a random celebrity sport person +func (f *Faker) CelebritySport() string { return celebritySport(f) } + +func celebritySport(f *Faker) string { return getRandValue(f, []string{"celebrity", "sport"}) } + +func addCelebrityLookup() { + AddFuncLookup("celebrityactor", Info{ + Display: "Celebrity Actor", + Category: "celebrity", + Description: "Famous person known for acting in films, television, or theater", + Example: "Brad Pitt", + Output: "string", + Aliases: []string{"actor", "famous", "films", "television", "theater", "entertainment"}, + Keywords: []string{"celebrity", "known", "brad", "pitt", "hollywood", "movie", "star", "performer", "artist"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return celebrityActor(f), nil + }, + }) + + AddFuncLookup("celebritybusiness", Info{ + Display: "Celebrity Business", + Category: "celebrity", + Description: "High-profile individual known for significant achievements in business or entrepreneurship", + Example: "Elon Musk", + Output: "string", + Aliases: []string{"business", "entrepreneur", "high-profile", "achievements", "executive"}, + Keywords: []string{"celebrity", "significant", "elon", "musk", "ceo", "founder", "investor", "tycoon", "magnate"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return celebrityBusiness(f), nil + }, + }) + + AddFuncLookup("celebritysport", Info{ + Display: "Celebrity Sport", + Category: "celebrity", + Description: "Famous athlete known for achievements in a particular sport", + Example: "Michael Phelps", + Output: "string", + Aliases: []string{"athlete", "famous", "achievements", "competition", "athletic", "player"}, + Keywords: []string{"celebrity", "particular", "michael", "phelps", "olympics", "champion", "medalist", "record", "holder"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, 
error) { + return celebritySport(f), nil + }, + }) +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/color.go b/vendor/github.com/brianvoe/gofakeit/v7/color.go new file mode 100644 index 0000000000..d85c88b069 --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/color.go @@ -0,0 +1,127 @@ +package gofakeit + +import ( + "github.com/brianvoe/gofakeit/v7/data" +) + +// Color will generate a random color string +func Color() string { return color(GlobalFaker) } + +// Color will generate a random color string +func (f *Faker) Color() string { return color(f) } + +func color(f *Faker) string { return getRandValue(f, []string{"color", "full"}) } + +// NiceColors will generate a random list of color hex codes that look nice together +func NiceColors() []string { return niceColors(GlobalFaker) } + +// NiceColors will generate a random list of color hex codes that look nice together +func (f *Faker) NiceColors() []string { return niceColors(f) } + +func niceColors(f *Faker) []string { + return data.ColorsNice[randIntRange(f, 0, len(data.ColorsNice)-1)] +} + +// SafeColor will generate a random safe color string +func SafeColor() string { return safeColor(GlobalFaker) } + +// SafeColor will generate a random safe color string +func (f *Faker) SafeColor() string { return safeColor(f) } + +func safeColor(f *Faker) string { return getRandValue(f, []string{"color", "safe"}) } + +// HexColor will generate a random hexadecimal color string +func HexColor() string { return hexColor(GlobalFaker) } + +// HexColor will generate a random hexadecimal color string +func (f *Faker) HexColor() string { return hexColor(f) } + +func hexColor(f *Faker) string { + color := make([]byte, 6) + hashQuestion := []byte("?#") + for i := 0; i < 6; i++ { + color[i] = hashQuestion[f.IntN(2)] + } + + return "#" + replaceWithHexLetters(f, replaceWithNumbers(f, string(color))) +} + +// RGBColor will generate a random int slice color +func RGBColor() []int { return rgbColor(GlobalFaker) } + +// RGBColor will generate a random int slice color +func (f *Faker) RGBColor() []int { return rgbColor(f) } + +func rgbColor(f *Faker) []int { + return []int{randIntRange(f, 0, 255), randIntRange(f, 0, 255), randIntRange(f, 0, 255)} +} + +func addColorLookup() { + AddFuncLookup("color", Info{ + Display: "Color", + Category: "color", + Description: "Hue seen by the eye, returns the name of the color like red or blue", + Example: "MediumOrchid", + Output: "string", + Aliases: []string{"color name", "hue name", "visual color", "shade name", "color label"}, + Keywords: []string{"color", "red", "blue", "green", "yellow", "purple", "orange", "pink", "hue", "chroma", "shade", "tone", "css", "name"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return color(f), nil + }, + }) + + AddFuncLookup("nicecolors", Info{ + Display: "Nice Colors", + Category: "color", + Description: "Attractive and appealing combinations of colors, returns a list of color hex codes", + Example: `["#cfffdd","#b4dec1","#5c5863","#a85163","#ff1f4c"]`, + Output: "[]string", + ContentType: "application/json", + Aliases: []string{"color palette", "nice palette", "harmonious colors", "aesthetic palette", "design colors"}, + Keywords: []string{"nice", "colors", "palette", "array", "hex", "design", "aesthetic", "beautiful", "harmonious", "scheme", "ui", "ux"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return niceColors(f), nil + }, + }) + + AddFuncLookup("safecolor", Info{ + Display: "Safe Color", + Category: "color", + Description: "Colors displayed consistently on different web
browsers and devices", + Example: "black", + Output: "string", + Aliases: []string{"web safe color", "browser safe", "cross platform color", "universal color", "standard color"}, + Keywords: []string{"safe", "color", "cross-platform", "css", "html", "compatible", "browser", "device", "universal", "stable", "standard"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return safeColor(f), nil + }, + }) + + AddFuncLookup("hexcolor", Info{ + Display: "Hex Color", + Category: "color", + Description: "Six-digit hexadecimal code representing a color in the RGB color model", + Example: "#a99fb4", + Output: "string", + Aliases: []string{"hex color code", "css hex", "html hex", "web hex", "hexadecimal color"}, + Keywords: []string{"hex", "hexcolor", "color", "rgb", "six-digit", "web", "css", "html", "design", "hexadecimal", "hash", "code"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return hexColor(f), nil + }, + }) + + AddFuncLookup("rgbcolor", Info{ + Display: "RGB Color", + Category: "color", + Description: "Color defined by red, green, and blue light values", + Example: "[85, 224, 195]", + Output: "[]int", + ContentType: "application/json", + Aliases: []string{"rgb triplet", "rgb array", "rgb value", "red green blue", "rgb color code"}, + Keywords: []string{"rgb", "color", "red", "green", "blue", "triplet", "digital", "screen", "display", "primary", "additive", "value", "css"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return rgbColor(f), nil + }, + }) + +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/company.go b/vendor/github.com/brianvoe/gofakeit/v7/company.go new file mode 100644 index 0000000000..f0ad90ce76 --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/company.go @@ -0,0 +1,357 @@ +package gofakeit + +// Company will generate a random company name string +func Company() string { return company(GlobalFaker) } + +// Company will generate a random company name string +func (f *Faker) Company() string { return company(f) } + +func company(f *Faker) string { return getRandValue(f, []string{"company", "name"}) } + +// CompanySuffix will generate a random company suffix string +func CompanySuffix() string { return companySuffix(GlobalFaker) } + +// CompanySuffix will generate a random company suffix string +func (f *Faker) CompanySuffix() string { return companySuffix(f) } + +func companySuffix(f *Faker) string { return getRandValue(f, []string{"company", "suffix"}) } + +// Blurb will generate a random company blurb string +func Blurb() string { return blurb(GlobalFaker) } + +func (f *Faker) Blurb() string { return blurb(f) } + +func blurb(f *Faker) string { return getRandValue(f, []string{"company", "blurb"}) } + +// BuzzWord will generate a random company buzz word string +func BuzzWord() string { return buzzWord(GlobalFaker) } + +// BuzzWord will generate a random company buzz word string +func (f *Faker) BuzzWord() string { return buzzWord(f) } + +func buzzWord(f *Faker) string { return getRandValue(f, []string{"company", "buzzwords"}) } + +// BS will generate a random company bs string +func BS() string { return bs(GlobalFaker) } + +// BS will generate a random company bs string +func (f *Faker) BS() string { return bs(f) } + +func bs(f *Faker) string { return getRandValue(f, []string{"company", "bs"}) } + +// JobInfo is a struct of job information +type JobInfo struct { + Company string `json:"company" xml:"company"` + Title string `json:"title" xml:"title"` + Descriptor string `json:"descriptor" 
xml:"descriptor"` + Level string `json:"level" xml:"level"` +} + +// Job will generate a struct with random job information +func Job() *JobInfo { return job(GlobalFaker) } + +// Job will generate a struct with random job information +func (f *Faker) Job() *JobInfo { return job(f) } + +func job(f *Faker) *JobInfo { + return &JobInfo{ + Company: company(f), + Title: jobTitle(f), + Descriptor: jobDescriptor(f), + Level: jobLevel(f), + } +} + +// JobTitle will generate a random job title string +func JobTitle() string { return jobTitle(GlobalFaker) } + +// JobTitle will generate a random job title string +func (f *Faker) JobTitle() string { return jobTitle(f) } + +func jobTitle(f *Faker) string { return getRandValue(f, []string{"job", "title"}) } + +// JobDescriptor will generate a random job descriptor string +func JobDescriptor() string { return jobDescriptor(GlobalFaker) } + +// JobDescriptor will generate a random job descriptor string +func (f *Faker) JobDescriptor() string { return jobDescriptor(f) } + +func jobDescriptor(f *Faker) string { return getRandValue(f, []string{"job", "descriptor"}) } + +// JobLevel will generate a random job level string +func JobLevel() string { return jobLevel(GlobalFaker) } + +// JobLevel will generate a random job level string +func (f *Faker) JobLevel() string { return jobLevel(f) } + +func jobLevel(f *Faker) string { return getRandValue(f, []string{"job", "level"}) } + +// Slogan will generate a random company slogan +func Slogan() string { return slogan(GlobalFaker) } + +// Slogan will generate a random company slogan +func (f *Faker) Slogan() string { return slogan(f) } + +// Slogan will generate a random company slogan +func slogan(f *Faker) string { + slogan := "" + var sloganStyle = number(f, 0, 2) + switch sloganStyle { + // Noun. Buzzword! + case 0: + slogan = getRandValue(f, []string{"company", "blurb"}) + ". " + getRandValue(f, []string{"company", "buzzwords"}) + "!" + // Buzzword Noun, Buzzword Noun. + case 1: + slogan = getRandValue(f, []string{"company", "buzzwords"}) + " " + getRandValue(f, []string{"company", "blurb"}) + ", " + getRandValue(f, []string{"company", "buzzwords"}) + " " + getRandValue(f, []string{"company", "blurb"}) + "." + // Buzzword bs Noun, Buzzword. + case 2: + slogan = getRandValue(f, []string{"company", "buzzwords"}) + " " + getRandValue(f, []string{"company", "bs"}) + " " + getRandValue(f, []string{"company", "blurb"}) + ", " + getRandValue(f, []string{"company", "buzzwords"}) + "." + } + return slogan +} + +func addCompanyLookup() { + AddFuncLookup("company", Info{ + Display: "Company", + Category: "company", + Description: "Designated official name of a business or organization", + Example: "Moen, Pagac and Wuckert", + Output: "string", + Aliases: []string{ + "business name", + "company name", + "organization name", + "corporate name", + "legal entity", + }, + Keywords: []string{ + "company", "business", "organization", "corporation", + "enterprise", "firm", "entity", "brand", + "employer", "vendor", "partner", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return company(f), nil + }, + }) + + AddFuncLookup("companysuffix", Info{ + Display: "Company Suffix", + Category: "company", + Description: "Suffix at the end of a company name, indicating business structure, like 'Inc.' 
or 'LLC'", + Example: "Inc", + Output: "string", + Aliases: []string{ + "business suffix", + "legal suffix", + "company ending", + "corporate suffix", + "entity suffix", + }, + Keywords: []string{ + "suffix", "ending", "company", "business", "entity", + "inc", "incorporated", "llc", "ltd", "limited", + "corp", "corporation", "plc", "gmbh", "sarl", + "legal", "structure", "designation", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return companySuffix(f), nil + }, + }) + + AddFuncLookup("bs", Info{ + Display: "BS", + Category: "company", + Description: "Random bs company word", + Example: "front-end", + Output: "string", + Aliases: []string{ + "business jargon", + "corporate jargon", + "marketing buzzword", + "tech buzzword", + "consulting speak", + }, + Keywords: []string{ + "bs", "jargon", "buzzwords", "synergy", "leverage", + "disrupt", "innovate", "scalable", "agile", "optimize", + "pipeline", "roadmap", "vision", "strategy", "corporate", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return bs(f), nil + }, + }) + + AddFuncLookup("blurb", Info{ + Display: "Blurb", + Category: "company", + Description: "Brief description or summary of a company's purpose, products, or services", + Example: "word", + Output: "string", + Aliases: []string{ + "company blurb", + "company summary", + "company description", + "short overview", + "about text", + }, + Keywords: []string{ + "blurb", "summary", "overview", "description", + "company", "profile", "about", "intro", + "purpose", "mission", "vision", "statement", + "services", "products", "offerings", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return blurb(f), nil + }, + }) + + AddFuncLookup("buzzword", Info{ + Display: "Buzzword", + Category: "company", + Description: "Trendy or overused term often used in business to sound impressive", + Example: "disintermediate", + Output: "string", + Aliases: []string{ + "business buzzword", + "corporate buzzword", + "trendy term", + "catchphrase", + "marketing phrase", + }, + Keywords: []string{ + "buzzword", "jargon", "hype", "trend", "phrase", + "term", "corporate", "management", "marketing", + "innovation", "paradigm", "disruptive", "visionary", + "fashionable", "impressive", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return buzzWord(f), nil + }, + }) + + AddFuncLookup("job", Info{ + Display: "Job", + Category: "company", + Description: "Position or role in employment, involving specific tasks and responsibilities", + Example: `{ + "company": "ClearHealthCosts", + "title": "Agent", + "descriptor": "Future", + "level": "Tactics" +}`, + Output: "map[string]string", + ContentType: "application/json", + Aliases: []string{ + "job role", + "job position", + "employment role", + "work role", + "career role", + "occupation role", + }, + Keywords: []string{ + "job", "role", "position", "employment", "work", + "career", "occupation", "profession", "title", + "responsibilities", "tasks", "duties", + "staff", "employee", "hiring", "positioning", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return job(f), nil + }, + }) + + AddFuncLookup("jobtitle", Info{ + Display: "Job Title", + Category: "company", + Description: "Specific title for a position or role within a company or organization", + Example: "Director", + Output: "string", + Aliases: []string{ + "job designation", + "position title", + "role title", + "employment title", + "official title", + }, + Keywords: []string{ 
+ "job", "title", "designation", "position", "role", + "occupation", "profession", "career", + "company", "organization", "staff", "employee", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return jobTitle(f), nil + }, + }) + + AddFuncLookup("jobdescriptor", Info{ + Display: "Job Descriptor", + Category: "company", + Description: "Word used to describe the duties, requirements, and nature of a job", + Example: "Central", + Output: "string", + Aliases: []string{ + "job modifier", + "job adjective", + "role descriptor", + "title descriptor", + "position descriptor", + }, + Keywords: []string{ + "descriptor", "modifier", "adjective", "qualifier", + "job", "role", "title", "position", + "label", "term", "descriptive", "characterization", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return jobDescriptor(f), nil + }, + }) + + AddFuncLookup("joblevel", Info{ + Display: "Job Level", + Category: "company", + Description: "Random job level", + Example: "Assurance", + Output: "string", + Aliases: []string{ + "seniority level", + "career level", + "position level", + "role level", + "job grade", + "job band", + }, + Keywords: []string{ + "level", "seniority", "rank", "tier", "grade", "band", + "entry", "junior", "associate", "mid", "senior", + "lead", "staff", "principal", "manager", "director", + "executive", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return jobLevel(f), nil + }, + }) + + AddFuncLookup("slogan", Info{ + Display: "Slogan", + Category: "company", + Description: "Catchphrase or motto used by a company to represent its brand or values", + Example: "Universal seamless Focus, interactive.", + Output: "string", + Aliases: []string{ + "company slogan", + "brand slogan", + "brand tagline", + "company motto", + "advertising slogan", + }, + Keywords: []string{ + "slogan", "tagline", "motto", "catchphrase", + "brand", "company", "marketing", "advertising", + "identity", "values", "mission", "vision", + "strapline", "promo", "campaign", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return slogan(f), nil + }, + }) +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/csv.go b/vendor/github.com/brianvoe/gofakeit/v7/csv.go new file mode 100644 index 0000000000..2c8ca61279 --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/csv.go @@ -0,0 +1,189 @@ +package gofakeit + +import ( + "bytes" + "encoding/csv" + "encoding/json" + "errors" + "fmt" + "reflect" + "strings" +) + +// CSVOptions defines values needed for csv generation +type CSVOptions struct { + Delimiter string `json:"delimiter" xml:"delimiter" fake:"{randomstring:[,,tab]}"` + RowCount int `json:"row_count" xml:"row_count" fake:"{number:1,10}"` + Fields []Field `json:"fields" xml:"fields" fake:"{fields}"` +} + +// CSV generates an object or an array of objects in json format +// A nil CSVOptions returns a randomly structured CSV. +func CSV(co *CSVOptions) ([]byte, error) { return csvFunc(GlobalFaker, co) } + +// CSV generates an object or an array of objects in json format +// A nil CSVOptions returns a randomly structured CSV. 
+func (f *Faker) CSV(co *CSVOptions) ([]byte, error) { return csvFunc(f, co) } + +func csvFunc(f *Faker, co *CSVOptions) ([]byte, error) { + if co == nil { + // We didn't get a CSVOptions, so create a new random one + err := f.Struct(&co) + if err != nil { + return nil, err + } + } + + // Check delimiter + if co.Delimiter == "" { + co.Delimiter = "," + } + if strings.ToLower(co.Delimiter) == "tab" { + co.Delimiter = "\t" + } + if co.Delimiter != "," && co.Delimiter != "\t" && co.Delimiter != ";" { + return nil, errors.New("invalid delimiter type") + } + + // Check fields + if len(co.Fields) <= 0 { + return nil, errors.New("must pass fields in order to build json object(s)") + } + + // Make sure you set a row count + if co.RowCount <= 0 { + return nil, errors.New("must have row count") + } + + b := &bytes.Buffer{} + w := csv.NewWriter(b) + w.Comma = []rune(co.Delimiter)[0] + + // Add header row + header := make([]string, len(co.Fields)) + for i, field := range co.Fields { + header[i] = field.Name + } + w.Write(header) + + // Loop through row count +1(for header) and add fields + for i := 1; i < co.RowCount+1; i++ { + vr := make([]string, len(co.Fields)) + + // Loop through fields and add to them to map[string]any + for ii, field := range co.Fields { + if field.Function == "autoincrement" { + vr[ii] = fmt.Sprintf("%d", i) + continue + } + + // Get function info + funcInfo := GetFuncLookup(field.Function) + if funcInfo == nil { + return nil, errors.New("invalid function, " + field.Function + " does not exist") + } + + value, err := funcInfo.Generate(f, &field.Params, funcInfo) + if err != nil { + return nil, err + } + + if _, ok := value.([]byte); ok { + // If it's a slice of bytes or struct, unmarshal it into an interface + var v any + if err := json.Unmarshal(value.([]byte), &v); err != nil { + return nil, err + } + value = v + } + + // If the value is a list of possible values, marsha it into a string + if reflect.TypeOf(value).Kind() == reflect.Struct || + reflect.TypeOf(value).Kind() == reflect.Ptr || + reflect.TypeOf(value).Kind() == reflect.Map || + reflect.TypeOf(value).Kind() == reflect.Slice { + b, err := json.Marshal(value) + if err != nil { + return nil, err + } + value = string(b) + } + + vr[ii] = fmt.Sprintf("%v", value) + } + + w.Write(vr) + } + + w.Flush() + + if err := w.Error(); err != nil { + return nil, err + } + + return b.Bytes(), nil +} + +func addFileCSVLookup() { + AddFuncLookup("csv", Info{ + Display: "CSV", + Category: "file", + Description: "Individual lines or data entries within a CSV (Comma-Separated Values) format", + Example: `id,first_name,last_name,password +1,Markus,Moen,Dc0VYXjkWABx +2,Osborne,Hilll,XPJ9OVNbs5lm`, + Output: "[]byte", + ContentType: "text/csv", + Aliases: []string{ + "comma separated", "csv file", "data table", "flat file", "spreadsheet format", "tabular data", + }, + Keywords: []string{ + "csv", "comma", "separated", "values", "format", "data", "spreadsheet", "entries", "rows", "columns", "dataset", "records", + }, + Params: []Param{ + {Field: "delimiter", Display: "Delimiter", Type: "string", Default: ",", Description: "Separator in between row values"}, + {Field: "rowcount", Display: "Row Count", Type: "int", Default: "100", Description: "Number of rows"}, + {Field: "fields", Display: "Fields", Type: "[]Field", Description: "Fields containing key name and function"}, + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + co := CSVOptions{} + + delimiter, err := info.GetString(m, "delimiter") + if err != nil { + return 
nil, err + } + co.Delimiter = delimiter + + rowcount, err := info.GetInt(m, "rowcount") + if err != nil { + return nil, err + } + co.RowCount = rowcount + + fieldsStr, err := info.GetStringArray(m, "fields") + if err != nil { + return nil, err + } + + // Check to make sure fields has length + if len(fieldsStr) > 0 { + co.Fields = make([]Field, len(fieldsStr)) + + for i, f := range fieldsStr { + // Unmarshal fields string into fields array + err = json.Unmarshal([]byte(f), &co.Fields[i]) + if err != nil { + return nil, err + } + } + } + + csvOut, err := csvFunc(f, &co) + if err != nil { + return nil, err + } + + return csvOut, nil + }, + }) +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/data/README.md b/vendor/github.com/brianvoe/gofakeit/v7/data/README.md new file mode 100644 index 0000000000..64441741c2 --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/data/README.md @@ -0,0 +1,33 @@ +# Gofakeit Data + +Gofakeit data set + +## List + +```go +List() +``` + +## Get/Set/Remove Data + +```go +data.Get("desserts") + +data.Set("desserts", map[string][]string{ + "cake": {"chocolate", "vanilla"}, + "pie": {"apple", "pecan"}, + "ice cream": {"strawberry", "vanilla"}, +}) + +data.Remove("desserts") +``` + +## Get/Set/Remove Sub Data + +```go +data.GetSubData("desserts", "cake") + +data.SetSub("desserts", "cake", []string{"chocolate", "vanilla"}) + +data.RemoveSub("desserts", "cake") +``` diff --git a/vendor/github.com/brianvoe/gofakeit/v7/data/address.go b/vendor/github.com/brianvoe/gofakeit/v7/data/address.go new file mode 100644 index 0000000000..b886110957 --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/data/address.go @@ -0,0 +1,16 @@ +package data + +// Address consists of address information +var Address = map[string][]string{ + "number": {"#####", "####", "###"}, + "street_prefix": {"North", "East", "West", "South", "New", "Lake", "Port"}, + "street_name": {"Alley", "Avenue", "Branch", "Bridge", "Brook", "Brooks", "Burg", "Burgs", "Bypass", "Camp", "Canyon", "Cape", "Causeway", "Center", "Centers", "Circle", "Circles", "Cliff", "Cliffs", "Club", "Common", "Corner", "Corners", "Course", "Court", "Courts", "Cove", "Coves", "Creek", "Crescent", "Crest", "Crossing", "Crossroad", "Curve", "Dale", "Dam", "Divide", "Drive", "Drive", "Drives", "Estate", "Estates", "Expressway", "Extension", "Extensions", "Fall", "Falls", "Ferry", "Field", "Fields", "Flat", "Flats", "Ford", "Fords", "Forest", "Forge", "Forges", "Fork", "Forks", "Fort", "Freeway", "Garden", "Gardens", "Gateway", "Glen", "Glens", "Green", "Greens", "Grove", "Groves", "Harbor", "Harbors", "Haven", "Heights", "Highway", "Hill", "Hills", "Hollow", "Inlet", "Inlet", "Island", "Island", "Islands", "Islands", "Isle", "Isle", "Junction", "Junctions", "Key", "Keys", "Knoll", "Knolls", "Lake", "Lakes", "Land", "Landing", "Lane", "Light", "Lights", "Loaf", "Lock", "Locks", "Locks", "Lodge", "Lodge", "Loop", "Mall", "Manor", "Manors", "Meadow", "Meadows", "Mews", "Mill", "Mills", "Mission", "Mission", "Motorway", "Mount", "Mountain", "Mountain", "Mountains", "Mountains", "Neck", "Orchard", "Oval", "Overpass", "Park", "Parks", "Parkway", "Parkways", "Pass", "Passage", "Path", "Pike", "Pine", "Pines", "Place", "Plain", "Plains", "Plains", "Plaza", "Plaza", "Point", "Points", "Port", "Port", "Ports", "Ports", "Prairie", "Prairie", "Radial", "Ramp", "Ranch", "Rapid", "Rapids", "Rest", "Ridge", "Ridges", "River", "Road", "Road", "Roads", "Roads", "Route", "Row", "Rue", "Run", "Shoal", "Shoals", "Shore", "Shores", "Skyway", 
"Spring", "Springs", "Springs", "Spur", "Spurs", "Square", "Square", "Squares", "Squares", "Station", "Station", "Stravenue", "Stravenue", "Stream", "Stream", "Street", "Street", "Streets", "Summit", "Summit", "Terrace", "Throughway", "Trace", "Track", "Trafficway", "Trail", "Trail", "Tunnel", "Tunnel", "Turnpike", "Turnpike", "Underpass", "Union", "Unions", "Valley", "Valleys", "Via", "Viaduct", "View", "Views", "Village", "Village", "Villages", "Ville", "Vista", "Vista", "Walk", "Walks", "Wall", "Way", "Ways", "Well", "Wells"}, + "street_suffix": {"town", "ton", "land", "ville", "berg", "burgh", "borough", "bury", "view", "port", "mouth", "stad", "furt", "chester", "mouth", "fort", "haven", "side", "shire"}, + "unit": {"Apt", "Apartment", "Suite", "Ste", "Unit", "Floor", "Fl", "Room", "Rm", "Office", "Ofc", "Studio", "Loft", "Penthouse", "Ph"}, + "city": {"New York City", "Los Angeles", "Chicago", "Houston", "Philadelphia", "Phoenix", "San Antonio", "San Diego", "Dallas", "San Jose", "Austin", "Jacksonville", "Indianapolis", "San Francisco", "Columbus", "Fort Worth", "Charlotte", "Detroit", "El Paso", "Memphis", "Boston", "Seattle", "Denver", "Washington", "Nashville-Davidson", "Baltimore", "Louisville/Jefferson", "Portland", "Oklahoma", "Milwaukee", "Las Vegas", "Albuquerque", "Tucson", "Fresno", "Sacramento", "Long Beach", "Kansas", "Mesa", "Virginia Beach", "Atlanta", "Colorado Springs", "Raleigh", "Omaha", "Miami", "Oakland", "Tulsa", "Minneapolis", "Cleveland", "Wichita", "Arlington", "New Orleans", "Bakersfield", "Tampa", "Honolulu", "Anaheim", "Aurora", "Santa Ana", "St. Louis", "Riverside", "Corpus Christi", "Pittsburgh", "Lexington-Fayette", "Stockton", "Cincinnati", "St. Paul", "Toledo", "Newark", "Greensboro", "Plano", "Henderson", "Lincoln", "Buffalo", "Fort Wayne", "Jersey", "Chula Vista", "Orlando", "St. 
Petersburg", "Norfolk", "Chandler", "Laredo", "Madison", "Durham", "Lubbock", "Winston-Salem", "Garland", "Glendale", "Hialeah", "Reno", "Baton Rouge", "Irvine", "Chesapeake", "Irving", "Scottsdale", "North Las Vegas", "Fremont", "San Bernardino", "Boise", "Birmingham"}, + "state": {"Alabama", "Alaska", "Arizona", "Arkansas", "California", "Colorado", "Connecticut", "Delaware", "Florida", "Georgia", "Hawaii", "Idaho", "Illinois", "Indiana", "Iowa", "Kansas", "Kentucky", "Louisiana", "Maine", "Maryland", "Massachusetts", "Michigan", "Minnesota", "Mississippi", "Missouri", "Montana", "Nebraska", "Nevada", "New Hampshire", "New Jersey", "New Mexico", "New York", "North Carolina", "North Dakota", "Ohio", "Oklahoma", "Oregon", "Pennsylvania", "Rhode Island", "South Carolina", "South Dakota", "Tennessee", "Texas", "Utah", "Vermont", "Virginia", "Washington", "West Virginia", "Wisconsin", "Wyoming"}, + "state_abr": {"AL", "AK", "AZ", "AR", "CA", "CO", "CT", "DE", "FL", "GA", "HI", "ID", "IL", "IN", "IA", "KS", "KY", "LA", "ME", "MD", "MA", "MI", "MN", "MS", "MO", "MT", "NE", "NV", "NH", "NJ", "NM", "NY", "NC", "ND", "OH", "OK", "OR", "PA", "RI", "SC", "SD", "TN", "TX", "UT", "VT", "VA", "WA", "WV", "WI", "WY", "AE", "AA", "AP"}, + "zip": {"#####"}, + "country": {"Andorra", "United Arab Emirates", "Afghanistan", "Antigua and Barbuda", "Anguilla", "Albania", "Armenia", "Angola", "Antarctica", "Argentina", "American Samoa", "Austria", "Australia", "Aruba", "Åland Islands", "Azerbaijan", "Bosnia and Herzegovina", "Barbados", "Bangladesh", "Belgium", "Burkina Faso", "Bulgaria", "Bahrain", "Burundi", "Benin", "Saint Barthélemy", "Bermuda", "Brunei Darussalam", "Bolivia (Plurinational State of)", "Bonaire, Sint Eustatius and Saba", "Brazil", "Bahamas", "Bhutan", "Bouvet Island", "Botswana", "Belarus", "Belize", "Canada", "Cocos (Keeling) Islands", "Congo, Democratic Republic of the", "Central African Republic", "Congo", "Switzerland", "Côte d'Ivoire", "Cook Islands", "Chile", "Cameroon", "China", "Colombia", "Costa Rica", "Cuba", "Cabo Verde", "Curaçao", "Christmas Island", "Cyprus", "Czechia", "Germany", "Djibouti", "Denmark", "Dominica", "Dominican Republic", "Algeria", "Ecuador", "Estonia", "Egypt", "Western Sahara", "Eritrea", "Spain", "Ethiopia", "Finland", "Fiji", "Falkland Islands (Malvinas)", "Micronesia (Federated States of)", "Faroe Islands", "France", "Gabon", "United Kingdom of Great Britain and Northern Ireland", "Grenada", "Georgia", "French Guiana", "Guernsey", "Ghana", "Gibraltar", "Greenland", "Gambia", "Guinea", "Guadeloupe", "Equatorial Guinea", "Greece", "South Georgia and the South Sandwich Islands", "Guatemala", "Guam", "Guinea-Bissau", "Guyana", "Hong Kong", "Heard Island and McDonald Islands", "Honduras", "Croatia", "Haiti", "Hungary", "Indonesia", "Ireland", "Israel", "Isle of Man", "India", "British Indian Ocean Territory", "Iraq", "Iran (Islamic Republic of)", "Iceland", "Italy", "Jersey", "Jamaica", "Jordan", "Japan", "Kenya", "Kyrgyzstan", "Cambodia", "Kiribati", "Comoros", "Saint Kitts and Nevis", "Korea (Democratic People's Republic of)", "Korea, Republic of", "Kuwait", "Cayman Islands", "Kazakhstan", "Lao People's Democratic Republic", "Lebanon", "Saint Lucia", "Liechtenstein", "Sri Lanka", "Liberia", "Lesotho", "Lithuania", "Luxembourg", "Latvia", "Libya", "Morocco", "Monaco", "Moldova, Republic of", "Montenegro", "Saint Martin (French part)", "Madagascar", "Marshall Islands", "North Macedonia", "Mali", "Myanmar", "Mongolia", "Macao", "Northern Mariana Islands", 
"Martinique", "Mauritania", "Montserrat", "Malta", "Mauritius", "Maldives", "Malawi", "Mexico", "Malaysia", "Mozambique", "Namibia", "New Caledonia", "Niger", "Norfolk Island", "Nigeria", "Nicaragua", "Netherlands", "Norway", "Nepal", "Nauru", "Niue", "New Zealand", "Oman", "Panama", "Peru", "French Polynesia", "Papua New Guinea", "Philippines", "Pakistan", "Poland", "Saint Pierre and Miquelon", "Pitcairn", "Puerto Rico", "Palestine, State of", "Portugal", "Palau", "Paraguay", "Qatar", "Réunion", "Romania", "Serbia", "Russian Federation", "Rwanda", "Saudi Arabia", "Solomon Islands", "Seychelles", "Sudan", "Sweden", "Singapore", "Saint Helena, Ascension and Tristan da Cunha", "Slovenia", "Svalbard and Jan Mayen", "Slovakia", "Sierra Leone", "San Marino", "Senegal", "Somalia", "Suriname", "South Sudan", "Sao Tome and Principe", "El Salvador", "Sint Maarten (Dutch part)", "Syrian Arab Republic", "Eswatini", "Turks and Caicos Islands", "Chad", "French Southern Territories", "Togo", "Thailand", "Tajikistan", "Tokelau", "Timor-Leste", "Turkmenistan", "Tunisia", "Tonga", "Turkey", "Trinidad and Tobago", "Tuvalu", "Taiwan, Province of China", "Tanzania, United Republic of", "Ukraine", "Uganda", "United States Minor Outlying Islands", "United States of America", "Uruguay", "Uzbekistan", "Holy See", "Saint Vincent and the Grenadines", "Venezuela (Bolivarian Republic of)", "Virgin Islands (British)", "Virgin Islands (U.S.)", "Viet Nam", "Vanuatu", "Wallis and Futuna", "Samoa", "Yemen", "Mayotte", "South Africa", "Zambia", "Zimbabwe"}, + "country_abr": {"AD", "AE", "AF", "AG", "AI", "AL", "AM", "AO", "AQ", "AR", "AS", "AT", "AU", "AW", "AX", "AZ", "BA", "BB", "BD", "BE", "BF", "BG", "BH", "BI", "BJ", "BL", "BM", "BN", "BO", "BQ", "BR", "BS", "BT", "BV", "BW", "BY", "BZ", "CA", "CC", "CD", "CF", "CG", "CH", "CI", "CK", "CL", "CM", "CN", "CO", "CR", "CU", "CV", "CW", "CX", "CY", "CZ", "DE", "DJ", "DK", "DM", "DO", "DZ", "EC", "EE", "EG", "EH", "ER", "ES", "ET", "FI", "FJ", "FK", "FM", "FO", "FR", "GA", "GB", "GD", "GE", "GF", "GG", "GH", "GI", "GL", "GM", "GN", "GP", "GQ", "GR", "GS", "GT", "GU", "GW", "GY", "HK", "HM", "HN", "HR", "HT", "HU", "ID", "IE", "IL", "IM", "IN", "IO", "IQ", "IR", "IS", "IT", "JE", "JM", "JO", "JP", "KE", "KG", "KH", "KI", "KM", "KN", "KP", "KR", "KW", "KY", "KZ", "LA", "LB", "LC", "LI", "LK", "LR", "LS", "LT", "LU", "LV", "LY", "MA", "MC", "MD", "ME", "MF", "MG", "MH", "MK", "ML", "MM", "MN", "MO", "MP", "MQ", "MR", "MS", "MT", "MU", "MV", "MW", "MX", "MY", "MZ", "NA", "NC", "NE", "NF", "NG", "NI", "NL", "NO", "NP", "NR", "NU", "NZ", "OM", "PA", "PE", "PF", "PG", "PH", "PK", "PL", "PM", "PN", "PR", "PS", "PT", "PW", "PY", "QA", "RE", "RO", "RS", "RU", "RW", "SA", "SB", "SC", "SD", "SE", "SG", "SH", "SI", "SJ", "SK", "SL", "SM", "SN", "SO", "SR", "SS", "ST", "SV", "SX", "SY", "SZ", "TC", "TD", "TF", "TG", "TH", "TJ", "TK", "TL", "TM", "TN", "TO", "TR", "TT", "TV", "TW", "TZ", "UA", "UG", "UM", "US", "UY", "UZ", "VA", "VC", "VE", "VG", "VI", "VN", "VU", "WF", "WS", "YE", "YT", "ZA", "ZM", "ZW"}, +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/data/animals.go b/vendor/github.com/brianvoe/gofakeit/v7/data/animals.go new file mode 100644 index 0000000000..2e37937440 --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/data/animals.go @@ -0,0 +1,12 @@ +package data + +// Animal consists of animal information +var Animal = map[string][]string{ + "petname": {"Alfalfa", "Archie", "Attila", "Baloo", "Bark Twain", "Barney", "Beans", "Bernadette", "Betty", "Binx", "Biscuit", 
"Bitsy", "Bob", "Bruiser", "Butterball", "Butters", "Chalupa", "Cheeseburger", "Chewbarka", "Chompers", "Cujo", "Demi", "Dobby", "Doc McDoggins", "Droolius Caesar", "Elmo", "Fergus", "Fluffernutter", "Franz Fur-dinand", "Frodo", "Fyodor Dogstoevsky", "Gary", "Gollum", "Hairy Paw-ter", "Hercules", "Hobbit", "Jabba", "Jellybean", "Jimmy Chew", "Kareem Abdul Ja-Bark", "Kevin", "Khaleesi", "Larry", "Lloyd", "Mary Puppins", "Matilda", "Meatball", "Mister Miyagi", "Moose", "Munchkin", "Nacho", "Noodles", "Nugget", "Olga", "Orville Redenbarker", "Ozzy Pawsborne", "Pam", "Peanut", "Pee Wee", "Pikachu", "Prince of Barkness", "Pumba", "Rambo", "Rex", "Rocky", "Rufus", "Salsa", "Salvador Dogi", "Santa Paws", "Sarah Jessica Barker", "Scrappy", "Sherlock Bones", "Squeakers", "Squirt", "Tank", "Tater", "The Notorious D.O.G.", "Toto", "Twinkie", "Waffles", "Waldo", "Winnie the Poodle", "Woofgang Puck", "Yoda", "Zeus"}, + "animal": {"alligator", "alpaca", "ant", "antelope", "ape", "armadillo", "baboon", "badger", "bat", "bear", "beaver", "bee", "beetle", "buffalo", "butterfly", "camel", "caribou", "cat", "cattle", "cheetah", "chimpanzee", "chinchilla", "cicada", "clam", "cockroach", "cod", "coyote", "crab", "cricket", "crocodile", "crow", "deer", "dinosaur", "dog", "dolphin", "donkey", "duck", "eagle", "eel", "elephant", "elk", "ferret", "fish", "fly", "fox", "frog", "gerbil", "giraffe", "gnat", "gnu", "goat", "goldfish", "goose", "gorilla", "grasshopper", "guinea pig", "hamster", "hare", "hedgehog", "herring", "hippopotamus", "hornet", "horse", "hound", "hyena", "impala", "jackal", "jellyfish", "kangaroo", "koala", "leopard", "lion", "lizard", "llama", "locust", "louse", "macaw", "mallard", "mammoth", "manatee", "marten", "mink", "minnow", "mole", "monkey", "moose", "mosquito", "mouse", "mule", "muskrat", "otter", "ox", "oyster", "panda", "pig", "platypus", "porcupine", "porpoise", "prairie dog", "pug", "rabbit", "raccoon", "rat", "raven", "reindeer", "rhinoceros", "salmon", "sardine", "scorpion", "sea lion", "seal", "serval", "shark", "sheep", "skunk", "snail", "snake", "spider", "squirrel", "swan", "termite", "tiger", "toad", "tortoise", "trout", "turtle", "wallaby", "walrus", "wasp", "water buffalo", "weasel", "whale", "wildebeest", "wolf", "wombat", "woodchuck", "worm", "yak", "yellowjacket", "zebra"}, + "type": {"amphibians", "birds", "fish", "invertebrates", "mammals", "reptiles"}, + "farm": {"Chicken", "Cow", "Donkey", "Duck", "Goat", "Goose", "Horse", "Llama", "Pig", "Sheep", "Turkey"}, + "cat": {"Abyssinian", "Aegean", "American Bobtail", "American Curl", "American Shorthair", "American Wirehair", "Arabian Mau", "Asian", "Asian Semi-longhair", "Australian Mist", "Balinese", "Bambino", "Bengal", "Birman", "Bombay", "Brazilian Shorthair", "British Longhair", "British Semipi-longhair", "British Shorthair", "Burmese", "Burmilla", "California Spangled", "Chantilly-Tiffany", "Chartreux", "Chausie", "Cheetoh", "Colorpoint Shorthair", "Cornish Rex", "Cymric, or Manx Longhair", "Cyprus", "Devon Rex", "Donskoy, or Don Sphynx", "Dragon Li", "Dwarf cat, or Dwelf", "Egyptian Mau", "European Shorthair", "Exotic Shorthair", "Foldex Cat", "German Rex", "Havana Brown", "Highlander", "Himalayan, or Colorpoint Persian", "Japanese Bobtail", "Javanese", "Khao Manee", "Korat", "Korean Bobtail", "Korn Ja", "Kurilian Bobtail", "Kurilian Bobtail, or Kuril Islands Bobtail", "LaPerm", "Lykoi", "Maine Coon", "Manx", "Mekong Bobtail", "Minskin", "Munchkin", "Napoleon", "Nebelung", "Norwegian Forest Cat", "Ocicat", "Ojos 
Azules", "Oregon Rex", "Oriental Bicolor", "Oriental Longhair", "Oriental Shorthair", "Persian", "Peterbald", "Pixie-bob", "Raas", "Ragamuffin", "Ragdoll", "Russian Blue", "Russian White, Black and Tabby", "Sam Sawet", "Savannah", "Scottish Fold", "Selkirk Rex", "Serengeti", "Serrade petit", "Siamese", "Siberian", "Singapura", "Snowshoe", "Sokoke", "Somali", "Sphynx", "Suphalak", "Thai", "Tonkinese", "Toyger", "Turkish Angora", "Turkish Van", "Ukrainian Levkoy"}, + "dog": {"Affenpinscher", "African", "Airedale", "Akita", "Appenzeller", "Basenji", "Beagle", "Bluetick", "Borzoi", "Bouvier", "Boxer", "Brabancon", "Briard", "Boston Bulldog", "French Bulldog", "Staffordshire Bullterrier", "Cairn", "Chihuahua", "Chow", "Clumber", "Border Collie", "Coonhound", "Cardigan Corgi", "Dachshund", "Great Dane", "Scottish Deerhound", "Dhole", "Dingo", "Doberman", "Norwegian Elkhound", "Entlebucher", "Eskimo", "Germanshepherd", "Italian Greyhound", "Groenendael", "Ibizan Hound", "Afghan Hound", "Basset Hound", "Blood Hound", "English Hound", "Walker Hound", "Husky", "Keeshond", "Kelpie", "Komondor", "Kuvasz", "Labrador", "Leonberg", "Lhasa", "Malamute", "Malinois", "Maltese", "Bull Mastiff", "Tibetan Mastiff", "Mexicanhairless", "Bernese Mountain", "Swiss Mountain", "Newfoundland", "Otterhound", "Papillon", "Pekinese", "Pembroke", "Miniature Pinscher", "German Pointer", "Pomeranian", "Miniature Poodle", "Standard Poodle", "Toy Poodle", "Pug", "Pyrenees", "Redbone", "Chesapeake Retriever", "Curly Retriever", "Flatcoated Retriever", "Golden Retriever", "Rhodesian Ridgeback", "Rottweiler", "Saluki", "Samoyed", "Schipperke", "Giant Schnauzer", "Miniature Schnauzer", "English Setter", "Gordon Setter", "Irish Setter", "English Sheepdog", "Shetland Sheepdog", "Shiba", "Shihtzu", "Blenheim Spaniel", "Brittany Spaniel", "Cocker Spaniel", "Irish Spaniel", "Japanese Spaniel", "Sussex Spaniel", "Welsh Spaniel", "English Springer", "Stbernard", "American Terrier", "Australian Terrier", "Bedlington Terrier", "Border Terrier", "Dandie Terrier", "Fox Terrier", "Irish Terrier", "Kerryblue Terrier", "Lakeland Terrier", "Norfolk Terrier", "Norwich Terrier", "Patterdale Terrier", "Rat Terrier", "Scottish Terrier", "Sealyham Terrier", "Silky Terrier", "Tibetan Terrier", "Toy Terrier", "Westhighland Terrier", "Wheaten Terrier", "Yorkshire Terrier", "Vizsla", "Weimaraner", "Whippet", "Irish Wolfhound"}, + "bird": {"albatross", "bluejay", "canary", "cardinal", "chicken", "crow", "dove", "duck", "eagle", "emu", "falcon", "flamingo", "goose", "hornbill", "hummingbird", "ibis", "jay", "kingfisher", "lovebird", "mynah", "nightingale", "oriole", "ostrich", "owl", "parrot", "peacock", "penguin", "quail", "rooster", "sparrow", "swan", "thrush", "toucan", "vulture", "woodpecker", "yellow warbler"}, +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/data/bank.go b/vendor/github.com/brianvoe/gofakeit/v7/data/bank.go new file mode 100644 index 0000000000..47e81b265e --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/data/bank.go @@ -0,0 +1,67 @@ +package data + +var Bank = map[string][]string{ + "name": { + "Agricultural Bank of China", + "BNP Paribas", + "Banco Bilbao Vizcaya Argentaria", + "Banco Santander", + "Bank of America", + "Bank of China", + "Bank of Communications", + "Barclays", + "Capital One Financial Corporation", + "China Citic Bank", + "China Construction Bank Corporation", + "China Everbright Bank", + "China Merchants Bank", + "China Minsheng Bank", + "Citigroup", + "Commonwealth Bank Group", + "Credit 
Agricole Group", + "Credit Mutuel", + "Deutsche Bank", + "Goldman Sachs", + "Groupe BPCE", + "HDFC Bank", + "HSBC Holdings", + "Hua Xia Bank", + "ING Group", + "Industrial Bank", + "Industrial and Commercial Bank of China", + "Intesa Sanpaolo", + "JP Morgan Chase & Co", + "Lloyds Banking Group", + "Mitsubishi UFJ Financial Group", + "Mizuho Financial Group", + "Morgan Stanley", + "PNC Financial Services Group", + "Ping An Bank", + "Postal Savings Bank of China", + "Rabobank Group", + "Royal Bank of Canada", + "Sberbank", + "Scotiabank", + "Shanghai Pudong Development Bank", + "Societe Generale", + "State Bank of India", + "Sumitomo Mitsui Financial Group", + "Toronto Dominion Bank", + "Truist Bank", + "UBS", + "US Bancorp", + "UniCredit", + "Wells Fargo & Co", + }, + "type": { + "Central Bank", + "Commercial Bank", + "Cooperative Bank", + "Investment Bank", + "Online Bank", + "Policy Bank", + "Private Bank", + "Retail Bank", + "Savings Bank", + }, +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/data/beer.go b/vendor/github.com/brianvoe/gofakeit/v7/data/beer.go new file mode 100644 index 0000000000..1192907d5f --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/data/beer.go @@ -0,0 +1,10 @@ +package data + +// Beer consists of various beer information +var Beer = map[string][]string{ + "name": {"Pliny The Elder", "Founders Kentucky Breakfast", "Trappistes Rochefort 10", "HopSlam Ale", "Stone Imperial Russian Stout", "St. Bernardus Abt 12", "Founders Breakfast Stout", "Weihenstephaner Hefeweissbier", "Péché Mortel", "Celebrator Doppelbock", "Duvel", "Dreadnaught IPA", "Nugget Nectar", "La Fin Du Monde", "Bourbon County Stout", "Old Rasputin Russian Imperial Stout", "Two Hearted Ale", "Ruination IPA", "Schneider Aventinus", "Double Bastard Ale", "90 Minute IPA", "Hop Rod Rye", "Trappistes Rochefort 8", "Chimay Grande Réserve", "Stone IPA", "Arrogant Bastard Ale", "Edmund Fitzgerald Porter", "Chocolate St", "Oak Aged Yeti Imperial Stout", "Ten FIDY", "Storm King Stout", "Shakespeare Oatmeal", "Alpha King Pale Ale", "Westmalle Trappist Tripel", "Samuel Smith’s Imperial IPA", "Yeti Imperial Stout", "Hennepin", "Samuel Smith’s Oatmeal Stout", "Brooklyn Black", "Oaked Arrogant Bastard Ale", "Sublimely Self-Righteous Ale", "Trois Pistoles", "Bell’s Expedition", "Sierra Nevada Celebration Ale", "Sierra Nevada Bigfoot Barleywine Style Ale", "Racer 5 India Pale Ale, Bear Republic Bre", "Orval Trappist Ale", "Hercules Double IPA", "Maharaj", "Maudite"}, + "hop": {"Ahtanum", "Amarillo", "Bitter Gold", "Bravo", "Brewer’s Gold", "Bullion", "Cascade", "Cashmere", "Centennial", "Chelan", "Chinook", "Citra", "Cluster", "Columbia", "Columbus", "Comet", "Crystal", "Equinox", "Eroica", "Fuggle", "Galena", "Glacier", "Golding", "Hallertau", "Horizon", "Liberty", "Magnum", "Millennium", "Mosaic", "Mt. Hood", "Mt. 
Rainier", "Newport", "Northern Brewer", "Nugget", "Olympic", "Palisade", "Perle", "Saaz", "Santiam", "Simcoe", "Sorachi Ace", "Sterling", "Summit", "Tahoma", "Tettnang", "TriplePearl", "Ultra", "Vanguard", "Warrior", "Willamette", "Yakima Gol"}, + "yeast": {"1007 - German Ale", "1010 - American Wheat", "1028 - London Ale", "1056 - American Ale", "1084 - Irish Ale", "1098 - British Ale", "1099 - Whitbread Ale", "1187 - Ringwood Ale", "1272 - American Ale II", "1275 - Thames Valley Ale", "1318 - London Ale III", "1332 - Northwest Ale", "1335 - British Ale II", "1450 - Dennys Favorite 50", "1469 - West Yorkshire Ale", "1728 - Scottish Ale", "1968 - London ESB Ale", "2565 - Kölsch", "1214 - Belgian Abbey", "1388 - Belgian Strong Ale", "1762 - Belgian Abbey II", "3056 - Bavarian Wheat Blend", "3068 - Weihenstephan Weizen", "3278 - Belgian Lambic Blend", "3333 - German Wheat", "3463 - Forbidden Fruit", "3522 - Belgian Ardennes", "3638 - Bavarian Wheat", "3711 - French Saison", "3724 - Belgian Saison", "3763 - Roeselare Ale Blend", "3787 - Trappist High Gravity", "3942 - Belgian Wheat", "3944 - Belgian Witbier", "2000 - Budvar Lager", "2001 - Urquell Lager", "2007 - Pilsen Lager", "2035 - American Lager", "2042 - Danish Lager", "2112 - California Lager", "2124 - Bohemian Lager", "2206 - Bavarian Lager", "2278 - Czech Pils", "2308 - Munich Lager", "2633 - Octoberfest Lager Blend", "5112 - Brettanomyces bruxellensis", "5335 - Lactobacillus", "5526 - Brettanomyces lambicus", "5733 - Pediococcus"}, + "malt": {"Black malt", "Caramel", "Carapils", "Chocolate", "Munich", "Caramel", "Carapils", "Chocolate malt", "Munich", "Pale", "Roasted barley", "Rye malt", "Special roast", "Victory", "Vienna", "Wheat mal"}, + "style": {"Light Lager", "Pilsner", "European Amber Lager", "Dark Lager", "Bock", "Light Hybrid Beer", "Amber Hybrid Beer", "English Pale Ale", "Scottish And Irish Ale", "Merican Ale", "English Brown Ale", "Porter", "Stout", "India Pale Ale", "German Wheat And Rye Beer", "Belgian And French Ale", "Sour Ale", "Belgian Strong Ale", "Strong Ale", "Fruit Beer", "Vegetable Beer", "Smoke-flavored", "Wood-aged Beer"}, +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/data/book.go b/vendor/github.com/brianvoe/gofakeit/v7/data/book.go new file mode 100644 index 0000000000..ec3e5d849b --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/data/book.go @@ -0,0 +1,101 @@ +package data + +var Books = map[string][]string{ + "title": { + "Anna Karenina", + "Beloved", + "Blindness", + "Bostan", + "Buddenbrooks", + "Crime and Punishment", + "Don Quijote De La Mancha", + "Fairy tales", + "Faust", + "Gulliver's Travels", + "Gypsy Ballads", + "Hamlet", + "Harry potter and the sorcerer's stone", + "King Lear", + "Leaves of Grass", + "Lolita", + "Madame Bovary", + "Memoirs of Hadrian", + "Metamorphoses", + "Moby Dick", + "Nineteen Eighty-Four", + "Odyssey", + "Oedipus the King", + "One Hundred Years of Solitude", + "One Thousand and One Nights", + "Othello", + "Pippi Longstocking", + "Pride and Prejudice", + "Romeo & Juliet", + "Sherlock Holmes", + "Sons and Lovers", + "The Adventures of Huckleberry Finn", + "The Book Of Job", + "The Brothers Karamazov", + "The Golden Notebook", + "The Idiot", + "The Old Man and the Sea", + "The Stranger", + "Things Fall Apart", + "Ulysses", + "War and Peace", + "Wuthering Heights", + "Zorba the Greek", + }, + "author": { + "Albert Camus", + "Astrid Lindgren", + "Charles Dickens", + "D. H. 
Lawrence", + "Edgar Allan Poe", + "Emily Brontë", + "Ernest Hemingway", + "Franz Kafka", + "Fyodor Dostoevsky", + "George Orwell", + "Hans Christian Andersen", + "Homer", + "James Joyce", + "Jane Austen", + "Johann Wolfgang von Goethe", + "Jorge Luis Borges", + "Joanne K. Rowling", + "Leo Tolstoy", + "Marcel Proust", + "Mark Twain", + "Paul Celan", + "Salman Rushdie", + "Sophocles", + "Thomas Mann", + "Toni Morrison", + "Vladimir Nabokov", + "William Faulkner", + "William Shakespeare", + "Yasunari Kawabata", + }, + "genre": { + "Adventure", + "Comic", + "Crime", + "Erotic", + "Fiction", + "Fantasy", + "Historical", + "Horror", + "Magic", + "Mystery", + "Philosophical", + "Political", + "Romance", + "Saga", + "Satire", + "Science", + "Speculative", + "Thriller", + "Urban", + }, +} \ No newline at end of file diff --git a/vendor/github.com/brianvoe/gofakeit/v7/data/car.go b/vendor/github.com/brianvoe/gofakeit/v7/data/car.go new file mode 100644 index 0000000000..8754b1220e --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/data/car.go @@ -0,0 +1,10 @@ +package data + +// Car Beer consists of various beer information +var Car = map[string][]string{ + "type": {"Passenger car mini", "Passenger car light", "Passenger car compact", "Passenger car medium", "Passenger car heavy", "Sport utility vehicle", "Pickup truck", "Van"}, + "fuel_type": {"Gasoline", "Methanol", "Ethanol", "Diesel", "LPG", "CNG", "Electric"}, + "transmission_type": {"Manual", "Automatic"}, + "maker": {"Alfa Romeo", "Aston Martin", "Audi", "Bentley", "Benz", "BMW", "Bugatti", "Cadillac", "Chevrolet", "Chrysler", "Citroen", "Corvette", "DAF", "Dacia", "Daewoo", "Daihatsu", "Datsun", "De Lorean", "Dino", "Dodge", "Farboud", "Ferrari", "Fiat", "Ford", "Honda", "Hummer", "Hyundai", "Jaguar", "Jeep", "KIA", "Koenigsegg", "Lada", "Lamborghini", "Lancia", "Land Rover", "Lexus", "Ligier", "Lincoln", "Lotus", "Martini", "Maserati", "Maybach", "Mazda", "McLaren", "Mercedes", "Mercedes-Benz", "Mini", "Mitsubishi", "Nissan", "Noble", "Opel", "Peugeot", "Pontiac", "Porsche", "Renault", "Rolls-Royce", "Rover", "Saab", "Seat", "Skoda", "Smart", "Spyker", "Subaru", "Suzuki", "Toyota", "Tesla", "Vauxhall", "Volkswagen", "Volvo"}, + "model": {"Db9 Coupe", "Db9 Coupe Manual", "Db9 Volante", "V12 Vanquish S", "V8 Vantage", "A3", "A4", "A4 Avant Quattro", "A4 Cabriolet", "A4 Cabriolet Quattro", "A4 Quattro", "A6", "A6 Avant Quattro", "A6 Quattro", "A8 L", "Gti", "Passat", "S4", "S4 Avant", "S4 Cabriolet", "Tt Coupe", "Tt Roadster", "Bentley Arnage", "Continental Flying Spur", "Continental Gt", "325ci Convertible", "325i", "325xi", "325xi Sport Wagon", "330ci Convertible", "330i", "330xi", "525i", "525xi", "530i", "530xi", "530xi Sport Wagon", "550i", "650ci", "650ci Convertible", "750li", "760li", "M3", "M3 Convertible", "M5", "M6", "Mini Cooper", "Mini Cooper Convertible", "Mini Cooper S", "Mini Cooper S Convertible", "X3", "X5", "X5 4.8is", "Z4 3.0 Si Coupe", "Z4 3.0i", "Z4 3.0si", "Z4 M Roadster", "Veyron", "300c/srt-8", "Caravan 2wd", "Charger", "Commander 4wd", "Crossfire Roadster", "Dakota Pickup 2wd", "Dakota Pickup 4wd", "Durango 2wd", "Durango 4wd", "Grand Cherokee 2wd", "Grand Cherokee 4wd", "Liberty/cherokee 2wd", "Liberty/cherokee 4wd", "Pacifica 2wd", "Pacifica Awd", "Pt Cruiser", "Ram 1500 Pickup 2wd", "Ram 1500 Pickup 4wd", "Sebring 4-dr", "Stratus 4-dr", "Town & Country 2wd", "Viper Convertible", "Wrangler/tj 4wd", "F430", "Ferrari 612 Scaglietti", "Ferrari F141", "B4000 4wd", "Crown Victoria Police", "E150 Club Wagon", 
"E150 Econoline 2wd", "Escape 4wd", "Escape Fwd", "Escape Hybrid 4wd", "Escape Hybrid Fwd", "Expedition 2wd", "Explorer 2wd", "Explorer 4wd", "F150 Ffv 2wd", "F150 Ffv 4wd", "F150 Pickup 2wd", "F150 Pickup 4wd", "Five Hundred Awd", "Focus Fwd", "Focus Station Wag", "Freestar Wagon Fwd", "Freestyle Awd", "Freestyle Fwd", "Grand Marquis", "Gt 2wd", "Ls", "Mark Lt", "Milan", "Monterey Wagon Fwd", "Mountaineer 4wd", "Mustang", "Navigator 2wd", "Ranger Pickup 2wd", "Ranger Pickup 4wd", "Taurus", "Taurus Ethanol Ffv", "Thunderbird", "Town Car", "Zephyr", "B9 Tribeca Awd", "Baja Awd", "Forester Awd", "Impreza Awd", "Impreza Wgn/outback Spt Awd", "Legacy Awd", "Legacy Wagon Awd", "Outback Awd", "Outback Wagon Awd", "9-3 Convertible", "9-3 Sport Sedan", "9-5 Sedan", "C15 Silverado Hybrid 2wd", "C1500 Silverado 2wd", "C1500 Suburban 2wd", "C1500 Tahoe 2wd", "C1500 Yukon 2wd", "Cobalt", "Colorado 2wd", "Colorado 4wd", "Colorado Cab Chassis Inc 2wd", "Colorado Crew Cab 2wd", "Colorado Crew Cab 4wd", "Corvette", "Cts", "Dts", "Envoy 2wd", "Envoy Xl 4wd", "Equinox Awd", "Equinox Fwd", "Escalade 2wd", "Escalade Esv Awd", "G15/25chev Van 2wd Conv", "G1500/2500 Chevy Express 2wd", "G1500/2500 Chevy Van 2wd", "G6", "G6 Gt/gtp Convertible", "Grand Prix", "Gto", "H3 4wd", "Hhr Fwd", "I-280 2wd Ext Cab", "Impala", "K15 Silverado Hybrid 4wd", "K1500 Avalanche 4wd", "K1500 Silverado 4wd", "K1500 Tahoe 4wd", "Lacrosse/allure", "Limousine", "Malibu", "Montana Sv6 Awd", "Monte Carlo", "Rendezvous Awd", "Rendezvous Fwd", "Solstice", "Srx 2wd", "Srx Awd", "Ssr Pickup 2wd", "Sts", "Sts Awd", "Terraza Fwd", "Trailblazer 2wd", "Trailblazer 4wd", "Trailblazer Awd", "Trailblazer Ext 4wd", "Uplander Fwd", "Vue Awd", "Vue Fwd", "Xlr", "Aveo", "Forenza", "Forenza Wagon", "Verona", "Accord", "Accord Hybrid", "Civic", "Civic Hybrid", "Cr-v 4wd", "Element 2wd", "Element 4wd", "Insight", "Mdx 4wd", "Odyssey 2wd", "Pilot 2wd", "Pilot 4wd", "Ridgeline 4wd", "Rl", "Rsx", "S2000", "Tl", "Tsx", "Accent", "Azera", "Elantra", "Santafe 2wd", "Santafe 4wd", "Sonata", "Tiburon", "Tucson 2wd", "Tucson 4wd", "S-type 3.0 Litre", "S-type 4.2 Litre", "S-type R", "Vdp Lwb", "Xj8", "Xk8 Convertible", "Xkr Convertible", "X-type", "X-type Sport Brake", "Amanti", "Optima", "Optima(ms)", "Rio", "Sedona", "Sorento 2wd", "Sorento 4wd", "Spectra(ld)", "Sportage 2wd", "Sportage 4wd", "L-140/715 Gallardo", "L-147/148 Murcielago", "Lr3", "Range Rover", "Range Rover Sport", "Elise/exige", "Coupe Cambiocorsa/gt/g-sport", "Quattroporte", "Mazda 3", "Mazda 5", "Mazda 6", "Mazda 6 Sport Wagon", "Mazda Rx-8", "Mpv", "Mx-5", "C230", "C280", "C280 4matic", "C350", "C350 4matic", "C55 Amg", "Cl65 Amg", "Clk350", "Clk350 (cabriolet)", "Clk55 Amg (cabriolet)", "Cls500", "Cls55 Amg", "E320 Cdi", "E350", "E350 (wagon)", "E350 4matic", "E350 4matic (wagon)", "E500", "E55 Amg", "E55 Amg (wagon)", "Maybach 57s", "Maybach 62", "Ml350", "Ml500", "R350", "R500", "S350", "S430", "Sl500", "Sl600", "Sl65 Amg", "Slk280", "Slk350", "Slr", "Eclipse", "Endeavor 2wd", "Endeavor 4wd", "Galant", "Lancer", "Lancer Evolution", "Lancer Sportback", "Montero", "Outlander 2wd", "Outlander 4wd", "Vibe", "350z", "350z Roadster", "Altima", "Armada 2wd", "Armada 4wd", "Frontier 2wd", "Frontier V6-2wd", "Frontier V6-4wd", "Fx35 Awd", "Fx35 Rwd", "Fx45 Awd", "G35", "M35", "M35x", "M45", "Maxima", "Murano Awd", "Murano Fwd", "Pathfinder 2wd", "Pathfinder 4wd", "Q45", "Q45 Sport", "Quest", "Qx56 4wd", "Sentra", "Titan 2wd", "Titan 4wd", "Xterra 2wd", "Xterra 4wd", "Boxster", "Boxster S", "Carrera 
2 Coupe", "Cayenne", "Cayenne S", "Cayenne Turbo", "Cayman S", "Phantom", "F150 Supercrew 4wd", "C8 Spyder", "Aerio", "Aerio Sx", "Aerio Sx Awd", "Grand Vitara Xl-7", "Grand Vitara Xl-7 4wd", "Grand Vitara Xv6", "Grand Vitara Xv6 Awd", "4runner 2wd", "4runner 4wd", "Avalon", "Camry", "Camry Solara", "Camry Solara Convertible", "Corolla", "Corolla Matrix", "Es 330", "Gs 300 4wd", "Gs 300/gs 430", "Gx 470", "Highlander 2wd", "Highlander 4wd", "Highlander Hybrid 2wd", "Highlander Hybrid 4wd", "Is 250", "Is 250 Awd", "Is 350", "Ls 430", "Lx 470", "Prius", "Rav4 2wd", "Rav4 4wd", "Rx 330 2wd", "Rx 330 4wd", "Rx 400h 4wd", "Sc 430", "Scion Tc", "Scion Xa", "Scion Xb", "Sequoia 2wd", "Sequoia 4wd", "Sienna 2wd", "Sienna 4wd", "Toyota Tacoma 2wd", "Toyota Tacoma 4wd", "Toyota Tundra 2wd", "Toyota Tundra 4wd", "Yaris", "A3 Quattro", "Golf", "Jetta", "New Beetle", "New Beetle Convertible", "Passat Wagon 4motion", "Phaeton", "Rabbit", "Touareg", "Tt Coupe Quattro", "Tt Roadster Quattro", "C70 Convertible", "S40 Awd", "S40 Fwd", "S60 Awd", "S60 Fwd", "S60 R Awd", "S80 Fwd", "V50 Awd", "V70 Fwd", "V70 R Awd", "Xc 70 Awd", "Xc 90 Awd", "Xc 90 Fwd"}, +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/data/celebrity.go b/vendor/github.com/brianvoe/gofakeit/v7/data/celebrity.go new file mode 100644 index 0000000000..ae4fcffd8d --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/data/celebrity.go @@ -0,0 +1,7 @@ +package data + +var Celebrity = map[string][]string{ + "actor": {"Arnold Schwarzenegger", "Jim Carrey", "Emma Watson", "Robert Downey Jr.", "Daniel Radcliffe", "Chris Evans", "Leonardo DiCaprio", "Tom Cruise", "Brad Pitt", "Charles Chaplin", "Morgan Freeman", "Tom Hanks", "Hugh Jackman", "Matt Damon", "Sylvester Stallone", "Will Smith", "Clint Eastwood", "Cameron Diaz", "George Clooney", "Steven Spielberg", "Harrison Ford", "Robert De Niro", "Al Pacino", "Robert Downey Jr.", "Russell Crowe", "Liam Neeson", "Kate Winslet", "Mark Wahlberg", "Natalie Portman", "Pierce Brosnan", "Sean Connery", "Orlando Bloom", "Dwayne Johnson", "Jackie Chan", "Angelina Jolie", "Adam Sandler", "Scarlett Johansson", "Heath Ledger", "Anne Hathaway", "Jessica Alba", "Edward Norton", "Keira Knightley", "Bradley Cooper", "Will Ferrell", "Julia Roberts", "Nicolas Cage", "Daniel Craig", "Keanu Reeves", "Ian McKellen", "Halle Berry", "Bruce Willis", "Ben Stiller", "Tommy Lee Jones", "Antonio Banderas", "Denzel Washington", "Steve Carell", "Shia LaBeouf", "Megan Fox", "James Franco", "Mel Gibson", "Vin Diesel", "Tim Allen", "Robin Williams", "Kevin Spacey", "Jason Biggs", "Seann William Scott", "Jean-Claude Van Damme", "Zach Galifianakis", "Owen Wilson", "Christian Bale", "Peter Jackson", "Sandra Bullock", "Bruce Lee", "Drew Barrymore", "Macaulay Culkin", "Jack Nicholson", "Bill Murray", "Sigourney Weaver", "Jake Gyllenhaal", "Jason Statham", "Jet Li", "Kate Beckinsale", "Rowan Atkinson", "Marlon Brando", "John Travolta", "Channing Tatum", "Ben Affleck", "Shah Rukh Khan", "Jennifer Aniston", "Emma Stone", "Chris Hemsworth", "James McAvoy", "James Cameron", "Amitabh Bachchan", "Brendan Fraser", "Rachel McAdams", "Tom Hiddleston", "Aamir Khan"}, + "business": {"Elon Musk", "Steve Jobs", "Jeff Bezos", "Bill Gates", "Mark Zuckerberg", "Sundar Pichai", "Walt Disney", "Warren Buffett", "Mukesh Ambani", "P. T. Barnum", "Colonel Sanders", "Ray Kroc", "Richard Branson", "Henry Ford", "Larry Page", "Steve Wozniak", "Ratan Tata", "John D. Rockefeller", "Madam C. J. 
Walker", "Tim Cook", "Andrew Carnegie", "Paul Allen", "Bobby Flay", "J. P. Morgan", "Satya Nadella", "Dhirubhai Ambani", "Carlos Slim", "Ross Perot", "Jamie Oliver", "Jack Ma", "Larry Ellison", "Sam Walton", "Sheryl Sandberg", "Marco Pierre White", "Indra Nooyi", "David Rockefeller", "Steve Ballmer", "Beyonce Knowles", "N. R. Narayana Murthy", "Mark Wahlberg", "Cameron Diaz", "Sergey Brin", "Howard Hughes", "Jessica Alba", "Dustin Moskovitz", "Eva Mendes", "Amancio Ortega Gaona", "Fred Trump", "Jamsetji Tata", "Kate Hudson", "Martha Stewart", "Peter Jones", "Marco Polo", "Susan Wojcicki", "Oskar Schindler", "Elizabeth Hurley", "Sean Combs", "Kate Spade", "Vincent McMahon", "David Chang", "Coco Chanel", "Vera Wang", "Arianna Huffington", "John McAfee", "Dany Garcia", "Richard Attenborough", "Donatella Versace", "Chris Hughes", "Alexis Ohanian", "J. Paul Getty", "Sharon Osbourne", "Bob Iger", "Kate Walsh", "Chris Gardner", "Jessica Simpson", "Guy Fieri", "Joy Mangano", "Wolfgang Puck", "Christie Brinkley", "Tom Steyer", "Evan Spiegel", "Hugh Hefner", "Preity Zinta", "Shane McMahon", "Salt Bae", "Mario Batali", "Bernard Arnault", "Michael Bloomberg", "Portia de Rossi", "Kevin O'Leary", "Roman Abramovich", "Jamie Dimon", "Rob Dyrdek", "Emeril Lagasse", "Kat Von D", "Karlie Kloss", "Antoni Porowski", "Edmond James de Rothschild", "Mitt Romney", "Aristotle Onassis", "Richard Benjamin Harrison", "Ben Bernanke", "Mark Cuban", "William Randolph Hearst", "Nate Robinson", "Alan Shepard", "Christina Anstead", "Laurene Powell Jobs", "Adam Weitsman", "Gladys Knight", "Gary Vaynerchuk", "Robert Kraft", "John Paul DeJoria", "Lori Greiner", "Carly Fiorina", "Lakshmi Mittal", "Jerry Jones", "Meg Whitman", "Azim Premji", "Lisa Vanderpump", "Dana White", "Russell Simmons", "Jennifer Flavin", "Harry Hamlin", "Conrad Hilton", "Prescott Bush", "Alvaro Morte", "Shigeru Miyamoto", "Phil Knight", "Jack Dorsey", "Barbara Bush", "Lee Iacocca", "Ma Huateng", "Rick Harrison", "Drew Scott", "Jawed Karim", "Daymond John", "Jaclyn Smith", "Maryse Ouellet", "Allegra Versace"}, + "sport": {"Pele", "Usain Bolt", "Muhammad Ali", "Carl Lewis", "Jesse Owens", "Sir Donald Bradman", "Billie Jean King", "Eddy Merckx", "Jackie Joyner-Kersee", "Lionel Messi", "Babe Didrikson Zaharias", "Michael Jordan", "Larisa Latynina", "Diego Maradona", "Serena Williams", "Babe Ruth", "Roger Federer", "Martina Navratilova", "Michael Phelps", "Lottie Dod", "Sachin Tendulkar", "Johan Cruyff", "Tiger Woods", "Sonja Henie", "Aryton Senna", "Nadia Comaneci", "Sergei Bubka", "Emil Zatopek", "Manny Pacquiao", "Imran Khan", "Jackie Robinson", "Shane Warne", "Dhyan Chand", "Fred Perry", "Lin Dan", "Abebe Bikila", "Clara Hughes", "Jan-Ove Waldner", "Bobby Moore", "Bjorn Borg", "Karch Kiraly", "Bradley Wiggins", "Seve Ballesteros", "David Beckham", "Michael Schumacher", "Greg Lemond", "Mia Hamm", "Jacques Anquetil", "Jack Nicklaus", "Steve Davis", "John McEnroe", "Monica Seles", "Magic Johnson", "Joe DiMaggio", "Roger Bannister", "Mo Farah", "Mark Spitz", "Chris Evert", "Al Oerter", "Jimmy Connors", "Michael Johnson", "Ian Botham", "Jim Thorpe", "Sir Steve Redgrave", "Steffi Graf", "Sebastian Coe", "Hicham El Guerrouj", "Eric Liddell", "W.G Grace", "Kenenisa Bekele", "Bernard Hinault", "Bob Beamon", "Paavo Nurmi", "David Campese", "Kelly Slater", "Haile Gebreselassie", "Rafael Nadal", "Brian Lara", "Chris Hoy", "Serge Blanco", "Cristiano Ronaldo", "Sir Gary Sobers", "Andy Murray", "George Best", "Sir Viv Richards", "Fausto Coppi", "Eusebio", "Rod Laver", 
"Grete Waitz", "Margaret Smith Court", "Tegla Laroupe", "Fanny Blankers-Koen", "Asbel Kiprop", "Lewis Hamilton", "C.B.Fry", "Annika Sörenstam", "Wilma Rudolph", "Alberta Tomba", "Bo Jackson"}, +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/data/colors.go b/vendor/github.com/brianvoe/gofakeit/v7/data/colors.go new file mode 100644 index 0000000000..96761aad29 --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/data/colors.go @@ -0,0 +1,110 @@ +package data + +// Colors consists of color information +var Colors = map[string][]string{ + "safe": {"black", "maroon", "green", "navy", "olive", "purple", "teal", "lime", "blue", "silver", "gray", "yellow", "fuchsia", "aqua", "white"}, + "full": {"AliceBlue", "AntiqueWhite", "Aqua", "Aquamarine", "Azure", "Beige", "Bisque", "Black", "BlanchedAlmond", "Blue", "BlueViolet", "Brown", "BurlyWood", "CadetBlue", "Chartreuse", "Chocolate", "Coral", "CornflowerBlue", "Cornsilk", "Crimson", "Cyan", "DarkBlue", "DarkCyan", "DarkGoldenRod", "DarkGray", "DarkGreen", "DarkKhaki", "DarkMagenta", "DarkOliveGreen", "Darkorange", "DarkOrchid", "DarkRed", "DarkSalmon", "DarkSeaGreen", "DarkSlateBlue", "DarkSlateGray", "DarkTurquoise", "DarkViolet", "DeepPink", "DeepSkyBlue", "DimGray", "DimGrey", "DodgerBlue", "FireBrick", "FloralWhite", "ForestGreen", "Fuchsia", "Gainsboro", "GhostWhite", "Gold", "GoldenRod", "Gray", "Green", "GreenYellow", "HoneyDew", "HotPink", "IndianRed", "Indigo", "Ivory", "Khaki", "Lavender", "LavenderBlush", "LawnGreen", "LemonChiffon", "LightBlue", "LightCoral", "LightCyan", "LightGoldenRodYellow", "LightGray", "LightGreen", "LightPink", "LightSalmon", "LightSeaGreen", "LightSkyBlue", "LightSlateGray", "LightSteelBlue", "LightYellow", "Lime", "LimeGreen", "Linen", "Magenta", "Maroon", "MediumAquaMarine", "MediumBlue", "MediumOrchid", "MediumPurple", "MediumSeaGreen", "MediumSlateBlue", "MediumSpringGreen", "MediumTurquoise", "MediumVioletRed", "MidnightBlue", "MintCream", "MistyRose", "Moccasin", "NavajoWhite", "Navy", "OldLace", "Olive", "OliveDrab", "Orange", "OrangeRed", "Orchid", "PaleGoldenRod", "PaleGreen", "PaleTurquoise", "PaleVioletRed", "PapayaWhip", "PeachPuff", "Peru", "Pink", "Plum", "PowderBlue", "Purple", "Red", "RosyBrown", "RoyalBlue", "SaddleBrown", "Salmon", "SandyBrown", "SeaGreen", "SeaShell", "Sienna", "Silver", "SkyBlue", "SlateBlue", "SlateGray", "Snow", "SpringGreen", "SteelBlue", "Tan", "Teal", "Thistle", "Tomato", "Turquoise", "Violet", "Wheat", "White", "WhiteSmoke", "Yellow", "YellowGreen"}, +} + +var ColorsNice = [][]string{ + {"#69d2e7", "#a7dbd8", "#e0e4cc", "#f38630", "#fa6900"}, + {"#fe4365", "#fc9d9a", "#f9cdad", "#c8c8a9", "#83af9b"}, + {"#ecd078", "#d95b43", "#c02942", "#542437", "#53777a"}, + {"#556270", "#4ecdc4", "#c7f464", "#ff6b6b", "#c44d58"}, + {"#774f38", "#e08e79", "#f1d4af", "#ece5ce", "#c5e0dc"}, + {"#e8ddcb", "#cdb380", "#036564", "#033649", "#031634"}, + {"#490a3d", "#bd1550", "#e97f02", "#f8ca00", "#8a9b0f"}, + {"#594f4f", "#547980", "#45ada8", "#9de0ad", "#e5fcc2"}, + {"#00a0b0", "#6a4a3c", "#cc333f", "#eb6841", "#edc951"}, + {"#e94e77", "#d68189", "#c6a49a", "#c6e5d9", "#f4ead5"}, + {"#3fb8af", "#7fc7af", "#dad8a7", "#ff9e9d", "#ff3d7f"}, + {"#d9ceb2", "#948c75", "#d5ded9", "#7a6a53", "#99b2b7"}, + {"#ffffff", "#cbe86b", "#f2e9e1", "#1c140d", "#cbe86b"}, + {"#efffcd", "#dce9be", "#555152", "#2e2633", "#99173c"}, + {"#343838", "#005f6b", "#008c9e", "#00b4cc", "#00dffc"}, + {"#413e4a", "#73626e", "#b38184", "#f0b49e", "#f7e4be"}, + {"#ff4e50", "#fc913a", "#f9d423", "#ede574", 
"#e1f5c4"}, + {"#99b898", "#fecea8", "#ff847c", "#e84a5f", "#2a363b"}, + {"#655643", "#80bca3", "#f6f7bd", "#e6ac27", "#bf4d28"}, + {"#00a8c6", "#40c0cb", "#f9f2e7", "#aee239", "#8fbe00"}, + {"#351330", "#424254", "#64908a", "#e8caa4", "#cc2a41"}, + {"#554236", "#f77825", "#d3ce3d", "#f1efa5", "#60b99a"}, + {"#5d4157", "#838689", "#a8caba", "#cad7b2", "#ebe3aa"}, + {"#8c2318", "#5e8c6a", "#88a65e", "#bfb35a", "#f2c45a"}, + {"#fad089", "#ff9c5b", "#f5634a", "#ed303c", "#3b8183"}, + {"#ff4242", "#f4fad2", "#d4ee5e", "#e1edb9", "#f0f2eb"}, + {"#f8b195", "#f67280", "#c06c84", "#6c5b7b", "#355c7d"}, + {"#d1e751", "#ffffff", "#000000", "#4dbce9", "#26ade4"}, + {"#1b676b", "#519548", "#88c425", "#bef202", "#eafde6"}, + {"#5e412f", "#fcebb6", "#78c0a8", "#f07818", "#f0a830"}, + {"#bcbdac", "#cfbe27", "#f27435", "#f02475", "#3b2d38"}, + {"#452632", "#91204d", "#e4844a", "#e8bf56", "#e2f7ce"}, + {"#eee6ab", "#c5bc8e", "#696758", "#45484b", "#36393b"}, + {"#f0d8a8", "#3d1c00", "#86b8b1", "#f2d694", "#fa2a00"}, + {"#2a044a", "#0b2e59", "#0d6759", "#7ab317", "#a0c55f"}, + {"#f04155", "#ff823a", "#f2f26f", "#fff7bd", "#95cfb7"}, + {"#b9d7d9", "#668284", "#2a2829", "#493736", "#7b3b3b"}, + {"#bbbb88", "#ccc68d", "#eedd99", "#eec290", "#eeaa88"}, + {"#b3cc57", "#ecf081", "#ffbe40", "#ef746f", "#ab3e5b"}, + {"#a3a948", "#edb92e", "#f85931", "#ce1836", "#009989"}, + {"#300030", "#480048", "#601848", "#c04848", "#f07241"}, + {"#67917a", "#170409", "#b8af03", "#ccbf82", "#e33258"}, + {"#aab3ab", "#c4cbb7", "#ebefc9", "#eee0b7", "#e8caaf"}, + {"#e8d5b7", "#0e2430", "#fc3a51", "#f5b349", "#e8d5b9"}, + {"#ab526b", "#bca297", "#c5ceae", "#f0e2a4", "#f4ebc3"}, + {"#607848", "#789048", "#c0d860", "#f0f0d8", "#604848"}, + {"#b6d8c0", "#c8d9bf", "#dadabd", "#ecdbbc", "#fedcba"}, + {"#a8e6ce", "#dcedc2", "#ffd3b5", "#ffaaa6", "#ff8c94"}, + {"#3e4147", "#fffedf", "#dfba69", "#5a2e2e", "#2a2c31"}, + {"#fc354c", "#29221f", "#13747d", "#0abfbc", "#fcf7c5"}, + {"#cc0c39", "#e6781e", "#c8cf02", "#f8fcc1", "#1693a7"}, + {"#1c2130", "#028f76", "#b3e099", "#ffeaad", "#d14334"}, + {"#a7c5bd", "#e5ddcb", "#eb7b59", "#cf4647", "#524656"}, + {"#dad6ca", "#1bb0ce", "#4f8699", "#6a5e72", "#563444"}, + {"#5c323e", "#a82743", "#e15e32", "#c0d23e", "#e5f04c"}, + {"#edebe6", "#d6e1c7", "#94c7b6", "#403b33", "#d3643b"}, + {"#fdf1cc", "#c6d6b8", "#987f69", "#e3ad40", "#fcd036"}, + {"#230f2b", "#f21d41", "#ebebbc", "#bce3c5", "#82b3ae"}, + {"#b9d3b0", "#81bda4", "#b28774", "#f88f79", "#f6aa93"}, + {"#3a111c", "#574951", "#83988e", "#bcdea5", "#e6f9bc"}, + {"#5e3929", "#cd8c52", "#b7d1a3", "#dee8be", "#fcf7d3"}, + {"#1c0113", "#6b0103", "#a30006", "#c21a01", "#f03c02"}, + {"#000000", "#9f111b", "#b11623", "#292c37", "#cccccc"}, + {"#382f32", "#ffeaf2", "#fcd9e5", "#fbc5d8", "#f1396d"}, + {"#e3dfba", "#c8d6bf", "#93ccc6", "#6cbdb5", "#1a1f1e"}, + {"#f6f6f6", "#e8e8e8", "#333333", "#990100", "#b90504"}, + {"#1b325f", "#9cc4e4", "#e9f2f9", "#3a89c9", "#f26c4f"}, + {"#a1dbb2", "#fee5ad", "#faca66", "#f7a541", "#f45d4c"}, + {"#c1b398", "#605951", "#fbeec2", "#61a6ab", "#accec0"}, + {"#5e9fa3", "#dcd1b4", "#fab87f", "#f87e7b", "#b05574"}, + {"#951f2b", "#f5f4d7", "#e0dfb1", "#a5a36c", "#535233"}, + {"#8dccad", "#988864", "#fea6a2", "#f9d6ac", "#ffe9af"}, + {"#2d2d29", "#215a6d", "#3ca2a2", "#92c7a3", "#dfece6"}, + {"#413d3d", "#040004", "#c8ff00", "#fa023c", "#4b000f"}, + {"#eff3cd", "#b2d5ba", "#61ada0", "#248f8d", "#605063"}, + {"#ffefd3", "#fffee4", "#d0ecea", "#9fd6d2", "#8b7a5e"}, + {"#cfffdd", "#b4dec1", "#5c5863", "#a85163", "#ff1f4c"}, + 
{"#9dc9ac", "#fffec7", "#f56218", "#ff9d2e", "#919167"}, + {"#4e395d", "#827085", "#8ebe94", "#ccfc8e", "#dc5b3e"}, + {"#a8a7a7", "#cc527a", "#e8175d", "#474747", "#363636"}, + {"#f8edd1", "#d88a8a", "#474843", "#9d9d93", "#c5cfc6"}, + {"#046d8b", "#309292", "#2fb8ac", "#93a42a", "#ecbe13"}, + {"#f38a8a", "#55443d", "#a0cab5", "#cde9ca", "#f1edd0"}, + {"#a70267", "#f10c49", "#fb6b41", "#f6d86b", "#339194"}, + {"#ff003c", "#ff8a00", "#fabe28", "#88c100", "#00c176"}, + {"#ffedbf", "#f7803c", "#f54828", "#2e0d23", "#f8e4c1"}, + {"#4e4d4a", "#353432", "#94ba65", "#2790b0", "#2b4e72"}, + {"#0ca5b0", "#4e3f30", "#fefeeb", "#f8f4e4", "#a5b3aa"}, + {"#4d3b3b", "#de6262", "#ffb88c", "#ffd0b3", "#f5e0d3"}, + {"#fffbb7", "#a6f6af", "#66b6ab", "#5b7c8d", "#4f2958"}, + {"#edf6ee", "#d1c089", "#b3204d", "#412e28", "#151101"}, + {"#9d7e79", "#ccac95", "#9a947c", "#748b83", "#5b756c"}, + {"#fcfef5", "#e9ffe1", "#cdcfb7", "#d6e6c3", "#fafbe3"}, + {"#9cddc8", "#bfd8ad", "#ddd9ab", "#f7af63", "#633d2e"}, + {"#30261c", "#403831", "#36544f", "#1f5f61", "#0b8185"}, + {"#aaff00", "#ffaa00", "#ff00aa", "#aa00ff", "#00aaff"}, + {"#d1313d", "#e5625c", "#f9bf76", "#8eb2c5", "#615375"}, + {"#ffe181", "#eee9e5", "#fad3b2", "#ffba7f", "#ff9c97"}, + {"#73c8a9", "#dee1b6", "#e1b866", "#bd5532", "#373b44"}, + {"#805841", "#dcf7f3", "#fffcdd", "#ffd8d8", "#f5a2a2"}, +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/data/company.go b/vendor/github.com/brianvoe/gofakeit/v7/data/company.go new file mode 100644 index 0000000000..43b6a2dc93 --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/data/company.go @@ -0,0 +1,10 @@ +package data + +// Company consists of company information +var Company = map[string][]string{ + "name": {"3 Round Stones, Inc.", "48 Factoring Inc.", "5PSolutions", "Abt Associates", "Accela", "Accenture", "AccuWeather", "Acxiom", "Adaptive", "Adobe Digital Government", "Aidin", "Alarm.com", "Allianz", "Allied Van Lines", "AllState Insurance Group", "Alltuition", "Altova", "Amazon Web Services", "American Red Ball Movers", "Amida Technology Solutions", "Analytica", "Apextech LLC", "Appallicious", "Aquicore", "Archimedes Inc.", "AreaVibes Inc.", "Arpin Van Lines", "Arrive Labs", "ASC Partners", "Asset4", "Atlas Van Lines", "AtSite", "Aunt Bertha, Inc.", "Aureus Sciences (*Now part of Elsevier)", "AutoGrid Systems", "Avalara", "Avvo", "Ayasdi", "Azavea", "BaleFire Global", "Barchart", "Be Informed", "Bekins", "Berkery Noyes MandASoft", "Berkshire Hathaway", "BetterLesson", "BillGuard", "Bing", "Biovia", "BizVizz", "BlackRock", "Bloomberg", "Booz Allen Hamilton", "Boston Consulting Group", "Boundless", "Bridgewater", "Brightscope", "BuildFax", "Buildingeye", "BuildZoom", "Business and Legal Resources", "Business Monitor International", "Calcbench, Inc.", "Cambridge Information Group", "Cambridge Semantics", "CAN Capital", "Canon", "Capital Cube", "Cappex", "Captricity", "CareSet Systems", "Careset.com", "CARFAX", "Caspio", "Castle Biosciences", "CB Insights", "Ceiba Solutions", "Center for Responsive Politics", "Cerner", "Certara", "CGI", "Charles River Associates", "Charles Schwab Corp.", "Chemical Abstracts Service", "Child Care Desk", "Chubb", "Citigroup", "CityScan", "CitySourced", "Civic Impulse LLC", "Civic Insight", "Civinomics", "Civis Analytics", "Clean Power Finance", "ClearHealthCosts", "ClearStory Data", "Climate Corporation", "CliniCast", "Cloudmade", "Cloudspyre", "Code for America", "Code-N", "Collective IP", "College Abacus, an ECMC initiative", "College Board", "Compared Care", 
"Compendia Bioscience Life Technologies", "Compliance and Risks", "Computer Packages Inc", "CONNECT-DOT LLC.", "ConnectEDU", "Connotate", "Construction Monitor LLC", "Consumer Reports", "CoolClimate", "Copyright Clearance Center", "CoreLogic", "CostQuest", "Credit Karma", "Credit Sesame", "CrowdANALYTIX", "Dabo Health", "DataLogix", "DataMade", "DataMarket", "Datamyne", "DataWeave", "Deloitte", "DemystData", "Department of Better Technology", "Development Seed", "Docket Alarm, Inc.", "Dow Jones & Co.", "Dun & Bradstreet", "Earth Networks", "EarthObserver App", "Earthquake Alert!", "Eat Shop Sleep", "Ecodesk", "eInstitutional", "Embark", "EMC", "Energy Points, Inc.", "Energy Solutions Forum", "Enervee Corporation", "Enigma.io", "Ensco", "Environmental Data Resources", "Epsilon", "Equal Pay for Women", "Equifax", "Equilar", "Ernst & Young LLP", "eScholar LLC.", "Esri", "Estately", "Everyday Health", "Evidera", "Experian", "Expert Health Data Programming, Inc.", "Exversion", "Ez-XBRL", "Factset", "Factual", "Farmers", "FarmLogs", "Fastcase", "Fidelity Investments", "FindTheBest.com", "First Fuel Software", "FirstPoint, Inc.", "Fitch", "FlightAware", "FlightStats", "FlightView", "Food+Tech Connect", "Forrester Research", "Foursquare", "Fujitsu", "Funding Circle", "FutureAdvisor", "Fuzion Apps, Inc.", "Gallup", "Galorath Incorporated", "Garmin", "Genability", "GenoSpace", "Geofeedia", "Geolytics", "Geoscape", "GetRaised", "GitHub", "Glassy Media", "Golden Helix", "GoodGuide", "Google Maps", "Google Public Data Explorer", "Government Transaction Services", "Govini", "GovTribe", "Govzilla, Inc.", "gRadiant Research LLC", "Graebel Van Lines", "Graematter, Inc.", "Granicus", "GreatSchools", "GuideStar", "H3 Biomedicine", "Harris Corporation", "HDScores, Inc", "Headlight", "Healthgrades", "Healthline", "HealthMap", "HealthPocket, Inc.", "HelloWallet", "HERE", "Honest Buildings", "HopStop", "Housefax", "How's My Offer?", "IBM", "ideas42", "iFactor Consulting", "IFI CLAIMS Patent Services", "iMedicare", "Impact Forecasting (Aon)", "Impaq International", "Import.io", "IMS Health", "InCadence", "indoo.rs", "InfoCommerce Group", "Informatica", "InnoCentive", "Innography", "Innovest Systems", "Inovalon", "Inrix Traffic", "Intelius", "Intermap Technologies", "Investormill", "Iodine", "IPHIX", "iRecycle", "iTriage", "IVES Group Inc", "IW Financial", "JJ Keller", "J.P. 
Morgan Chase", "Junar, Inc.", "Junyo", "Jurispect", "Kaiser Permanante", "karmadata", "Keychain Logistics Corp.", "KidAdmit, Inc.", "Kimono Labs", "KLD Research", "Knoema", "Knowledge Agency", "KPMG", "Kroll Bond Ratings Agency", "Kyruus", "Lawdragon", "Legal Science Partners", "(Leg)Cyte", "LegiNation, Inc.", "LegiStorm", "Lenddo", "Lending Club", "Level One Technologies", "LexisNexis", "Liberty Mutual Insurance Cos.", "Lilly Open Innovation Drug Discovery", "Liquid Robotics", "Locavore", "LOGIXDATA, LLC", "LoopNet", "Loqate, Inc.", "LoseIt.com", "LOVELAND Technologies", "Lucid", "Lumesis, Inc.", "Mango Transit", "Mapbox", "Maponics", "MapQuest", "Marinexplore, Inc.", "MarketSense", "Marlin & Associates", "Marlin Alter and Associates", "McGraw Hill Financial", "McKinsey", "MedWatcher", "Mercaris", "Merrill Corp.", "Merrill Lynch", "MetLife", "mHealthCoach", "MicroBilt Corporation", "Microsoft Windows Azure Marketplace", "Mint", "Moody's", "Morgan Stanley", "Morningstar, Inc.", "Mozio", "MuckRock.com", "Munetrix", "Municode", "National Van Lines", "Nationwide Mutual Insurance Company", "Nautilytics", "Navico", "NERA Economic Consulting", "NerdWallet", "New Media Parents", "Next Step Living", "NextBus", "nGAP Incorporated", "Nielsen", "Noesis", "NonprofitMetrics", "North American Van Lines", "Noveda Technologies", "NuCivic", "Numedii", "Oliver Wyman", "OnDeck", "OnStar", "Ontodia, Inc", "Onvia", "Open Data Nation", "OpenCounter", "OpenGov", "OpenPlans", "OpportunitySpace, Inc.", "Optensity", "optiGov", "OptumInsight", "Orlin Research", "OSIsoft", "OTC Markets", "Outline", "Oversight Systems", "Overture Technologies", "Owler", "Palantir Technologies", "Panjiva", "Parsons Brinckerhoff", "Patently-O", "PatientsLikeMe", "Pave", "Paxata", "PayScale, Inc.", "PeerJ", "People Power", "Persint", "Personal Democracy Media", "Personal, Inc.", "Personalis", "Peterson's", "PEV4me.com", "PIXIA Corp", "PlaceILive.com", "PlanetEcosystems", "PlotWatt", "Plus-U", "PolicyMap", "Politify", "Poncho App", "POPVOX", "Porch", "PossibilityU", "PowerAdvocate", "Practice Fusion", "Predilytics", "PricewaterhouseCoopers (PWC)", "ProgrammableWeb", "Progressive Insurance Group", "Propeller Health", "ProPublica", "PublicEngines", "PYA Analytics", "Qado Energy, Inc.", "Quandl", "Quertle", "Quid", "R R Donnelley", "RAND Corporation", "Rand McNally", "Rank and Filed", "Ranku", "Rapid Cycle Solutions", "realtor.com", "Recargo", "ReciPal", "Redfin", "RedLaser", "Reed Elsevier", "REI Systems", "Relationship Science", "Remi", "Retroficiency", "Revaluate", "Revelstone", "Rezolve Group", "Rivet Software", "Roadify Transit", "Robinson + Yu", "Russell Investments", "Sage Bionetworks", "SAP", "SAS", "Scale Unlimited", "Science Exchange", "Seabourne", "SeeClickFix", "SigFig", "Simple Energy", "SimpleTuition", "SlashDB", "Smart Utility Systems", "SmartAsset", "SmartProcure", "Smartronix", "SnapSense", "Social Explorer", "Social Health Insights", "SocialEffort Inc", "Socrata", "Solar Census", "SolarList", "Sophic Systems Alliance", "S&P Capital IQ", "SpaceCurve", "SpeSo Health", "Spikes Cavell Analytic Inc", "Splunk", "Spokeo", "SpotCrime", "SpotHero.com", "Stamen Design", "Standard and Poor's", "State Farm Insurance", "Sterling Infosystems", "Stevens Worldwide Van Lines", "STILLWATER SUPERCOMPUTING INC", "StockSmart", "Stormpulse", "StreamLink Software", "StreetCred Software, Inc", "StreetEasy", "Suddath", "Symcat", "Synthicity", "T. 
Rowe Price", "Tableau Software", "TagniFi", "Telenav", "Tendril", "Teradata", "The Advisory Board Company", "The Bridgespan Group", "The DocGraph Journal", "The Govtech Fund", "The Schork Report", "The Vanguard Group", "Think Computer Corporation", "Thinknum", "Thomson Reuters", "TopCoder", "TowerData", "TransparaGov", "TransUnion", "TrialTrove", "TrialX", "Trintech", "TrueCar", "Trulia", "TrustedID", "TuvaLabs", "Uber", "Unigo LLC", "United Mayflower", "Urban Airship", "Urban Mapping, Inc", "US Green Data", "U.S. News Schools", "USAA Group", "USSearch", "Verdafero", "Vimo", "VisualDoD, LLC", "Vital Axiom | Niinja", "VitalChek", "Vitals", "Vizzuality", "Votizen", "Walk Score", "WaterSmart Software", "WattzOn", "Way Better Patents", "Weather Channel", "Weather Decision Technologies", "Weather Underground", "WebFilings", "Webitects", "WebMD", "Weight Watchers", "WeMakeItSafer", "Wheaton World Wide Moving", "Whitby Group", "Wolfram Research", "Wolters Kluwer", "Workhands", "Xatori", "Xcential", "xDayta", "Xignite", "Yahoo", "Zebu Compliance Solutions", "Yelp", "YourMapper", "Zillow", "ZocDoc", "Zonability", "Zoner", "Zurich Insurance (Risk Room)"}, + "suffix": {"Inc", "and Sons", "LLC", "Group"}, + "buzzwords": {"Adaptive", "Advanced", "Ameliorated", "Assimilated", "Automated", "Balanced", "Business-focused", "Centralized", "Cloned", "Compatible", "Configurable", "Cross-group", "Cross-platform", "Customer-focused", "Customizable", "De-engineered", "Decentralized", "Devolved", "Digitized", "Distributed", "Diverse", "Down-sized", "Enhanced", "Enterprise-wide", "Ergonomic", "Exclusive", "Expanded", "Extended", "Face to face", "Focused", "Front-line", "Fully-configurable", "Function-based", "Fundamental", "Future-proofed", "Grass-roots", "Horizontal", "Implemented", "Innovative", "Integrated", "Intuitive", "Inverse", "Managed", "Mandatory", "Monitored", "Multi-channelled", "Multi-lateral", "Multi-layered", "Multi-tiered", "Networked", "Object-based", "Open-architected", "Open-source", "Operative", "Optimized", "Optional", "Organic", "Organized", "Persevering", "Persistent", "Phased", "Polarised", "Pre-emptive", "Proactive", "Profit-focused", "Profound", "Programmable", "Progressive", "Public-key", "Quality-focused", "Re-contextualized", "Re-engineered", "Reactive", "Realigned", "Reduced", "Reverse-engineered", "Right-sized", "Robust", "Seamless", "Secured", "Self-enabling", "Sharable", "Stand-alone", "Streamlined", "Switchable", "Synchronised", "Synergistic", "Synergized", "Team-oriented", "Total", "Triple-buffered", "Universal", "Up-sized", "Upgradable", "User-centric", "User-friendly", "Versatile", "Virtual", "Vision-oriented", "Visionary", "24 hour", "24/7", "3rd generation", "4th generation", "5th generation", "6th generation", "actuating", "analyzing", "asymmetric", "asynchronous", "attitude-oriented", "background", "bandwidth-monitored", "bi-directional", "bifurcated", "bottom-line", "clear-thinking", "client-driven", "client-server", "coherent", "cohesive", "composite", "content-based", "context-sensitive", "contextually-based", "dedicated", "demand-driven", "didactic", "directional", "discrete", "disintermediate", "dynamic", "eco-centric", "empowering", "encompassing", "even-keeled", "executive", "explicit", "exuding", "fault-tolerant", "foreground", "fresh-thinking", "full-range", "global", "grid-enabled", "heuristic", "high-level", "holistic", "homogeneous", "human-resource", "hybrid", "impactful", "incremental", "intangible", "interactive", "intermediate", "leading edge", "local", 
"logistical", "maximized", "methodical", "mission-critical", "mobile", "modular", "motivating", "multi-state", "multi-tasking", "multimedia", "national", "needs-based", "neutral", "next generation", "non-volatile", "object-oriented", "optimal", "optimizing", "radical", "real-time", "reciprocal", "regional", "responsive", "scalable", "secondary", "solution-oriented", "stable", "static", "system-worthy", "systematic", "systemic", "tangible", "tertiary", "transitional", "uniform", "upward-trending", "user-facing", "value-added", "web-enabled", "well-modulated", "zero administration", "zero defect", "zero tolerance", "Graphic Interface", "Graphical User Interface", "ability", "access", "adapter", "algorithm", "alliance", "analyzer", "application", "approach", "architecture", "archive", "array", "artificial intelligence", "attitude", "benchmark", "budgetary management", "capability", "capacity", "challenge", "circuit", "collaboration", "complexity", "concept", "conglomeration", "contingency", "core", "customer loyalty", "data-warehouse", "database", "definition", "emulation", "encoding", "encryption", "extranet", "firmware", "flexibility", "focus group", "forecast", "frame", "framework", "function", "functionalities", "groupware", "hardware", "help-desk", "hierarchy", "hub", "implementation", "info-mediaries", "infrastructure", "initiative", "installation", "instruction set", "interface", "internet solution", "intranet", "knowledge base", "knowledge user", "leverage", "local area network", "matrices", "matrix", "methodology", "middleware", "migration", "model", "moderator", "monitoring", "moratorium", "neural-net", "open architecture", "open system", "orchestration", "paradigm", "parallelism", "policy", "portal", "pricing structure", "process improvement", "product", "productivity", "project", "projection", "protocol", "secured line", "service-desk", "software", "solution", "standardization", "strategy", "structure", "success", "superstructure", "support", "synergy", "system engine", "task-force", "throughput", "time-frame", "toolset", "utilisation", "website", "workforce"}, + "bs": {"aggregate", "architect", "benchmark", "brand", "cultivate", "deliver", "deploy", "disintermediate", "drive", "e-enable", "embrace", "empower", "enable", "engage", "engineer", "enhance", "envisioneer", "evolve", "expedite", "exploit", "extend", "facilitate", "generate", "grow", "harness", "implement", "incentivize", "incubate", "innovate", "integrate", "iterate", "leverage", "matrix", "maximize", "mesh", "monetize", "morph", "optimize", "orchestrate", "productize", "recontextualize", "redefine", "reintermediate", "reinvent", "repurpose", "revolutionize", "scale", "seize", "strategize", "streamline", "syndicate", "synergize", "synthesize", "target", "transform", "transition", "unleash", "utilize", "visualize", "whiteboard", "24-365", "24-7", "B2B", "B2C", "back-end", "best-of-breed", "bleeding-edge", "bricks-and-clicks", "clicks-and-mortar", "collaborative", "compelling", "cross-media", "cross-platform", "customized", "cutting-edge", "distributed", "dot-com", "dynamic", "e-business", "efficient", "end-to-end", "enterprise", "extensible", "frictionless", "front-end", "global", "granular", "holistic", "impactful", "innovative", "integrated", "interactive", "intuitive", "killer", "leading-edge", "magnetic", "mission-critical", "next-generation", "one-to-one", "open-source", "out-of-the-box", "plug-and-play", "proactive", "real-time", "revolutionary", "rich", "robust", "scalable", "seamless", "sexy", "sticky", 
"strategic", "synergistic", "transparent", "turn-key", "ubiquitous", "user-centric", "value-added", "vertical", "viral", "virtual", "visionary", "web-enabled", "wireless", "world-class", "ROI", "action-items", "applications", "architectures", "bandwidth", "channels", "communities", "content", "convergence", "deliverables", "e-business", "e-commerce", "e-markets", "e-services", "e-tailers", "experiences", "eyeballs", "functionalities", "infomediaries", "infrastructures", "initiatives", "interfaces", "markets", "methodologies", "metrics", "mindshare", "models", "networks", "niches", "paradigms", "partnerships", "platforms", "portals", "relationships", "schemas", "solutions", "supply-chains", "synergies", "systems", "technologies", "users", "vortals", "web services", "web-readiness"}, + "blurb": {"Advancement", "Advantage", "Ambition", "Balance", "Belief", "Benefits", "Care", "Challenge", "Change", "Choice", "Commitment", "Comfort", "Connection", "Consistency", "Creativity", "Dedication", "Discovery", "Diversity", "Dream", "Dreams", "Drive", "Ease", "Efficiency", "Empowerment", "Endurance", "Energy", "Engagement", "Environment", "Enterprise", "Excellence", "Exclusivity", "Experience", "Exploration", "Expression", "Family", "Flexibility", "Focus", "Freedom", "Future", "Future", "Growth", "Harmony", "Health", "Heart", "History", "Home", "Honesty", "Hope", "Impact", "Innovation", "Inspiration", "Integrity", "Joy", "Journey", "Knowledge", "Leadership", "Legacy", "Life", "Luxury", "Money", "Motivation", "Optimism", "Partnership", "Passion", "Peace", "People", "Performance", "Perseverance", "Pleasure", "Power", "Pride", "Progress", "Promise", "Quality", "Quality", "Reliability", "Resilience", "Respect", "Revolution", "Safety", "Service", "Simplicity", "Solutions", "Solidarity", "Strength", "Style", "Success", "Sustainability", "Taste", "Teamwork", "Technology", "Time", "Transformation", "Trust", "Unity", "Value", "Versatility", "Vision", "Wellness", "World"}, +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/data/computer.go b/vendor/github.com/brianvoe/gofakeit/v7/data/computer.go new file mode 100644 index 0000000000..b682c6f820 --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/data/computer.go @@ -0,0 +1,8 @@ +package data + +// Computer consists of computer information +var Computer = map[string][]string{ + "linux_processor": {"i686", "x86_64"}, + "mac_processor": {"Intel", "PPC", "U; Intel", "U; PPC"}, + "windows_platform": {"Windows NT 6.2", "Windows NT 6.1", "Windows NT 6.0", "Windows NT 5.2", "Windows NT 5.1", "Windows NT 5.01", "Windows NT 5.0", "Windows NT 4.0", "Windows 98; Win 9x 4.90", "Windows 98", "Windows 95", "Windows CE"}, +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/data/currency.go b/vendor/github.com/brianvoe/gofakeit/v7/data/currency.go new file mode 100644 index 0000000000..792fc710d5 --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/data/currency.go @@ -0,0 +1,7 @@ +package data + +// Currency consists of currency information +var Currency = map[string][]string{ + "short": {"AED", "AFN", "ALL", "AMD", "ANG", "AOA", "ARS", "AUD", "AWG", "AZN", "BAM", "BBD", "BDT", "BGN", "BHD", "BIF", "BMD", "BND", "BOB", "BRL", "BSD", "BTN", "BWP", "BYN", "BZD", "CAD", "CDF", "CHF", "CLP", "CNY", "COP", "CRC", "CUC", "CUP", "CVE", "CZK", "DJF", "DKK", "DOP", "DZD", "EGP", "ERN", "ETB", "EUR", "FJD", "FKP", "GBP", "GEL", "GHS", "GIP", "GMD", "GNF", "GTQ", "GYD", "HKD", "HNL", "HTG", "HUF", "IDR", "ILS", "INR", "IQD", "IRR", "ISK", "JMD", "JOD", "JPY", "KES", 
"KGS", "KHR", "KMF", "KPW", "KRW", "KWD", "KYD", "KZT", "LAK", "LBP", "LKR", "LRD", "LSL", "LYD", "MAD", "MDL", "MGA", "MKD", "MMK", "MNT", "MOP", "MRU", "MUR", "MVR", "MWK", "MXN", "MYR", "MZN", "NAD", "NGN", "NIO", "NOK", "NPR", "NZD", "OMR", "PAB", "PEN", "PGK", "PHP", "PKR", "PLN", "PYG", "QAR", "RON", "RSD", "RUB", "RWF", "SAR", "SBD", "SCR", "SDG", "SEK", "SGD", "SHP", "SLL", "SOS", "SRD", "STN", "SVC", "SYP", "SZL", "THB", "TJS", "TMT", "TND", "TOP", "TRY", "TTD", "TWD", "TZS", "UAH", "UGX", "USD", "UYU", "UZS", "VES", "VND", "VUV", "WST", "XAF", "XCD", "XDR", "XOF", "XPF", "YER", "ZAR", "ZMW", "ZWL"}, + "long": {"United Arab Emirates Dirham", "Afghanistan Afghani", "Albania Lek", "Armenia Dram", "Netherlands Antilles Guilder", "Angola Kwanza", "Argentina Peso", "Australia Dollar", "Aruba Guilder", "Azerbaijan New Manat", "Bosnia and Herzegovina Convertible Marka", "Barbados Dollar", "Bangladesh Taka", "Bulgaria Lev", "Bahrain Dinar", "Burundi Franc", "Bermuda Dollar", "Brunei Darussalam Dollar", "Bolivia Boliviano", "Brazil Real", "Bahamas Dollar", "Bhutan Ngultrum", "Botswana Pula", "Belarus Ruble", "Belize Dollar", "Canada Dollar", "Congo/Kinshasa Franc", "Switzerland Franc", "Chile Peso", "China Yuan Renminbi", "Colombia Peso", "Costa Rica Colon", "Cuba Convertible Peso", "Cuba Peso", "Cape Verde Escudo", "Czech Republic Koruna", "Djibouti Franc", "Denmark Krone", "Dominican Republic Peso", "Algeria Dinar", "Egypt Pound", "Eritrea Nakfa", "Ethiopia Birr", "Euro Member Countries", "Fiji Dollar", "Falkland Islands (Malvinas) Pound", "United Kingdom Pound", "Georgia Lari", "Ghana Cedi", "Gibraltar Pound", "Gambia Dalasi", "Guinea Franc", "Guatemala Quetzal", "Guyana Dollar", "Hong Kong Dollar", "Honduras Lempira", "Haiti Gourde", "Hungary Forint", "Indonesia Rupiah", "Israel Shekel", "India Rupee", "Iraq Dinar", "Iran Rial", "Iceland Krona", "Jamaica Dollar", "Jordan Dinar", "Japan Yen", "Kenya Shilling", "Kyrgyzstan Som", "Cambodia Riel", "Comoros Franc", "Korea (North) Won", "Korea (South) Won", "Kuwait Dinar", "Cayman Islands Dollar", "Kazakhstan Tenge", "Laos Kip", "Lebanon Pound", "Sri Lanka Rupee", "Liberia Dollar", "Lesotho Loti", "Libya Dinar", "Morocco Dirham", "Moldova Leu", "Madagascar Ariary", "Macedonia Denar", "Myanmar (Burma) Kyat", "Mongolia Tughrik", "Macau Pataca", "Mauritania Ouguiya", "Mauritius Rupee", "Maldives (Maldive Islands) Rufiyaa", "Malawi Kwacha", "Mexico Peso", "Malaysia Ringgit", "Mozambique Metical", "Namibia Dollar", "Nigeria Naira", "Nicaragua Cordoba", "Norway Krone", "Nepal Rupee", "New Zealand Dollar", "Oman Rial", "Panama Balboa", "Peru Nuevo Sol", "Papua New Guinea Kina", "Philippines Peso", "Pakistan Rupee", "Poland Zloty", "Paraguay Guarani", "Qatar Riyal", "Romania New Leu", "Serbia Dinar", "Russia Ruble", "Rwanda Franc", "Saudi Arabia Riyal", "Solomon Islands Dollar", "Seychelles Rupee", "Sudan Pound", "Sweden Krona", "Singapore Dollar", "Saint Helena Pound", "Sierra Leone Leone", "Somalia Shilling", "Suriname Dollar", "São Tomé and Príncipe Dobra", "El Salvador Colon", "Syria Pound", "Swaziland Lilangeni", "Thailand Baht", "Tajikistan Somoni", "Turkmenistan Manat", "Tunisia Dinar", "Tonga Pa'anga", "Turkey Lira", "Trinidad and Tobago Dollar", "Taiwan New Dollar", "Tanzania Shilling", "Ukraine Hryvnia", "Uganda Shilling", "United States Dollar", "Uruguay Peso", "Uzbekistan Som", "Venezuela Bolivar", "Viet Nam Dong", "Vanuatu Vatu", "Samoa Tala", "Communauté Financière Africaine (BEAC) CFA Franc BEAC", "East Caribbean Dollar", 
"International Monetary Fund (IMF) Special Drawing Rights", "Communauté Financière Africaine (BCEAO) Franc", "Comptoirs Français du Pacifique (CFP) Franc", "Yemen Rial", "South Africa Rand", "Zambia Kwacha", "Zimbabwe Dollar"}, +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/data/data.go b/vendor/github.com/brianvoe/gofakeit/v7/data/data.go new file mode 100644 index 0000000000..1b8731ff2e --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/data/data.go @@ -0,0 +1,92 @@ +package data + +// Data consists of the main set of fake information +var Data = map[string]map[string][]string{ + "person": Person, + "address": Address, + "company": Company, + "job": Job, + "lorem": Lorem, + "language": Languages, + "internet": Internet, + "file": Files, + "color": Colors, + "computer": Computer, + "hipster": Hipster, + "beer": Beer, + "hacker": Hacker, + "animal": Animal, + "currency": Currency, + "log_level": LogLevels, + "timezone": TimeZone, + "car": Car, + "emoji": Emoji, + "word": Word, + "sentence": Sentence, + "food": Food, + "minecraft": Minecraft, + "celebrity": Celebrity, + "error": Error, + "html": Html, + "book": Books, + "movie": Movies, + "school": School, + "song": Songs, + "product": Product, + "bank": Bank, +} + +func List() map[string][]string { + var list = make(map[string][]string) + + // Loop through the data and add the keys to the list + for key := range Data { + list[key] = []string{} + + // Loop through the sub data and add the keys to the list + for subkey := range Data[key] { + list[key] = append(list[key], subkey) + } + } + + return list +} + +func Get(key string) map[string][]string { + // Make sure the key exists, if not return an empty map + if _, ok := Data[key]; !ok { + return make(map[string][]string) + } + + return Data[key] +} + +func Set(key string, data map[string][]string) { + Data[key] = data +} + +func Remove(key string) { + delete(Data, key) +} + +func GetSubData(key, subkey string) []string { + // Make sure the key exists, if not return an empty map + if _, ok := Data[key]; !ok { + return []string{} + } + + return Data[key][subkey] +} + +func SetSub(key, subkey string, data []string) { + // Make sure the key exists, if not add it + if _, ok := Data[key]; !ok { + Data[key] = make(map[string][]string) + } + + Data[key][subkey] = data +} + +func RemoveSub(key, subkey string) { + delete(Data[key], subkey) +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/data/datetime.go b/vendor/github.com/brianvoe/gofakeit/v7/data/datetime.go new file mode 100644 index 0000000000..1007b76991 --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/data/datetime.go @@ -0,0 +1,10 @@ +package data + +// TimeZone is an array of short and long timezones +var TimeZone = map[string][]string{ + "offset": {"-12", "-11", "-10", "-8", "-7", "-7", "-8", "-7", "-6", "-6", "-6", "-5", "-5", "-6", "-5", "-4", "-4", "-4.5", "-4", "-3", "-4", "-4", "-4", "-2.5", "-3", "-3", "-3", "-3", "-3", "-3", "-2", "-1", "0", "-1", "1", "0", "0", "1", "1", "0", "2", "2", "2", "2", "1", "1", "3", "3", "2", "3", "3", "2", "3", "3", "3", "2", "3", "3", "3", "3", "3", "3", "4", "4.5", "4", "5", "4", "4", "4", "4.5", "5", "5", "5", "5.5", "5.5", "5.75", "6", "6", "6.5", "7", "7", "8", "8", "8", "8", "8", "8", "9", "9", "9", "9.5", "9.5", "10", "10", "10", "10", "10", "11", "11", "12", "12", "12", "12", "13", "13", "13"}, + "abr": {"DST", "U", "HST", "AKDT", "PDT", "PDT", "PST", "UMST", "MDT", "MDT", "CAST", "CDT", "CDT", "CCST", "SPST", "EDT", "UEDT", "VST", "PYT", "ADT", "CBST", "SWST", 
"PSST", "NDT", "ESAST", "AST", "SEST", "GDT", "MST", "BST", "U", "MDT", "ADT", "CVST", "MDT", "UTC", "GMT", "BST", "GDT", "GST", "WEDT", "CEDT", "RDT", "CEDT", "WCAST", "NST", "GDT", "MEDT", "EST", "SDT", "EEDT", "SAST", "FDT", "TDT", "JDT", "LST", "JST", "AST", "KST", "AST", "EAST", "MSK", "SAMT", "IDT", "AST", "ADT", "MST", "GST", "CST", "AST", "WAST", "YEKT", "PKT", "IST", "SLST", "NST", "CAST", "BST", "MST", "SAST", "NCAST", "CST", "NAST", "MPST", "WAST", "TST", "UST", "NAEST", "JST", "KST", "CAST", "ACST", "EAST", "AEST", "WPST", "TST", "YST", "CPST", "VST", "NZST", "U", "FST", "MST", "KDT", "TST", "SST"}, + "text": {"Dateline Standard Time", "UTC-11", "Hawaiian Standard Time", "Alaskan Standard Time", "Pacific Standard Time (Mexico)", "Pacific Daylight Time", "Pacific Standard Time", "US Mountain Standard Time", "Mountain Standard Time (Mexico)", "Mountain Standard Time", "Central America Standard Time", "Central Standard Time", "Central Standard Time (Mexico)", "Canada Central Standard Time", "SA Pacific Standard Time", "Eastern Standard Time", "US Eastern Standard Time", "Venezuela Standard Time", "Paraguay Standard Time", "Atlantic Standard Time", "Central Brazilian Standard Time", "SA Western Standard Time", "Pacific SA Standard Time", "Newfoundland Standard Time", "E. South America Standard Time", "Argentina Standard Time", "SA Eastern Standard Time", "Greenland Standard Time", "Montevideo Standard Time", "Bahia Standard Time", "UTC-02", "Mid-Atlantic Standard Time", "Azores Standard Time", "Cape Verde Standard Time", "Morocco Standard Time", "UTC", "Greenwich Mean Time", "British Summer Time", "GMT Standard Time", "Greenwich Standard Time", "W. Europe Standard Time", "Central Europe Standard Time", "Romance Standard Time", "Central European Standard Time", "W. Central Africa Standard Time", "Namibia Standard Time", "GTB Standard Time", "Middle East Standard Time", "Egypt Standard Time", "Syria Standard Time", "E. Europe Standard Time", "South Africa Standard Time", "FLE Standard Time", "Turkey Standard Time", "Israel Standard Time", "Libya Standard Time", "Jordan Standard Time", "Arabic Standard Time", "Kaliningrad Standard Time", "Arab Standard Time", "E. Africa Standard Time", "Moscow Standard Time", "Samara Time", "Iran Standard Time", "Arabian Standard Time", "Azerbaijan Standard Time", "Mauritius Standard Time", "Georgian Standard Time", "Caucasus Standard Time", "Afghanistan Standard Time", "West Asia Standard Time", "Yekaterinburg Time", "Pakistan Standard Time", "India Standard Time", "Sri Lanka Standard Time", "Nepal Standard Time", "Central Asia Standard Time", "Bangladesh Standard Time", "Myanmar Standard Time", "SE Asia Standard Time", "N. Central Asia Standard Time", "China Standard Time", "North Asia Standard Time", "Singapore Standard Time", "W. Australia Standard Time", "Taipei Standard Time", "Ulaanbaatar Standard Time", "North Asia East Standard Time", "Japan Standard Time", "Korea Standard Time", "Cen. Australia Standard Time", "AUS Central Standard Time", "E. 
Australia Standard Time", "AUS Eastern Standard Time", "West Pacific Standard Time", "Tasmania Standard Time", "Yakutsk Standard Time", "Central Pacific Standard Time", "Vladivostok Standard Time", "New Zealand Standard Time", "UTC+12", "Fiji Standard Time", "Magadan Standard Time", "Kamchatka Standard Time", "Tonga Standard Time", "Samoa Standard Time"}, + "full": {"(UTC-12:00) International Date Line West", "(UTC-11:00) Coordinated Universal Time-11", "(UTC-10:00) Hawaii", "(UTC-09:00) Alaska", "(UTC-08:00) Baja California", "(UTC-07:00) Pacific Time (US & Canada)", "(UTC-08:00) Pacific Time (US & Canada)", "(UTC-07:00) Arizona", "(UTC-07:00) Chihuahua, La Paz, Mazatlan", "(UTC-07:00) Mountain Time (US & Canada)", "(UTC-06:00) Central America", "(UTC-06:00) Central Time (US & Canada)", "(UTC-06:00) Guadalajara, Mexico City, Monterrey", "(UTC-06:00) Saskatchewan", "(UTC-05:00) Bogota, Lima, Quito", "(UTC-05:00) Eastern Time (US & Canada)", "(UTC-05:00) Indiana (East)", "(UTC-04:30) Caracas", "(UTC-04:00) Asuncion", "(UTC-04:00) Atlantic Time (Canada)", "(UTC-04:00) Cuiaba", "(UTC-04:00) Georgetown, La Paz, Manaus, San Juan", "(UTC-04:00) Santiago", "(UTC-03:30) Newfoundland", "(UTC-03:00) Brasilia", "(UTC-03:00) Buenos Aires", "(UTC-03:00) Cayenne, Fortaleza", "(UTC-03:00) Greenland", "(UTC-03:00) Montevideo", "(UTC-03:00) Salvador", "(UTC-02:00) Coordinated Universal Time-02", "(UTC-02:00) Mid-Atlantic - Old", "(UTC-01:00) Azores", "(UTC-01:00) Cape Verde Is.", "(UTC) Casablanca", "(UTC) Coordinated Universal Time", "(UTC) Edinburgh, London", "(UTC+01:00) Edinburgh, London", "(UTC) Dublin, Lisbon", "(UTC) Monrovia, Reykjavik", "(UTC+01:00) Amsterdam, Berlin, Bern, Rome, Stockholm, Vienna", "(UTC+01:00) Belgrade, Bratislava, Budapest, Ljubljana, Prague", "(UTC+01:00) Brussels, Copenhagen, Madrid, Paris", "(UTC+01:00) Sarajevo, Skopje, Warsaw, Zagreb", "(UTC+01:00) West Central Africa", "(UTC+01:00) Windhoek", "(UTC+02:00) Athens, Bucharest", "(UTC+02:00) Beirut", "(UTC+02:00) Cairo", "(UTC+02:00) Damascus", "(UTC+02:00) E. Europe", "(UTC+02:00) Harare, Pretoria", "(UTC+02:00) Helsinki, Kyiv, Riga, Sofia, Tallinn, Vilnius", "(UTC+03:00) Istanbul", "(UTC+02:00) Jerusalem", "(UTC+02:00) Tripoli", "(UTC+03:00) Amman", "(UTC+03:00) Baghdad", "(UTC+03:00) Kaliningrad, Minsk", "(UTC+03:00) Kuwait, Riyadh", "(UTC+03:00) Nairobi", "(UTC+03:00) Moscow, St. 
Petersburg, Volgograd", "(UTC+04:00) Samara, Ulyanovsk, Saratov", "(UTC+03:30) Tehran", "(UTC+04:00) Abu Dhabi, Muscat", "(UTC+04:00) Baku", "(UTC+04:00) Port Louis", "(UTC+04:00) Tbilisi", "(UTC+04:00) Yerevan", "(UTC+04:30) Kabul", "(UTC+05:00) Ashgabat, Tashkent", "(UTC+05:00) Yekaterinburg", "(UTC+05:00) Islamabad, Karachi", "(UTC+05:30) Chennai, Kolkata, Mumbai, New Delhi", "(UTC+05:30) Sri Jayawardenepura", "(UTC+05:45) Kathmandu", "(UTC+06:00) Astana", "(UTC+06:00) Dhaka", "(UTC+06:30) Yangon (Rangoon)", "(UTC+07:00) Bangkok, Hanoi, Jakarta", "(UTC+07:00) Novosibirsk", "(UTC+08:00) Beijing, Chongqing, Hong Kong, Urumqi", "(UTC+08:00) Krasnoyarsk", "(UTC+08:00) Kuala Lumpur, Singapore", "(UTC+08:00) Perth", "(UTC+08:00) Taipei", "(UTC+08:00) Ulaanbaatar", "(UTC+09:00) Irkutsk", "(UTC+09:00) Osaka, Sapporo, Tokyo", "(UTC+09:00) Seoul", "(UTC+09:30) Adelaide", "(UTC+09:30) Darwin", "(UTC+10:00) Brisbane", "(UTC+10:00) Canberra, Melbourne, Sydney", "(UTC+10:00) Guam, Port Moresby", "(UTC+10:00) Hobart", "(UTC+10:00) Yakutsk", "(UTC+11:00) Solomon Is., New Caledonia", "(UTC+11:00) Vladivostok", "(UTC+12:00) Auckland, Wellington", "(UTC+12:00) Coordinated Universal Time+12", "(UTC+12:00) Fiji", "(UTC+12:00) Magadan", "(UTC+12:00) Petropavlovsk-Kamchatsky - Old", "(UTC+13:00) Nuku'alofa", "(UTC+13:00) Samoa"}, + "region": {"Africa/Abidjan", "Africa/Accra", "Africa/Addis_Ababa", "Africa/Algiers", "Africa/Asmara", "Africa/Bamako", "Africa/Bangui", "Africa/Banjul", "Africa/Bissau", "Africa/Blantyre", "Africa/Brazzaville", "Africa/Bujumbura", "Africa/Cairo", "Africa/Casablanca", "Africa/Ceuta", "Africa/Conakry", "Africa/Dakar", "Africa/Dar_es_Salaam", "Africa/Djibouti", "Africa/Douala", "Africa/El_Aaiun", "Africa/Freetown", "Africa/Gaborone", "Africa/Harare", "Africa/Johannesburg", "Africa/Juba", "Africa/Kampala", "Africa/Khartoum", "Africa/Kigali", "Africa/Kinshasa", "Africa/Lagos", "Africa/Libreville", "Africa/Lome", "Africa/Luanda", "Africa/Lubumbashi", "Africa/Lusaka", "Africa/Malabo", "Africa/Maputo", "Africa/Maseru", "Africa/Mbabane", "Africa/Mogadishu", "Africa/Monrovia", "Africa/Nairobi", "Africa/Ndjamena", "Africa/Niamey", "Africa/Nouakchott", "Africa/Ouagadougou", "Africa/Porto-Novo", "Africa/Sao_Tome", "Africa/Timbuktu", "Africa/Tripoli", "Africa/Tunis", "Africa/Windhoek", "America/Adak", "America/Anchorage", "America/Anguilla", "America/Antigua", "America/Araguaina", "America/Argentina/Buenos_Aires", "America/Argentina/Catamarca", "America/Argentina/ComodRivadavia", "America/Argentina/Cordoba", "America/Argentina/Jujuy", "America/Argentina/La_Rioja", "America/Argentina/Mendoza", "America/Argentina/Rio_Gallegos", "America/Argentina/Salta", "America/Argentina/San_Juan", "America/Argentina/San_Luis", "America/Argentina/Tucuman", "America/Argentina/Ushuaia", "America/Aruba", "America/Asuncion", "America/Atikokan", "America/Atka", "America/Bahia", "America/Bahia_Banderas", "America/Barbados", "America/Belem", "America/Belize", "America/Blanc-Sablon", "America/Boa_Vista", "America/Bogota", "America/Boise", "America/Buenos_Aires", "America/Cambridge_Bay", "America/Campo_Grande", "America/Cancun", "America/Caracas", "America/Catamarca", "America/Cayenne", "America/Cayman", "America/Chicago", "America/Chihuahua", "America/Coral_Harbour", "America/Cordoba", "America/Costa_Rica", "America/Creston", "America/Cuiaba", "America/Curacao", "America/Danmarkshavn", "America/Dawson", "America/Dawson_Creek", "America/Denver", "America/Detroit", "America/Dominica", "America/Edmonton", 
"America/Eirunepe", "America/El_Salvador", "America/Ensenada", "America/Fort_Nelson", "America/Fort_Wayne", "America/Fortaleza", "America/Glace_Bay", "America/Godthab", "America/Goose_Bay", "America/Grand_Turk", "America/Grenada", "America/Guadeloupe", "America/Guatemala", "America/Guayaquil", "America/Guyana", "America/Halifax", "America/Havana", "America/Hermosillo", "America/Indiana/Indianapolis", "America/Indiana/Knox", "America/Indiana/Marengo", "America/Indiana/Petersburg", "America/Indiana/Tell_City", "America/Indiana/Vevay", "America/Indiana/Vincennes", "America/Indiana/Winamac", "America/Indianapolis", "America/Inuvik", "America/Iqaluit", "America/Jamaica", "America/Jujuy", "America/Juneau", "America/Kentucky/Louisville", "America/Kentucky/Monticello", "America/Knox_IN", "America/Kralendijk", "America/La_Paz", "America/Lima", "America/Los_Angeles", "America/Louisville", "America/Lower_Princes", "America/Maceio", "America/Managua", "America/Manaus", "America/Marigot", "America/Martinique", "America/Matamoros", "America/Mazatlan", "America/Mendoza", "America/Menominee", "America/Merida", "America/Metlakatla", "America/Mexico_City", "America/Miquelon", "America/Moncton", "America/Monterrey", "America/Montevideo", "America/Montreal", "America/Montserrat", "America/Nassau", "America/New_York", "America/Nipigon", "America/Nome", "America/Noronha", "America/North_Dakota/Beulah", "America/North_Dakota/Center", "America/North_Dakota/New_Salem", "America/Ojinaga", "America/Panama", "America/Pangnirtung", "America/Paramaribo", "America/Phoenix", "America/Port_of_Spain", "America/Port-au-Prince", "America/Porto_Acre", "America/Porto_Velho", "America/Puerto_Rico", "America/Punta_Arenas", "America/Rainy_River", "America/Rankin_Inlet", "America/Recife", "America/Regina", "America/Resolute", "America/Rio_Branco", "America/Rosario", "America/Santa_Isabel", "America/Santarem", "America/Santiago", "America/Santo_Domingo", "America/Sao_Paulo", "America/Scoresbysund", "America/Shiprock", "America/Sitka", "America/St_Barthelemy", "America/St_Johns", "America/St_Kitts", "America/St_Lucia", "America/St_Thomas", "America/St_Vincent", "America/Swift_Current", "America/Tegucigalpa", "America/Thule", "America/Thunder_Bay", "America/Tijuana", "America/Toronto", "America/Tortola", "America/Vancouver", "America/Virgin", "America/Whitehorse", "America/Winnipeg", "America/Yakutat", "America/Yellowknife", "Antarctica/Casey", "Antarctica/Davis", "Antarctica/DumontDUrville", "Antarctica/Macquarie", "Antarctica/Mawson", "Antarctica/McMurdo", "Antarctica/Palmer", "Antarctica/Rothera", "Antarctica/South_Pole", "Antarctica/Syowa", "Antarctica/Troll", "Antarctica/Vostok", "Arctic/Longyearbyen", "Asia/Aden", "Asia/Almaty", "Asia/Amman", "Asia/Anadyr", "Asia/Aqtau", "Asia/Aqtobe", "Asia/Ashgabat", "Asia/Ashkhabad", "Asia/Atyrau", "Asia/Baghdad", "Asia/Bahrain", "Asia/Baku", "Asia/Bangkok", "Asia/Barnaul", "Asia/Beirut", "Asia/Bishkek", "Asia/Brunei", "Asia/Calcutta", "Asia/Chita", "Asia/Choibalsan", "Asia/Chongqing", "Asia/Chungking", "Asia/Colombo", "Asia/Dacca", "Asia/Damascus", "Asia/Dhaka", "Asia/Dili", "Asia/Dubai", "Asia/Dushanbe", "Asia/Famagusta", "Asia/Gaza", "Asia/Harbin", "Asia/Hebron", "Asia/Ho_Chi_Minh", "Asia/Hong_Kong", "Asia/Hovd", "Asia/Irkutsk", "Asia/Istanbul", "Asia/Jakarta", "Asia/Jayapura", "Asia/Jerusalem", "Asia/Kabul", "Asia/Kamchatka", "Asia/Karachi", "Asia/Kashgar", "Asia/Kathmandu", "Asia/Katmandu", "Asia/Khandyga", "Asia/Kolkata", "Asia/Krasnoyarsk", "Asia/Kuala_Lumpur", "Asia/Kuching", 
"Asia/Kuwait", "Asia/Macao", "Asia/Macau", "Asia/Magadan", "Asia/Makassar", "Asia/Manila", "Asia/Muscat", "Asia/Novokuznetsk", "Asia/Novosibirsk", "Asia/Omsk", "Asia/Oral", "Asia/Phnom_Penh", "Asia/Pontianak", "Asia/Pyongyang", "Asia/Qatar", "Asia/Qyzylorda", "Asia/Rangoon", "Asia/Riyadh", "Asia/Saigon", "Asia/Sakhalin", "Asia/Samarkand", "Asia/Seoul", "Asia/Shanghai", "Asia/Singapore", "Asia/Srednekolymsk", "Asia/Taipei", "Asia/Tashkent", "Asia/Tbilisi", "Asia/Tehran", "Asia/Tel_Aviv", "Asia/Thimbu", "Asia/Thimphu", "Asia/Tokyo", "Asia/Tomsk", "Asia/Ujung_Pandang", "Asia/Ulaanbaatar", "Asia/Ulan_Bator", "Asia/Urumqi", "Asia/Ust-Nera", "Asia/Vientiane", "Asia/Vladivostok", "Asia/Yakutsk", "Asia/Yangon", "Asia/Yekaterinburg", "Asia/Yerevan", "Atlantic/Azores", "Atlantic/Bermuda", "Atlantic/Canary", "Atlantic/Cape_Verde", "Atlantic/Faeroe", "Atlantic/Faroe", "Atlantic/Jan_Mayen", "Atlantic/Madeira", "Atlantic/Reykjavik", "Atlantic/South_Georgia", "Atlantic/St_Helena", "Atlantic/Stanley", "Australia/Adelaide", "Australia/Brisbane", "Australia/Broken_Hill", "Australia/Canberra", "Australia/Currie", "Australia/Darwin", "Australia/Eucla", "Australia/Hobart", "Australia/Lindeman", "Australia/Lord_Howe", "Australia/Melbourne", "Australia/Perth", "Australia/Sydney", "Australia/Yancowinna", "Etc/GMT", "Etc/GMT+0", "Etc/GMT+1", "Etc/GMT+10", "Etc/GMT+11", "Etc/GMT+12", "Etc/GMT+2", "Etc/GMT+3", "Etc/GMT+4", "Etc/GMT+5", "Etc/GMT+6", "Etc/GMT+7", "Etc/GMT+8", "Etc/GMT+9", "Etc/GMT0", "Etc/GMT-0", "Etc/GMT-1", "Etc/GMT-10", "Etc/GMT-11", "Etc/GMT-12", "Etc/GMT-13", "Etc/GMT-14", "Etc/GMT-2", "Etc/GMT-3", "Etc/GMT-4", "Etc/GMT-5", "Etc/GMT-6", "Etc/GMT-7", "Etc/GMT-8", "Etc/GMT-9", "Etc/UTC", "Europe/Amsterdam", "Europe/Andorra", "Europe/Astrakhan", "Europe/Athens", "Europe/Belfast", "Europe/Belgrade", "Europe/Berlin", "Europe/Bratislava", "Europe/Brussels", "Europe/Bucharest", "Europe/Budapest", "Europe/Busingen", "Europe/Chisinau", "Europe/Copenhagen", "Europe/Dublin", "Europe/Gibraltar", "Europe/Guernsey", "Europe/Helsinki", "Europe/Isle_of_Man", "Europe/Istanbul", "Europe/Jersey", "Europe/Kaliningrad", "Europe/Kiev", "Europe/Kirov", "Europe/Lisbon", "Europe/Ljubljana", "Europe/London", "Europe/Luxembourg", "Europe/Madrid", "Europe/Malta", "Europe/Mariehamn", "Europe/Minsk", "Europe/Monaco", "Europe/Moscow", "Asia/Nicosia", "Europe/Oslo", "Europe/Paris", "Europe/Podgorica", "Europe/Prague", "Europe/Riga", "Europe/Rome", "Europe/Samara", "Europe/San_Marino", "Europe/Sarajevo", "Europe/Saratov", "Europe/Simferopol", "Europe/Skopje", "Europe/Sofia", "Europe/Stockholm", "Europe/Tallinn", "Europe/Tirane", "Europe/Tiraspol", "Europe/Ulyanovsk", "Europe/Uzhgorod", "Europe/Vaduz", "Europe/Vatican", "Europe/Vienna", "Europe/Vilnius", "Europe/Volgograd", "Europe/Warsaw", "Europe/Zagreb", "Europe/Zaporozhye", "Europe/Zurich", "Indian/Antananarivo", "Indian/Chagos", "Indian/Christmas", "Indian/Cocos", "Indian/Comoro", "Indian/Kerguelen", "Indian/Mahe", "Indian/Maldives", "Indian/Mauritius", "Indian/Mayotte", "Indian/Reunion", "Pacific/Apia", "Pacific/Auckland", "Pacific/Bougainville", "Pacific/Chatham", "Pacific/Chuuk", "Pacific/Easter", "Pacific/Efate", "Pacific/Enderbury", "Pacific/Fakaofo", "Pacific/Fiji", "Pacific/Funafuti", "Pacific/Galapagos", "Pacific/Gambier", "Pacific/Guadalcanal", "Pacific/Guam", "Pacific/Honolulu", "Pacific/Johnston", "Pacific/Kiritimati", "Pacific/Kosrae", "Pacific/Kwajalein", "Pacific/Majuro", "Pacific/Marquesas", "Pacific/Midway", "Pacific/Nauru", "Pacific/Niue", "Pacific/Norfolk", 
"Pacific/Noumea", "Pacific/Pago_Pago", "Pacific/Palau", "Pacific/Pitcairn", "Pacific/Pohnpei", "Pacific/Ponape", "Pacific/Port_Moresby", "Pacific/Rarotonga", "Pacific/Saipan", "Pacific/Samoa", "Pacific/Tahiti", "Pacific/Tarawa", "Pacific/Tongatapu", "Pacific/Truk", "Pacific/Wake", "Pacific/Wallis", "Pacific/Yap"}, +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/data/emoji.go b/vendor/github.com/brianvoe/gofakeit/v7/data/emoji.go new file mode 100644 index 0000000000..8f8ce80f95 --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/data/emoji.go @@ -0,0 +1,5849 @@ +package data + +// Data is pull from https://raw.githubusercontent.com/github/gemoji/master/db/emoji.json + +// Emoji consists of emoji information +var Emoji = map[string][]string{ + "emoji": { + "😀", + "😃", + "😄", + "😁", + "😆", + "😅", + "🤣", + "😂", + "🙂", + "🙃", + "😉", + "😊", + "😇", + "🥰", + "😍", + "🤩", + "😘", + "😗", + "☺️", + "😚", + "😙", + "😋", + "😛", + "😜", + "🤪", + "😝", + "🤑", + "🤗", + "🤭", + "🤫", + "🤔", + "🤐", + "🤨", + "😐", + "😑", + "😶", + "😏", + "😒", + "🙄", + "😬", + "🤥", + "😌", + "😔", + "😪", + "🤤", + "😴", + "😷", + "🤒", + "🤕", + "🤢", + "🤮", + "🤧", + "🥵", + "🥶", + "🥴", + "😵", + "🤯", + "🤠", + "🥳", + "😎", + "🤓", + "🧐", + "😕", + "😟", + "🙁", + "☹️", + "😮", + "😯", + "😲", + "😳", + "🥺", + "😦", + "😧", + "😨", + "😰", + "😥", + "😢", + "😭", + "😱", + "😖", + "😣", + "😞", + "😓", + "😩", + "😫", + "🥱", + "😤", + "😡", + "😠", + "🤬", + "😈", + "👿", + "💀", + "☠️", + "💩", + "🤡", + "👹", + "👺", + "👻", + "👽", + "👾", + "🤖", + "😺", + "😸", + "😹", + "😻", + "😼", + "😽", + "🙀", + "😿", + "😾", + "🙈", + "🙉", + "🙊", + "💋", + "💌", + "💘", + "💝", + "💖", + "💗", + "💓", + "💞", + "💕", + "💟", + "❣️", + "💔", + "❤️", + "🧡", + "💛", + "💚", + "💙", + "💜", + "🤎", + "🖤", + "🤍", + "💯", + "💢", + "💥", + "💫", + "💦", + "💨", + "🕳️", + "💣", + "💬", + "🗨️", + "🗯️", + "💭", + "💤", + "👋", + "🤚", + "🖐️", + "✋", + "🖖", + "👌", + "🤏", + "✌️", + "🤞", + "🤟", + "🤘", + "🤙", + "👈", + "👉", + "👆", + "🖕", + "👇", + "☝️", + "👍", + "👎", + "✊", + "👊", + "🤛", + "🤜", + "👏", + "🙌", + "👐", + "🤲", + "🤝", + "🙏", + "✍️", + "💅", + "🤳", + "💪", + "🦾", + "🦿", + "🦵", + "🦶", + "👂", + "🦻", + "👃", + "🧠", + "🦷", + "🦴", + "👀", + "👁️", + "👅", + "👄", + "👶", + "🧒", + "👦", + "👧", + "🧑", + "👱", + "👨", + "🧔", + "👨‍🦰", + "👨‍🦱", + "👨‍🦳", + "👨‍🦲", + "👩", + "👩‍🦰", + "🧑‍🦰", + "👩‍🦱", + "🧑‍🦱", + "👩‍🦳", + "🧑‍🦳", + "👩‍🦲", + "🧑‍🦲", + "👱‍♀️", + "👱‍♂️", + "🧓", + "👴", + "👵", + "🙍", + "🙍‍♂️", + "🙍‍♀️", + "🙎", + "🙎‍♂️", + "🙎‍♀️", + "🙅", + "🙅‍♂️", + "🙅‍♀️", + "🙆", + "🙆‍♂️", + "🙆‍♀️", + "💁", + "💁‍♂️", + "💁‍♀️", + "🙋", + "🙋‍♂️", + "🙋‍♀️", + "🧏", + "🧏‍♂️", + "🧏‍♀️", + "🙇", + "🙇‍♂️", + "🙇‍♀️", + "🤦", + "🤦‍♂️", + "🤦‍♀️", + "🤷", + "🤷‍♂️", + "🤷‍♀️", + "🧑‍⚕️", + "👨‍⚕️", + "👩‍⚕️", + "🧑‍🎓", + "👨‍🎓", + "👩‍🎓", + "🧑‍🏫", + "👨‍🏫", + "👩‍🏫", + "🧑‍⚖️", + "👨‍⚖️", + "👩‍⚖️", + "🧑‍🌾", + "👨‍🌾", + "👩‍🌾", + "🧑‍🍳", + "👨‍🍳", + "👩‍🍳", + "🧑‍🔧", + "👨‍🔧", + "👩‍🔧", + "🧑‍🏭", + "👨‍🏭", + "👩‍🏭", + "🧑‍💼", + "👨‍💼", + "👩‍💼", + "🧑‍🔬", + "👨‍🔬", + "👩‍🔬", + "🧑‍💻", + "👨‍💻", + "👩‍💻", + "🧑‍🎤", + "👨‍🎤", + "👩‍🎤", + "🧑‍🎨", + "👨‍🎨", + "👩‍🎨", + "🧑‍✈️", + "👨‍✈️", + "👩‍✈️", + "🧑‍🚀", + "👨‍🚀", + "👩‍🚀", + "🧑‍🚒", + "👨‍🚒", + "👩‍🚒", + "👮", + "👮‍♂️", + "👮‍♀️", + "🕵️", + "💂", + "💂‍♂️", + "💂‍♀️", + "👷", + "👷‍♂️", + "👷‍♀️", + "🤴", + "👸", + "👳", + "👳‍♂️", + "👳‍♀️", + "👲", + "🧕", + "🤵", + "🤵‍♂️", + "🤵‍♀️", + "👰", + "👰‍♂️", + "👰‍♀️", + "🤰", + "🤱", + "👩‍🍼", + "👨‍🍼", + "🧑‍🍼", + "👼", + "🎅", + "🤶", + "🧑‍🎄", + "🦸", + "🦸‍♂️", + "🦸‍♀️", + "🦹", + "🦹‍♂️", + "🦹‍♀️", + "🧙", + "🧙‍♂️", + "🧙‍♀️", + "🧚", + "🧚‍♂️", + "🧚‍♀️", + "🧛", + "🧛‍♂️", + "🧛‍♀️", + "🧜", + "🧜‍♂️", + "🧜‍♀️", + "🧝", + "🧝‍♂️", + "🧝‍♀️", + "🧞", + 
"🧞‍♂️", + "🧞‍♀️", + "🧟", + "🧟‍♂️", + "🧟‍♀️", + "💆", + "💆‍♂️", + "💆‍♀️", + "💇", + "💇‍♂️", + "💇‍♀️", + "🚶", + "🚶‍♂️", + "🚶‍♀️", + "🧍", + "🧍‍♂️", + "🧍‍♀️", + "🧎", + "🧎‍♂️", + "🧎‍♀️", + "🧑‍🦯", + "👨‍🦯", + "👩‍🦯", + "🧑‍🦼", + "👨‍🦼", + "👩‍🦼", + "🧑‍🦽", + "👨‍🦽", + "👩‍🦽", + "🏃", + "🏃‍♂️", + "🏃‍♀️", + "💃", + "🕺", + "🕴️", + "👯", + "👯‍♂️", + "👯‍♀️", + "🧖", + "🧖‍♂️", + "🧖‍♀️", + "🧗", + "🧗‍♂️", + "🧗‍♀️", + "🤺", + "🏇", + "⛷️", + "🏂", + "🏌️", + "🏄", + "🚣", + "🚣‍♂️", + "🚣‍♀️", + "🏊", + "⛹️", + "🏋️", + "🚴", + "🚴‍♂️", + "🚴‍♀️", + "🚵", + "🚵‍♂️", + "🚵‍♀️", + "🤸", + "🤸‍♂️", + "🤸‍♀️", + "🤼", + "🤼‍♂️", + "🤼‍♀️", + "🤽", + "🤽‍♂️", + "🤽‍♀️", + "🤾", + "🤾‍♂️", + "🤾‍♀️", + "🤹", + "🤹‍♂️", + "🤹‍♀️", + "🧘", + "🧘‍♂️", + "🧘‍♀️", + "🛀", + "🛌", + "🧑‍🤝‍🧑", + "👭", + "👫", + "👬", + "💏", + "👪", + "👨‍👩‍👦", + "👨‍👩‍👧", + "👨‍👩‍👧‍👦", + "👨‍👩‍👦‍👦", + "👨‍👩‍👧‍👧", + "👨‍👨‍👦", + "👨‍👨‍👧", + "👨‍👨‍👧‍👦", + "👨‍👨‍👦‍👦", + "👨‍👨‍👧‍👧", + "👩‍👩‍👦", + "👩‍👩‍👧", + "👩‍👩‍👧‍👦", + "👩‍👩‍👦‍👦", + "👩‍👩‍👧‍👧", + "👨‍👦", + "👨‍👦‍👦", + "👨‍👧", + "👨‍👧‍👦", + "👨‍👧‍👧", + "👩‍👦", + "👩‍👦‍👦", + "👩‍👧", + "👩‍👧‍👦", + "👩‍👧‍👧", + "🗣️", + "👤", + "👥", + "👣", + "🐵", + "🐒", + "🦍", + "🦧", + "🐶", + "🐕", + "🦮", + "🐩", + "🐺", + "🦊", + "🦝", + "🐱", + "🐈", + "🐈‍⬛", + "🦁", + "🐯", + "🐅", + "🐆", + "🐴", + "🐎", + "🦄", + "🦓", + "🦌", + "🐮", + "🐂", + "🐃", + "🐄", + "🐷", + "🐖", + "🐗", + "🐽", + "🐏", + "🐑", + "🐐", + "🐪", + "🐫", + "🦙", + "🦒", + "🐘", + "🦏", + "🦛", + "🐭", + "🐁", + "🐀", + "🐹", + "🐰", + "🐇", + "🐿️", + "🦔", + "🦇", + "🐻", + "🐻‍❄️", + "🐨", + "🐼", + "🦥", + "🦦", + "🦨", + "🦘", + "🦡", + "🐾", + "🦃", + "🐔", + "🐓", + "🐣", + "🐤", + "🐥", + "🐦", + "🐧", + "🕊️", + "🦅", + "🦆", + "🦢", + "🦉", + "🦩", + "🦚", + "🦜", + "🐸", + "🐊", + "🐢", + "🦎", + "🐍", + "🐲", + "🐉", + "🦕", + "🦖", + "🐳", + "🐋", + "🐬", + "🐟", + "🐠", + "🐡", + "🦈", + "🐙", + "🐚", + "🐌", + "🦋", + "🐛", + "🐜", + "🐝", + "🐞", + "🦗", + "🕷️", + "🕸️", + "🦂", + "🦟", + "🦠", + "💐", + "🌸", + "💮", + "🏵️", + "🌹", + "🥀", + "🌺", + "🌻", + "🌼", + "🌷", + "🌱", + "🌲", + "🌳", + "🌴", + "🌵", + "🌾", + "🌿", + "☘️", + "🍀", + "🍁", + "🍂", + "🍃", + "🍇", + "🍈", + "🍉", + "🍊", + "🍋", + "🍌", + "🍍", + "🥭", + "🍎", + "🍏", + "🍐", + "🍑", + "🍒", + "🍓", + "🥝", + "🍅", + "🥥", + "🥑", + "🍆", + "🥔", + "🥕", + "🌽", + "🌶️", + "🥒", + "🥬", + "🥦", + "🧄", + "🧅", + "🍄", + "🥜", + "🌰", + "🍞", + "🥐", + "🥖", + "🥨", + "🥯", + "🥞", + "🧇", + "🧀", + "🍖", + "🍗", + "🥩", + "🥓", + "🍔", + "🍟", + "🍕", + "🌭", + "🥪", + "🌮", + "🌯", + "🥙", + "🧆", + "🥚", + "🍳", + "🥘", + "🍲", + "🥣", + "🥗", + "🍿", + "🧈", + "🧂", + "🥫", + "🍱", + "🍘", + "🍙", + "🍚", + "🍛", + "🍜", + "🍝", + "🍠", + "🍢", + "🍣", + "🍤", + "🍥", + "🥮", + "🍡", + "🥟", + "🥠", + "🥡", + "🦀", + "🦞", + "🦐", + "🦑", + "🦪", + "🍦", + "🍧", + "🍨", + "🍩", + "🍪", + "🎂", + "🍰", + "🧁", + "🥧", + "🍫", + "🍬", + "🍭", + "🍮", + "🍯", + "🍼", + "🥛", + "🍵", + "🍶", + "🍾", + "🍷", + "🍸", + "🍹", + "🍺", + "🍻", + "🥂", + "🥃", + "🥤", + "🧃", + "🧉", + "🧊", + "🥢", + "🍽️", + "🍴", + "🥄", + "🔪", + "🏺", + "🌍", + "🌎", + "🌏", + "🌐", + "🗺️", + "🗾", + "🧭", + "🏔️", + "⛰️", + "🌋", + "🗻", + "🏕️", + "🏖️", + "🏜️", + "🏝️", + "🏞️", + "🏟️", + "🏛️", + "🏗️", + "🧱", + "🏘️", + "🏚️", + "🏠", + "🏡", + "🏢", + "🏣", + "🏤", + "🏥", + "🏦", + "🏨", + "🏩", + "🏪", + "🏫", + "🏬", + "🏭", + "🏯", + "🏰", + "💒", + "🗼", + "🗽", + "⛪", + "🕌", + "🛕", + "🕍", + "⛩️", + "🕋", + "⛲", + "⛺", + "🌁", + "🌃", + "🏙️", + "🌄", + "🌅", + "🌆", + "🌇", + "🌉", + "♨️", + "🎠", + "🎡", + "🎢", + "💈", + "🎪", + "🚂", + "🚃", + "🚄", + "🚅", + "🚆", + "🚇", + "🚈", + "🚉", + "🚊", + "🚝", + "🚞", + "🚋", + "🚌", + "🚍", + "🚎", + "🚐", + "🚑", + "🚒", + "🚓", + "🚔", + "🚕", + "🚖", + "🚗", + "🚘", + "🚙", + "🚚", + "🚛", + "🚜", + "🏎️", + "🏍️", + "🛵", + "🦽", + "🦼", + "🛺", + "🚲", + "🛴", + "🛹", + "🚏", + "🛣️", + 
"🛤️", + "🛢️", + "⛽", + "🚨", + "🚥", + "🚦", + "🛑", + "🚧", + "⚓", + "⛵", + "🛶", + "🚤", + "🛳️", + "⛴️", + "🛥️", + "🚢", + "✈️", + "🛩️", + "🛫", + "🛬", + "🪂", + "💺", + "🚁", + "🚟", + "🚠", + "🚡", + "🛰️", + "🚀", + "🛸", + "🛎️", + "🧳", + "⌛", + "⏳", + "⌚", + "⏰", + "⏱️", + "⏲️", + "🕰️", + "🕛", + "🕧", + "🕐", + "🕜", + "🕑", + "🕝", + "🕒", + "🕞", + "🕓", + "🕟", + "🕔", + "🕠", + "🕕", + "🕡", + "🕖", + "🕢", + "🕗", + "🕣", + "🕘", + "🕤", + "🕙", + "🕥", + "🕚", + "🕦", + "🌑", + "🌒", + "🌓", + "🌔", + "🌕", + "🌖", + "🌗", + "🌘", + "🌙", + "🌚", + "🌛", + "🌜", + "🌡️", + "☀️", + "🌝", + "🌞", + "🪐", + "⭐", + "🌟", + "🌠", + "🌌", + "☁️", + "⛅", + "⛈️", + "🌤️", + "🌥️", + "🌦️", + "🌧️", + "🌨️", + "🌩️", + "🌪️", + "🌫️", + "🌬️", + "🌀", + "🌈", + "🌂", + "☂️", + "☔", + "⛱️", + "⚡", + "❄️", + "☃️", + "⛄", + "☄️", + "🔥", + "💧", + "🌊", + "🎃", + "🎄", + "🎆", + "🎇", + "🧨", + "✨", + "🎈", + "🎉", + "🎊", + "🎋", + "🎍", + "🎎", + "🎏", + "🎐", + "🎑", + "🧧", + "🎀", + "🎁", + "🎗️", + "🎟️", + "🎫", + "🎖️", + "🏆", + "🏅", + "🥇", + "🥈", + "🥉", + "⚽", + "⚾", + "🥎", + "🏀", + "🏐", + "🏈", + "🏉", + "🎾", + "🥏", + "🎳", + "🏏", + "🏑", + "🏒", + "🥍", + "🏓", + "🏸", + "🥊", + "🥋", + "🥅", + "⛳", + "⛸️", + "🎣", + "🤿", + "🎽", + "🎿", + "🛷", + "🥌", + "🎯", + "🪀", + "🪁", + "🎱", + "🔮", + "🧿", + "🎮", + "🕹️", + "🎰", + "🎲", + "🧩", + "🧸", + "♠️", + "♥️", + "♦️", + "♣️", + "♟️", + "🃏", + "🀄", + "🎴", + "🎭", + "🖼️", + "🎨", + "🧵", + "🧶", + "👓", + "🕶️", + "🥽", + "🥼", + "🦺", + "👔", + "👕", + "👖", + "🧣", + "🧤", + "🧥", + "🧦", + "👗", + "👘", + "🥻", + "🩱", + "🩲", + "🩳", + "👙", + "👚", + "👛", + "👜", + "👝", + "🛍️", + "🎒", + "👞", + "👟", + "🥾", + "🥿", + "👠", + "👡", + "🩰", + "👢", + "👑", + "👒", + "🎩", + "🎓", + "🧢", + "⛑️", + "📿", + "💄", + "💍", + "💎", + "🔇", + "🔈", + "🔉", + "🔊", + "📢", + "📣", + "📯", + "🔔", + "🔕", + "🎼", + "🎵", + "🎶", + "🎙️", + "🎚️", + "🎛️", + "🎤", + "🎧", + "📻", + "🎷", + "🎸", + "🎹", + "🎺", + "🎻", + "🪕", + "🥁", + "📱", + "📲", + "☎️", + "📞", + "📟", + "📠", + "🔋", + "🔌", + "💻", + "🖥️", + "🖨️", + "⌨️", + "🖱️", + "🖲️", + "💽", + "💾", + "💿", + "📀", + "🧮", + "🎥", + "🎞️", + "📽️", + "🎬", + "📺", + "📷", + "📸", + "📹", + "📼", + "🔍", + "🔎", + "🕯️", + "💡", + "🔦", + "🏮", + "🪔", + "📔", + "📕", + "📖", + "📗", + "📘", + "📙", + "📚", + "📓", + "📒", + "📃", + "📜", + "📄", + "📰", + "🗞️", + "📑", + "🔖", + "🏷️", + "💰", + "💴", + "💵", + "💶", + "💷", + "💸", + "💳", + "🧾", + "💹", + "✉️", + "📧", + "📨", + "📩", + "📤", + "📥", + "📦", + "📫", + "📪", + "📬", + "📭", + "📮", + "🗳️", + "✏️", + "✒️", + "🖋️", + "🖊️", + "🖌️", + "🖍️", + "📝", + "💼", + "📁", + "📂", + "🗂️", + "📅", + "📆", + "🗒️", + "🗓️", + "📇", + "📈", + "📉", + "📊", + "📋", + "📌", + "📍", + "📎", + "🖇️", + "📏", + "📐", + "✂️", + "🗃️", + "🗄️", + "🗑️", + "🔒", + "🔓", + "🔏", + "🔐", + "🔑", + "🗝️", + "🔨", + "🪓", + "⛏️", + "⚒️", + "🛠️", + "🗡️", + "⚔️", + "🔫", + "🏹", + "🛡️", + "🔧", + "🔩", + "⚙️", + "🗜️", + "⚖️", + "🦯", + "🔗", + "⛓️", + "🧰", + "🧲", + "⚗️", + "🧪", + "🧫", + "🧬", + "🔬", + "🔭", + "📡", + "💉", + "🩸", + "💊", + "🩹", + "🩺", + "🚪", + "🛏️", + "🛋️", + "🪑", + "🚽", + "🚿", + "🛁", + "🪒", + "🧴", + "🧷", + "🧹", + "🧺", + "🧻", + "🧼", + "🧽", + "🧯", + "🛒", + "🚬", + "⚰️", + "⚱️", + "🗿", + "🏧", + "🚮", + "🚰", + "♿", + "🚹", + "🚺", + "🚻", + "🚼", + "🚾", + "🛂", + "🛃", + "🛄", + "🛅", + "⚠️", + "🚸", + "⛔", + "🚫", + "🚳", + "🚭", + "🚯", + "🚱", + "🚷", + "📵", + "🔞", + "☢️", + "☣️", + "⬆️", + "↗️", + "➡️", + "↘️", + "⬇️", + "↙️", + "⬅️", + "↖️", + "↕️", + "↔️", + "↩️", + "↪️", + "⤴️", + "⤵️", + "🔃", + "🔄", + "🔙", + "🔚", + "🔛", + "🔜", + "🔝", + "🛐", + "⚛️", + "🕉️", + "✡️", + "☸️", + "☯️", + "✝️", + "☦️", + "☪️", + "☮️", + "🕎", + "🔯", + "♈", + "♉", + "♊", + "♋", + "♌", + "♍", + "♎", + "♏", + "♐", + "♑", + "♒", + "♓", + "⛎", + 
"🔀", + "🔁", + "🔂", + "▶️", + "⏩", + "⏭️", + "⏯️", + "◀️", + "⏪", + "⏮️", + "🔼", + "⏫", + "🔽", + "⏬", + "⏸️", + "⏹️", + "⏺️", + "⏏️", + "🎦", + "🔅", + "🔆", + "📶", + "📳", + "📴", + "♀️", + "♂️", + "⚧️", + "✖️", + "➕", + "➖", + "➗", + "♾️", + "‼️", + "⁉️", + "❓", + "❔", + "❕", + "❗", + "〰️", + "💱", + "💲", + "⚕️", + "♻️", + "⚜️", + "🔱", + "📛", + "🔰", + "⭕", + "✅", + "☑️", + "✔️", + "❌", + "❎", + "➰", + "➿", + "〽️", + "✳️", + "✴️", + "❇️", + "©️", + "®️", + "™️", + "#️⃣", + "*️⃣", + "0️⃣", + "1️⃣", + "2️⃣", + "3️⃣", + "4️⃣", + "5️⃣", + "6️⃣", + "7️⃣", + "8️⃣", + "9️⃣", + "🔟", + "🔠", + "🔡", + "🔢", + "🔣", + "🔤", + "🅰️", + "🆎", + "🅱️", + "🆑", + "🆒", + "🆓", + "ℹ️", + "🆔", + "Ⓜ️", + "🆕", + "🆖", + "🅾️", + "🆗", + "🅿️", + "🆘", + "🆙", + "🆚", + "🈁", + "🈂️", + "🈷️", + "🈶", + "🈯", + "🉐", + "🈹", + "🈚", + "🈲", + "🉑", + "🈸", + "🈴", + "🈳", + "㊗️", + "㊙️", + "🈺", + "🈵", + "🔴", + "🟠", + "🟡", + "🟢", + "🔵", + "🟣", + "🟤", + "⚫", + "⚪", + "🟥", + "🟧", + "🟨", + "🟩", + "🟦", + "🟪", + "🟫", + "⬛", + "⬜", + "◼️", + "◻️", + "◾", + "◽", + "▪️", + "▫️", + "🔶", + "🔷", + "🔸", + "🔹", + "🔺", + "🔻", + "💠", + "🔘", + "🔳", + "🔲", + "🏁", + "🚩", + "🎌", + "🏴", + "🏳️", + "🏴‍☠️", + "🇦🇨", + "🇦🇩", + "🇦🇪", + "🇦🇫", + "🇦🇬", + "🇦🇮", + "🇦🇱", + "🇦🇲", + "🇦🇴", + "🇦🇶", + "🇦🇷", + "🇦🇸", + "🇦🇹", + "🇦🇺", + "🇦🇼", + "🇦🇽", + "🇦🇿", + "🇧🇦", + "🇧🇧", + "🇧🇩", + "🇧🇪", + "🇧🇫", + "🇧🇬", + "🇧🇭", + "🇧🇮", + "🇧🇯", + "🇧🇱", + "🇧🇲", + "🇧🇳", + "🇧🇴", + "🇧🇶", + "🇧🇷", + "🇧🇸", + "🇧🇹", + "🇧🇻", + "🇧🇼", + "🇧🇾", + "🇧🇿", + "🇨🇦", + "🇨🇨", + "🇨🇩", + "🇨🇫", + "🇨🇬", + "🇨🇭", + "🇨🇮", + "🇨🇰", + "🇨🇱", + "🇨🇲", + "🇨🇳", + "🇨🇴", + "🇨🇵", + "🇨🇷", + "🇨🇺", + "🇨🇻", + "🇨🇼", + "🇨🇽", + "🇨🇾", + "🇨🇿", + "🇩🇪", + "🇩🇬", + "🇩🇯", + "🇩🇰", + "🇩🇲", + "🇩🇴", + "🇩🇿", + "🇪🇦", + "🇪🇨", + "🇪🇪", + "🇪🇬", + "🇪🇭", + "🇪🇷", + "🇪🇸", + "🇪🇹", + "🇪🇺", + "🇫🇮", + "🇫🇯", + "🇫🇰", + "🇫🇲", + "🇫🇴", + "🇫🇷", + "🇬🇦", + "🇬🇧", + "🇬🇩", + "🇬🇪", + "🇬🇫", + "🇬🇬", + "🇬🇭", + "🇬🇮", + "🇬🇱", + "🇬🇲", + "🇬🇳", + "🇬🇵", + "🇬🇶", + "🇬🇷", + "🇬🇸", + "🇬🇹", + "🇬🇺", + "🇬🇼", + "🇬🇾", + "🇭🇰", + "🇭🇲", + "🇭🇳", + "🇭🇷", + "🇭🇹", + "🇭🇺", + "🇮🇨", + "🇮🇩", + "🇮🇪", + "🇮🇱", + "🇮🇲", + "🇮🇳", + "🇮🇴", + "🇮🇶", + "🇮🇷", + "🇮🇸", + "🇮🇹", + "🇯🇪", + "🇯🇲", + "🇯🇴", + "🇯🇵", + "🇰🇪", + "🇰🇬", + "🇰🇭", + "🇰🇮", + "🇰🇲", + "🇰🇳", + "🇰🇵", + "🇰🇷", + "🇰🇼", + "🇰🇾", + "🇰🇿", + "🇱🇦", + "🇱🇧", + "🇱🇨", + "🇱🇮", + "🇱🇰", + "🇱🇷", + "🇱🇸", + "🇱🇹", + "🇱🇺", + "🇱🇻", + "🇱🇾", + "🇲🇦", + "🇲🇨", + "🇲🇩", + "🇲🇪", + "🇲🇫", + "🇲🇬", + "🇲🇭", + "🇲🇰", + "🇲🇱", + "🇲🇲", + "🇲🇳", + "🇲🇴", + "🇲🇵", + "🇲🇶", + "🇲🇷", + "🇲🇸", + "🇲🇹", + "🇲🇺", + "🇲🇻", + "🇲🇼", + "🇲🇽", + "🇲🇾", + "🇲🇿", + "🇳🇦", + "🇳🇨", + "🇳🇪", + "🇳🇫", + "🇳🇬", + "🇳🇮", + "🇳🇱", + "🇳🇴", + "🇳🇵", + "🇳🇷", + "🇳🇺", + "🇳🇿", + "🇴🇲", + "🇵🇦", + "🇵🇪", + "🇵🇫", + "🇵🇬", + "🇵🇭", + "🇵🇰", + "🇵🇱", + "🇵🇲", + "🇵🇳", + "🇵🇷", + "🇵🇸", + "🇵🇹", + "🇵🇼", + "🇵🇾", + "🇶🇦", + "🇷🇪", + "🇷🇴", + "🇷🇸", + "🇷🇺", + "🇷🇼", + "🇸🇦", + "🇸🇧", + "🇸🇨", + "🇸🇩", + "🇸🇪", + "🇸🇬", + "🇸🇭", + "🇸🇮", + "🇸🇯", + "🇸🇰", + "🇸🇱", + "🇸🇲", + "🇸🇳", + "🇸🇴", + "🇸🇷", + "🇸🇸", + "🇸🇹", + "🇸🇻", + "🇸🇽", + "🇸🇾", + "🇸🇿", + "🇹🇦", + "🇹🇨", + "🇹🇩", + "🇹🇫", + "🇹🇬", + "🇹🇭", + "🇹🇯", + "🇹🇰", + "🇹🇱", + "🇹🇲", + "🇹🇳", + "🇹🇴", + "🇹🇷", + "🇹🇹", + "🇹🇻", + "🇹🇼", + "🇹🇿", + "🇺🇦", + "🇺🇬", + "🇺🇲", + "🇺🇳", + "🇺🇸", + "🇺🇾", + "🇺🇿", + "🇻🇦", + "🇻🇨", + "🇻🇪", + "🇻🇬", + "🇻🇮", + "🇻🇳", + "🇻🇺", + "🇼🇫", + "🇼🇸", + "🇽🇰", + "🇾🇪", + "🇾🇹", + "🇿🇦", + "🇿🇲", + "🇿🇼", + }, + "description": { + "grinning face", + "grinning face with big eyes", + "grinning face with smiling eyes", + "beaming face with smiling eyes", + "grinning squinting face", + "grinning face with sweat", + "rolling on the floor laughing", + "face with tears of joy", + "slightly smiling face", + "upside-down face", + "winking face", + 
"smiling face with smiling eyes", + "smiling face with halo", + "smiling face with hearts", + "smiling face with heart-eyes", + "star-struck", + "face blowing a kiss", + "kissing face", + "smiling face", + "kissing face with closed eyes", + "kissing face with smiling eyes", + "smiling face with tear", + "face savoring food", + "face with tongue", + "winking face with tongue", + "zany face", + "squinting face with tongue", + "money-mouth face", + "hugging face", + "face with hand over mouth", + "shushing face", + "thinking face", + "zipper-mouth face", + "face with raised eyebrow", + "neutral face", + "expressionless face", + "face without mouth", + "smirking face", + "unamused face", + "face with rolling eyes", + "grimacing face", + "lying face", + "relieved face", + "pensive face", + "sleepy face", + "drooling face", + "sleeping face", + "face with medical mask", + "face with thermometer", + "face with head-bandage", + "nauseated face", + "face vomiting", + "sneezing face", + "hot face", + "cold face", + "woozy face", + "dizzy face", + "exploding head", + "cowboy hat face", + "partying face", + "disguised face", + "smiling face with sunglasses", + "nerd face", + "face with monocle", + "confused face", + "worried face", + "slightly frowning face", + "frowning face", + "face with open mouth", + "hushed face", + "astonished face", + "flushed face", + "pleading face", + "frowning face with open mouth", + "anguished face", + "fearful face", + "anxious face with sweat", + "sad but relieved face", + "crying face", + "loudly crying face", + "face screaming in fear", + "confounded face", + "persevering face", + "disappointed face", + "downcast face with sweat", + "weary face", + "tired face", + "yawning face", + "face with steam from nose", + "pouting face", + "angry face", + "face with symbols on mouth", + "smiling face with horns", + "angry face with horns", + "skull", + "skull and crossbones", + "pile of poo", + "clown face", + "ogre", + "goblin", + "ghost", + "alien", + "alien monster", + "robot", + "grinning cat", + "grinning cat with smiling eyes", + "cat with tears of joy", + "smiling cat with heart-eyes", + "cat with wry smile", + "kissing cat", + "weary cat", + "crying cat", + "pouting cat", + "see-no-evil monkey", + "hear-no-evil monkey", + "speak-no-evil monkey", + "kiss mark", + "love letter", + "heart with arrow", + "heart with ribbon", + "sparkling heart", + "growing heart", + "beating heart", + "revolving hearts", + "two hearts", + "heart decoration", + "heart exclamation", + "broken heart", + "red heart", + "orange heart", + "yellow heart", + "green heart", + "blue heart", + "purple heart", + "brown heart", + "black heart", + "white heart", + "hundred points", + "anger symbol", + "collision", + "dizzy", + "sweat droplets", + "dashing away", + "hole", + "bomb", + "speech balloon", + "eye in speech bubble", + "left speech bubble", + "right anger bubble", + "thought balloon", + "zzz", + "waving hand", + "raised back of hand", + "hand with fingers splayed", + "raised hand", + "vulcan salute", + "OK hand", + "pinched fingers", + "pinching hand", + "victory hand", + "crossed fingers", + "love-you gesture", + "sign of the horns", + "call me hand", + "backhand index pointing left", + "backhand index pointing right", + "backhand index pointing up", + "middle finger", + "backhand index pointing down", + "index pointing up", + "thumbs up", + "thumbs down", + "raised fist", + "oncoming fist", + "left-facing fist", + "right-facing fist", + "clapping hands", + "raising hands", + "open hands", + 
"palms up together", + "handshake", + "folded hands", + "writing hand", + "nail polish", + "selfie", + "flexed biceps", + "mechanical arm", + "mechanical leg", + "leg", + "foot", + "ear", + "ear with hearing aid", + "nose", + "brain", + "anatomical heart", + "lungs", + "tooth", + "bone", + "eyes", + "eye", + "tongue", + "mouth", + "baby", + "child", + "boy", + "girl", + "person", + "person: blond hair", + "man", + "man: beard", + "man: red hair", + "man: curly hair", + "man: white hair", + "man: bald", + "woman", + "woman: red hair", + "person: red hair", + "woman: curly hair", + "person: curly hair", + "woman: white hair", + "person: white hair", + "woman: bald", + "person: bald", + "woman: blond hair", + "man: blond hair", + "older person", + "old man", + "old woman", + "person frowning", + "man frowning", + "woman frowning", + "person pouting", + "man pouting", + "woman pouting", + "person gesturing NO", + "man gesturing NO", + "woman gesturing NO", + "person gesturing OK", + "man gesturing OK", + "woman gesturing OK", + "person tipping hand", + "man tipping hand", + "woman tipping hand", + "person raising hand", + "man raising hand", + "woman raising hand", + "deaf person", + "deaf man", + "deaf woman", + "person bowing", + "man bowing", + "woman bowing", + "person facepalming", + "man facepalming", + "woman facepalming", + "person shrugging", + "man shrugging", + "woman shrugging", + "health worker", + "man health worker", + "woman health worker", + "student", + "man student", + "woman student", + "teacher", + "man teacher", + "woman teacher", + "judge", + "man judge", + "woman judge", + "farmer", + "man farmer", + "woman farmer", + "cook", + "man cook", + "woman cook", + "mechanic", + "man mechanic", + "woman mechanic", + "factory worker", + "man factory worker", + "woman factory worker", + "office worker", + "man office worker", + "woman office worker", + "scientist", + "man scientist", + "woman scientist", + "technologist", + "man technologist", + "woman technologist", + "singer", + "man singer", + "woman singer", + "artist", + "man artist", + "woman artist", + "pilot", + "man pilot", + "woman pilot", + "astronaut", + "man astronaut", + "woman astronaut", + "firefighter", + "man firefighter", + "woman firefighter", + "police officer", + "man police officer", + "woman police officer", + "detective", + "man detective", + "woman detective", + "guard", + "man guard", + "woman guard", + "ninja", + "construction worker", + "man construction worker", + "woman construction worker", + "prince", + "princess", + "person wearing turban", + "man wearing turban", + "woman wearing turban", + "person with skullcap", + "woman with headscarf", + "person in tuxedo", + "man in tuxedo", + "woman in tuxedo", + "person with veil", + "man with veil", + "woman with veil", + "pregnant woman", + "breast-feeding", + "woman feeding baby", + "man feeding baby", + "person feeding baby", + "baby angel", + "Santa Claus", + "Mrs. 
Claus", + "mx claus", + "superhero", + "man superhero", + "woman superhero", + "supervillain", + "man supervillain", + "woman supervillain", + "mage", + "man mage", + "woman mage", + "fairy", + "man fairy", + "woman fairy", + "vampire", + "man vampire", + "woman vampire", + "merperson", + "merman", + "mermaid", + "elf", + "man elf", + "woman elf", + "genie", + "man genie", + "woman genie", + "zombie", + "man zombie", + "woman zombie", + "person getting massage", + "man getting massage", + "woman getting massage", + "person getting haircut", + "man getting haircut", + "woman getting haircut", + "person walking", + "man walking", + "woman walking", + "person standing", + "man standing", + "woman standing", + "person kneeling", + "man kneeling", + "woman kneeling", + "person with white cane", + "man with white cane", + "woman with white cane", + "person in motorized wheelchair", + "man in motorized wheelchair", + "woman in motorized wheelchair", + "person in manual wheelchair", + "man in manual wheelchair", + "woman in manual wheelchair", + "person running", + "man running", + "woman running", + "woman dancing", + "man dancing", + "person in suit levitating", + "people with bunny ears", + "men with bunny ears", + "women with bunny ears", + "person in steamy room", + "man in steamy room", + "woman in steamy room", + "person climbing", + "man climbing", + "woman climbing", + "person fencing", + "horse racing", + "skier", + "snowboarder", + "person golfing", + "man golfing", + "woman golfing", + "person surfing", + "man surfing", + "woman surfing", + "person rowing boat", + "man rowing boat", + "woman rowing boat", + "person swimming", + "man swimming", + "woman swimming", + "person bouncing ball", + "man bouncing ball", + "woman bouncing ball", + "person lifting weights", + "man lifting weights", + "woman lifting weights", + "person biking", + "man biking", + "woman biking", + "person mountain biking", + "man mountain biking", + "woman mountain biking", + "person cartwheeling", + "man cartwheeling", + "woman cartwheeling", + "people wrestling", + "men wrestling", + "women wrestling", + "person playing water polo", + "man playing water polo", + "woman playing water polo", + "person playing handball", + "man playing handball", + "woman playing handball", + "person juggling", + "man juggling", + "woman juggling", + "person in lotus position", + "man in lotus position", + "woman in lotus position", + "person taking bath", + "person in bed", + "people holding hands", + "women holding hands", + "woman and man holding hands", + "men holding hands", + "kiss", + "kiss: woman, man", + "kiss: man, man", + "kiss: woman, woman", + "couple with heart", + "couple with heart: woman, man", + "couple with heart: man, man", + "couple with heart: woman, woman", + "family", + "family: man, woman, boy", + "family: man, woman, girl", + "family: man, woman, girl, boy", + "family: man, woman, boy, boy", + "family: man, woman, girl, girl", + "family: man, man, boy", + "family: man, man, girl", + "family: man, man, girl, boy", + "family: man, man, boy, boy", + "family: man, man, girl, girl", + "family: woman, woman, boy", + "family: woman, woman, girl", + "family: woman, woman, girl, boy", + "family: woman, woman, boy, boy", + "family: woman, woman, girl, girl", + "family: man, boy", + "family: man, boy, boy", + "family: man, girl", + "family: man, girl, boy", + "family: man, girl, girl", + "family: woman, boy", + "family: woman, boy, boy", + "family: woman, girl", + "family: woman, girl, boy", + "family: woman, girl, 
girl", + "speaking head", + "bust in silhouette", + "busts in silhouette", + "people hugging", + "footprints", + "monkey face", + "monkey", + "gorilla", + "orangutan", + "dog face", + "dog", + "guide dog", + "service dog", + "poodle", + "wolf", + "fox", + "raccoon", + "cat face", + "cat", + "black cat", + "lion", + "tiger face", + "tiger", + "leopard", + "horse face", + "horse", + "unicorn", + "zebra", + "deer", + "bison", + "cow face", + "ox", + "water buffalo", + "cow", + "pig face", + "pig", + "boar", + "pig nose", + "ram", + "ewe", + "goat", + "camel", + "two-hump camel", + "llama", + "giraffe", + "elephant", + "mammoth", + "rhinoceros", + "hippopotamus", + "mouse face", + "mouse", + "rat", + "hamster", + "rabbit face", + "rabbit", + "chipmunk", + "beaver", + "hedgehog", + "bat", + "bear", + "polar bear", + "koala", + "panda", + "sloth", + "otter", + "skunk", + "kangaroo", + "badger", + "paw prints", + "turkey", + "chicken", + "rooster", + "hatching chick", + "baby chick", + "front-facing baby chick", + "bird", + "penguin", + "dove", + "eagle", + "duck", + "swan", + "owl", + "dodo", + "feather", + "flamingo", + "peacock", + "parrot", + "frog", + "crocodile", + "turtle", + "lizard", + "snake", + "dragon face", + "dragon", + "sauropod", + "T-Rex", + "spouting whale", + "whale", + "dolphin", + "seal", + "fish", + "tropical fish", + "blowfish", + "shark", + "octopus", + "spiral shell", + "snail", + "butterfly", + "bug", + "ant", + "honeybee", + "beetle", + "lady beetle", + "cricket", + "cockroach", + "spider", + "spider web", + "scorpion", + "mosquito", + "fly", + "worm", + "microbe", + "bouquet", + "cherry blossom", + "white flower", + "rosette", + "rose", + "wilted flower", + "hibiscus", + "sunflower", + "blossom", + "tulip", + "seedling", + "potted plant", + "evergreen tree", + "deciduous tree", + "palm tree", + "cactus", + "sheaf of rice", + "herb", + "shamrock", + "four leaf clover", + "maple leaf", + "fallen leaf", + "leaf fluttering in wind", + "grapes", + "melon", + "watermelon", + "tangerine", + "lemon", + "banana", + "pineapple", + "mango", + "red apple", + "green apple", + "pear", + "peach", + "cherries", + "strawberry", + "blueberries", + "kiwi fruit", + "tomato", + "olive", + "coconut", + "avocado", + "eggplant", + "potato", + "carrot", + "ear of corn", + "hot pepper", + "bell pepper", + "cucumber", + "leafy green", + "broccoli", + "garlic", + "onion", + "mushroom", + "peanuts", + "chestnut", + "bread", + "croissant", + "baguette bread", + "flatbread", + "pretzel", + "bagel", + "pancakes", + "waffle", + "cheese wedge", + "meat on bone", + "poultry leg", + "cut of meat", + "bacon", + "hamburger", + "french fries", + "pizza", + "hot dog", + "sandwich", + "taco", + "burrito", + "tamale", + "stuffed flatbread", + "falafel", + "egg", + "cooking", + "shallow pan of food", + "pot of food", + "fondue", + "bowl with spoon", + "green salad", + "popcorn", + "butter", + "salt", + "canned food", + "bento box", + "rice cracker", + "rice ball", + "cooked rice", + "curry rice", + "steaming bowl", + "spaghetti", + "roasted sweet potato", + "oden", + "sushi", + "fried shrimp", + "fish cake with swirl", + "moon cake", + "dango", + "dumpling", + "fortune cookie", + "takeout box", + "crab", + "lobster", + "shrimp", + "squid", + "oyster", + "soft ice cream", + "shaved ice", + "ice cream", + "doughnut", + "cookie", + "birthday cake", + "shortcake", + "cupcake", + "pie", + "chocolate bar", + "candy", + "lollipop", + "custard", + "honey pot", + "baby bottle", + "glass of milk", + "hot beverage", + 
"teapot", + "teacup without handle", + "sake", + "bottle with popping cork", + "wine glass", + "cocktail glass", + "tropical drink", + "beer mug", + "clinking beer mugs", + "clinking glasses", + "tumbler glass", + "cup with straw", + "bubble tea", + "beverage box", + "mate", + "ice", + "chopsticks", + "fork and knife with plate", + "fork and knife", + "spoon", + "kitchen knife", + "amphora", + "globe showing Europe-Africa", + "globe showing Americas", + "globe showing Asia-Australia", + "globe with meridians", + "world map", + "map of Japan", + "compass", + "snow-capped mountain", + "mountain", + "volcano", + "mount fuji", + "camping", + "beach with umbrella", + "desert", + "desert island", + "national park", + "stadium", + "classical building", + "building construction", + "brick", + "rock", + "wood", + "hut", + "houses", + "derelict house", + "house", + "house with garden", + "office building", + "Japanese post office", + "post office", + "hospital", + "bank", + "hotel", + "love hotel", + "convenience store", + "school", + "department store", + "factory", + "Japanese castle", + "castle", + "wedding", + "Tokyo tower", + "Statue of Liberty", + "church", + "mosque", + "hindu temple", + "synagogue", + "shinto shrine", + "kaaba", + "fountain", + "tent", + "foggy", + "night with stars", + "cityscape", + "sunrise over mountains", + "sunrise", + "cityscape at dusk", + "sunset", + "bridge at night", + "hot springs", + "carousel horse", + "ferris wheel", + "roller coaster", + "barber pole", + "circus tent", + "locomotive", + "railway car", + "high-speed train", + "bullet train", + "train", + "metro", + "light rail", + "station", + "tram", + "monorail", + "mountain railway", + "tram car", + "bus", + "oncoming bus", + "trolleybus", + "minibus", + "ambulance", + "fire engine", + "police car", + "oncoming police car", + "taxi", + "oncoming taxi", + "automobile", + "oncoming automobile", + "sport utility vehicle", + "pickup truck", + "delivery truck", + "articulated lorry", + "tractor", + "racing car", + "motorcycle", + "motor scooter", + "manual wheelchair", + "motorized wheelchair", + "auto rickshaw", + "bicycle", + "kick scooter", + "skateboard", + "roller skate", + "bus stop", + "motorway", + "railway track", + "oil drum", + "fuel pump", + "police car light", + "horizontal traffic light", + "vertical traffic light", + "stop sign", + "construction", + "anchor", + "sailboat", + "canoe", + "speedboat", + "passenger ship", + "ferry", + "motor boat", + "ship", + "airplane", + "small airplane", + "airplane departure", + "airplane arrival", + "parachute", + "seat", + "helicopter", + "suspension railway", + "mountain cableway", + "aerial tramway", + "satellite", + "rocket", + "flying saucer", + "bellhop bell", + "luggage", + "hourglass done", + "hourglass not done", + "watch", + "alarm clock", + "stopwatch", + "timer clock", + "mantelpiece clock", + "twelve o’clock", + "twelve-thirty", + "one o’clock", + "one-thirty", + "two o’clock", + "two-thirty", + "three o’clock", + "three-thirty", + "four o’clock", + "four-thirty", + "five o’clock", + "five-thirty", + "six o’clock", + "six-thirty", + "seven o’clock", + "seven-thirty", + "eight o’clock", + "eight-thirty", + "nine o’clock", + "nine-thirty", + "ten o’clock", + "ten-thirty", + "eleven o’clock", + "eleven-thirty", + "new moon", + "waxing crescent moon", + "first quarter moon", + "waxing gibbous moon", + "full moon", + "waning gibbous moon", + "last quarter moon", + "waning crescent moon", + "crescent moon", + "new moon face", + "first quarter moon face", 
+ "last quarter moon face", + "thermometer", + "sun", + "full moon face", + "sun with face", + "ringed planet", + "star", + "glowing star", + "shooting star", + "milky way", + "cloud", + "sun behind cloud", + "cloud with lightning and rain", + "sun behind small cloud", + "sun behind large cloud", + "sun behind rain cloud", + "cloud with rain", + "cloud with snow", + "cloud with lightning", + "tornado", + "fog", + "wind face", + "cyclone", + "rainbow", + "closed umbrella", + "umbrella", + "umbrella with rain drops", + "umbrella on ground", + "high voltage", + "snowflake", + "snowman", + "snowman without snow", + "comet", + "fire", + "droplet", + "water wave", + "jack-o-lantern", + "Christmas tree", + "fireworks", + "sparkler", + "firecracker", + "sparkles", + "balloon", + "party popper", + "confetti ball", + "tanabata tree", + "pine decoration", + "Japanese dolls", + "carp streamer", + "wind chime", + "moon viewing ceremony", + "red envelope", + "ribbon", + "wrapped gift", + "reminder ribbon", + "admission tickets", + "ticket", + "military medal", + "trophy", + "sports medal", + "1st place medal", + "2nd place medal", + "3rd place medal", + "soccer ball", + "baseball", + "softball", + "basketball", + "volleyball", + "american football", + "rugby football", + "tennis", + "flying disc", + "bowling", + "cricket game", + "field hockey", + "ice hockey", + "lacrosse", + "ping pong", + "badminton", + "boxing glove", + "martial arts uniform", + "goal net", + "flag in hole", + "ice skate", + "fishing pole", + "diving mask", + "running shirt", + "skis", + "sled", + "curling stone", + "direct hit", + "yo-yo", + "kite", + "pool 8 ball", + "crystal ball", + "magic wand", + "nazar amulet", + "video game", + "joystick", + "slot machine", + "game die", + "puzzle piece", + "teddy bear", + "piñata", + "nesting dolls", + "spade suit", + "heart suit", + "diamond suit", + "club suit", + "chess pawn", + "joker", + "mahjong red dragon", + "flower playing cards", + "performing arts", + "framed picture", + "artist palette", + "thread", + "sewing needle", + "yarn", + "knot", + "glasses", + "sunglasses", + "goggles", + "lab coat", + "safety vest", + "necktie", + "t-shirt", + "jeans", + "scarf", + "gloves", + "coat", + "socks", + "dress", + "kimono", + "sari", + "one-piece swimsuit", + "briefs", + "shorts", + "bikini", + "woman’s clothes", + "purse", + "handbag", + "clutch bag", + "shopping bags", + "backpack", + "thong sandal", + "man’s shoe", + "running shoe", + "hiking boot", + "flat shoe", + "high-heeled shoe", + "woman’s sandal", + "ballet shoes", + "woman’s boot", + "crown", + "woman’s hat", + "top hat", + "graduation cap", + "billed cap", + "military helmet", + "rescue worker’s helmet", + "prayer beads", + "lipstick", + "ring", + "gem stone", + "muted speaker", + "speaker low volume", + "speaker medium volume", + "speaker high volume", + "loudspeaker", + "megaphone", + "postal horn", + "bell", + "bell with slash", + "musical score", + "musical note", + "musical notes", + "studio microphone", + "level slider", + "control knobs", + "microphone", + "headphone", + "radio", + "saxophone", + "accordion", + "guitar", + "musical keyboard", + "trumpet", + "violin", + "banjo", + "drum", + "long drum", + "mobile phone", + "mobile phone with arrow", + "telephone", + "telephone receiver", + "pager", + "fax machine", + "battery", + "electric plug", + "laptop", + "desktop computer", + "printer", + "keyboard", + "computer mouse", + "trackball", + "computer disk", + "floppy disk", + "optical disk", + "dvd", + "abacus", + "movie 
camera", + "film frames", + "film projector", + "clapper board", + "television", + "camera", + "camera with flash", + "video camera", + "videocassette", + "magnifying glass tilted left", + "magnifying glass tilted right", + "candle", + "light bulb", + "flashlight", + "red paper lantern", + "diya lamp", + "notebook with decorative cover", + "closed book", + "open book", + "green book", + "blue book", + "orange book", + "books", + "notebook", + "ledger", + "page with curl", + "scroll", + "page facing up", + "newspaper", + "rolled-up newspaper", + "bookmark tabs", + "bookmark", + "label", + "money bag", + "coin", + "yen banknote", + "dollar banknote", + "euro banknote", + "pound banknote", + "money with wings", + "credit card", + "receipt", + "chart increasing with yen", + "envelope", + "e-mail", + "incoming envelope", + "envelope with arrow", + "outbox tray", + "inbox tray", + "package", + "closed mailbox with raised flag", + "closed mailbox with lowered flag", + "open mailbox with raised flag", + "open mailbox with lowered flag", + "postbox", + "ballot box with ballot", + "pencil", + "black nib", + "fountain pen", + "pen", + "paintbrush", + "crayon", + "memo", + "briefcase", + "file folder", + "open file folder", + "card index dividers", + "calendar", + "tear-off calendar", + "spiral notepad", + "spiral calendar", + "card index", + "chart increasing", + "chart decreasing", + "bar chart", + "clipboard", + "pushpin", + "round pushpin", + "paperclip", + "linked paperclips", + "straight ruler", + "triangular ruler", + "scissors", + "card file box", + "file cabinet", + "wastebasket", + "locked", + "unlocked", + "locked with pen", + "locked with key", + "key", + "old key", + "hammer", + "axe", + "pick", + "hammer and pick", + "hammer and wrench", + "dagger", + "crossed swords", + "pistol", + "boomerang", + "bow and arrow", + "shield", + "carpentry saw", + "wrench", + "screwdriver", + "nut and bolt", + "gear", + "clamp", + "balance scale", + "white cane", + "link", + "chains", + "hook", + "toolbox", + "magnet", + "ladder", + "alembic", + "test tube", + "petri dish", + "dna", + "microscope", + "telescope", + "satellite antenna", + "syringe", + "drop of blood", + "pill", + "adhesive bandage", + "stethoscope", + "door", + "elevator", + "mirror", + "window", + "bed", + "couch and lamp", + "chair", + "toilet", + "plunger", + "shower", + "bathtub", + "mouse trap", + "razor", + "lotion bottle", + "safety pin", + "broom", + "basket", + "roll of paper", + "bucket", + "soap", + "toothbrush", + "sponge", + "fire extinguisher", + "shopping cart", + "cigarette", + "coffin", + "headstone", + "funeral urn", + "moai", + "placard", + "ATM sign", + "litter in bin sign", + "potable water", + "wheelchair symbol", + "men’s room", + "women’s room", + "restroom", + "baby symbol", + "water closet", + "passport control", + "customs", + "baggage claim", + "left luggage", + "warning", + "children crossing", + "no entry", + "prohibited", + "no bicycles", + "no smoking", + "no littering", + "non-potable water", + "no pedestrians", + "no mobile phones", + "no one under eighteen", + "radioactive", + "biohazard", + "up arrow", + "up-right arrow", + "right arrow", + "down-right arrow", + "down arrow", + "down-left arrow", + "left arrow", + "up-left arrow", + "up-down arrow", + "left-right arrow", + "right arrow curving left", + "left arrow curving right", + "right arrow curving up", + "right arrow curving down", + "clockwise vertical arrows", + "counterclockwise arrows button", + "BACK arrow", + "END arrow", + "ON! 
arrow", + "SOON arrow", + "TOP arrow", + "place of worship", + "atom symbol", + "om", + "star of David", + "wheel of dharma", + "yin yang", + "latin cross", + "orthodox cross", + "star and crescent", + "peace symbol", + "menorah", + "dotted six-pointed star", + "Aries", + "Taurus", + "Gemini", + "Cancer", + "Leo", + "Virgo", + "Libra", + "Scorpio", + "Sagittarius", + "Capricorn", + "Aquarius", + "Pisces", + "Ophiuchus", + "shuffle tracks button", + "repeat button", + "repeat single button", + "play button", + "fast-forward button", + "next track button", + "play or pause button", + "reverse button", + "fast reverse button", + "last track button", + "upwards button", + "fast up button", + "downwards button", + "fast down button", + "pause button", + "stop button", + "record button", + "eject button", + "cinema", + "dim button", + "bright button", + "antenna bars", + "vibration mode", + "mobile phone off", + "female sign", + "male sign", + "transgender symbol", + "multiply", + "plus", + "minus", + "divide", + "infinity", + "double exclamation mark", + "exclamation question mark", + "question mark", + "white question mark", + "white exclamation mark", + "exclamation mark", + "wavy dash", + "currency exchange", + "heavy dollar sign", + "medical symbol", + "recycling symbol", + "fleur-de-lis", + "trident emblem", + "name badge", + "Japanese symbol for beginner", + "hollow red circle", + "check mark button", + "check box with check", + "check mark", + "cross mark", + "cross mark button", + "curly loop", + "double curly loop", + "part alternation mark", + "eight-spoked asterisk", + "eight-pointed star", + "sparkle", + "copyright", + "registered", + "trade mark", + "keycap: #", + "keycap: *", + "keycap: 0", + "keycap: 1", + "keycap: 2", + "keycap: 3", + "keycap: 4", + "keycap: 5", + "keycap: 6", + "keycap: 7", + "keycap: 8", + "keycap: 9", + "keycap: 10", + "input latin uppercase", + "input latin lowercase", + "input numbers", + "input symbols", + "input latin letters", + "A button (blood type)", + "AB button (blood type)", + "B button (blood type)", + "CL button", + "COOL button", + "FREE button", + "information", + "ID button", + "circled M", + "NEW button", + "NG button", + "O button (blood type)", + "OK button", + "P button", + "SOS button", + "UP! 
button", + "VS button", + "Japanese “here” button", + "Japanese “service charge” button", + "Japanese “monthly amount” button", + "Japanese “not free of charge” button", + "Japanese “reserved” button", + "Japanese “bargain” button", + "Japanese “discount” button", + "Japanese “free of charge” button", + "Japanese “prohibited” button", + "Japanese “acceptable” button", + "Japanese “application” button", + "Japanese “passing grade” button", + "Japanese “vacancy” button", + "Japanese “congratulations” button", + "Japanese “secret” button", + "Japanese “open for business” button", + "Japanese “no vacancy” button", + "red circle", + "orange circle", + "yellow circle", + "green circle", + "blue circle", + "purple circle", + "brown circle", + "black circle", + "white circle", + "red square", + "orange square", + "yellow square", + "green square", + "blue square", + "purple square", + "brown square", + "black large square", + "white large square", + "black medium square", + "white medium square", + "black medium-small square", + "white medium-small square", + "black small square", + "white small square", + "large orange diamond", + "large blue diamond", + "small orange diamond", + "small blue diamond", + "red triangle pointed up", + "red triangle pointed down", + "diamond with a dot", + "radio button", + "white square button", + "black square button", + "chequered flag", + "triangular flag", + "crossed flags", + "black flag", + "white flag", + "rainbow flag", + "transgender flag", + "pirate flag", + "flag: Ascension Island", + "flag: Andorra", + "flag: United Arab Emirates", + "flag: Afghanistan", + "flag: Antigua & Barbuda", + "flag: Anguilla", + "flag: Albania", + "flag: Armenia", + "flag: Angola", + "flag: Antarctica", + "flag: Argentina", + "flag: American Samoa", + "flag: Austria", + "flag: Australia", + "flag: Aruba", + "flag: Åland Islands", + "flag: Azerbaijan", + "flag: Bosnia & Herzegovina", + "flag: Barbados", + "flag: Bangladesh", + "flag: Belgium", + "flag: Burkina Faso", + "flag: Bulgaria", + "flag: Bahrain", + "flag: Burundi", + "flag: Benin", + "flag: St. 
Barthélemy", + "flag: Bermuda", + "flag: Brunei", + "flag: Bolivia", + "flag: Caribbean Netherlands", + "flag: Brazil", + "flag: Bahamas", + "flag: Bhutan", + "flag: Bouvet Island", + "flag: Botswana", + "flag: Belarus", + "flag: Belize", + "flag: Canada", + "flag: Cocos (Keeling) Islands", + "flag: Congo - Kinshasa", + "flag: Central African Republic", + "flag: Congo - Brazzaville", + "flag: Switzerland", + "flag: Côte d’Ivoire", + "flag: Cook Islands", + "flag: Chile", + "flag: Cameroon", + "flag: China", + "flag: Colombia", + "flag: Clipperton Island", + "flag: Costa Rica", + "flag: Cuba", + "flag: Cape Verde", + "flag: Curaçao", + "flag: Christmas Island", + "flag: Cyprus", + "flag: Czechia", + "flag: Germany", + "flag: Diego Garcia", + "flag: Djibouti", + "flag: Denmark", + "flag: Dominica", + "flag: Dominican Republic", + "flag: Algeria", + "flag: Ceuta & Melilla", + "flag: Ecuador", + "flag: Estonia", + "flag: Egypt", + "flag: Western Sahara", + "flag: Eritrea", + "flag: Spain", + "flag: Ethiopia", + "flag: European Union", + "flag: Finland", + "flag: Fiji", + "flag: Falkland Islands", + "flag: Micronesia", + "flag: Faroe Islands", + "flag: France", + "flag: Gabon", + "flag: United Kingdom", + "flag: Grenada", + "flag: Georgia", + "flag: French Guiana", + "flag: Guernsey", + "flag: Ghana", + "flag: Gibraltar", + "flag: Greenland", + "flag: Gambia", + "flag: Guinea", + "flag: Guadeloupe", + "flag: Equatorial Guinea", + "flag: Greece", + "flag: South Georgia & South Sandwich Islands", + "flag: Guatemala", + "flag: Guam", + "flag: Guinea-Bissau", + "flag: Guyana", + "flag: Hong Kong SAR China", + "flag: Heard & McDonald Islands", + "flag: Honduras", + "flag: Croatia", + "flag: Haiti", + "flag: Hungary", + "flag: Canary Islands", + "flag: Indonesia", + "flag: Ireland", + "flag: Israel", + "flag: Isle of Man", + "flag: India", + "flag: British Indian Ocean Territory", + "flag: Iraq", + "flag: Iran", + "flag: Iceland", + "flag: Italy", + "flag: Jersey", + "flag: Jamaica", + "flag: Jordan", + "flag: Japan", + "flag: Kenya", + "flag: Kyrgyzstan", + "flag: Cambodia", + "flag: Kiribati", + "flag: Comoros", + "flag: St. Kitts & Nevis", + "flag: North Korea", + "flag: South Korea", + "flag: Kuwait", + "flag: Cayman Islands", + "flag: Kazakhstan", + "flag: Laos", + "flag: Lebanon", + "flag: St. Lucia", + "flag: Liechtenstein", + "flag: Sri Lanka", + "flag: Liberia", + "flag: Lesotho", + "flag: Lithuania", + "flag: Luxembourg", + "flag: Latvia", + "flag: Libya", + "flag: Morocco", + "flag: Monaco", + "flag: Moldova", + "flag: Montenegro", + "flag: St. Martin", + "flag: Madagascar", + "flag: Marshall Islands", + "flag: North Macedonia", + "flag: Mali", + "flag: Myanmar (Burma)", + "flag: Mongolia", + "flag: Macao SAR China", + "flag: Northern Mariana Islands", + "flag: Martinique", + "flag: Mauritania", + "flag: Montserrat", + "flag: Malta", + "flag: Mauritius", + "flag: Maldives", + "flag: Malawi", + "flag: Mexico", + "flag: Malaysia", + "flag: Mozambique", + "flag: Namibia", + "flag: New Caledonia", + "flag: Niger", + "flag: Norfolk Island", + "flag: Nigeria", + "flag: Nicaragua", + "flag: Netherlands", + "flag: Norway", + "flag: Nepal", + "flag: Nauru", + "flag: Niue", + "flag: New Zealand", + "flag: Oman", + "flag: Panama", + "flag: Peru", + "flag: French Polynesia", + "flag: Papua New Guinea", + "flag: Philippines", + "flag: Pakistan", + "flag: Poland", + "flag: St. 
Pierre & Miquelon", + "flag: Pitcairn Islands", + "flag: Puerto Rico", + "flag: Palestinian Territories", + "flag: Portugal", + "flag: Palau", + "flag: Paraguay", + "flag: Qatar", + "flag: Réunion", + "flag: Romania", + "flag: Serbia", + "flag: Russia", + "flag: Rwanda", + "flag: Saudi Arabia", + "flag: Solomon Islands", + "flag: Seychelles", + "flag: Sudan", + "flag: Sweden", + "flag: Singapore", + "flag: St. Helena", + "flag: Slovenia", + "flag: Svalbard & Jan Mayen", + "flag: Slovakia", + "flag: Sierra Leone", + "flag: San Marino", + "flag: Senegal", + "flag: Somalia", + "flag: Suriname", + "flag: South Sudan", + "flag: São Tomé & Príncipe", + "flag: El Salvador", + "flag: Sint Maarten", + "flag: Syria", + "flag: Eswatini", + "flag: Tristan da Cunha", + "flag: Turks & Caicos Islands", + "flag: Chad", + "flag: French Southern Territories", + "flag: Togo", + "flag: Thailand", + "flag: Tajikistan", + "flag: Tokelau", + "flag: Timor-Leste", + "flag: Turkmenistan", + "flag: Tunisia", + "flag: Tonga", + "flag: Turkey", + "flag: Trinidad & Tobago", + "flag: Tuvalu", + "flag: Taiwan", + "flag: Tanzania", + "flag: Ukraine", + "flag: Uganda", + "flag: U.S. Outlying Islands", + "flag: United Nations", + "flag: United States", + "flag: Uruguay", + "flag: Uzbekistan", + "flag: Vatican City", + "flag: St. Vincent & Grenadines", + "flag: Venezuela", + "flag: British Virgin Islands", + "flag: U.S. Virgin Islands", + "flag: Vietnam", + "flag: Vanuatu", + "flag: Wallis & Futuna", + "flag: Samoa", + "flag: Kosovo", + "flag: Yemen", + "flag: Mayotte", + "flag: South Africa", + "flag: Zambia", + "flag: Zimbabwe", + "flag: England", + "flag: Scotland", + "flag: Wales", + }, + "category": { + "Smileys & Emotion", + "People & Body", + "Animals & Nature", + "Food & Drink", + "Travel & Places", + "Activities", + "Objects", + "Symbols", + "Flags", + }, + "alias": { + "grinning", + "smiley", + "smile", + "grin", + "laughing", + "satisfied", + "sweat_smile", + "rofl", + "joy", + "slightly_smiling_face", + "upside_down_face", + "wink", + "blush", + "innocent", + "smiling_face_with_three_hearts", + "heart_eyes", + "star_struck", + "kissing_heart", + "kissing", + "relaxed", + "kissing_closed_eyes", + "kissing_smiling_eyes", + "smiling_face_with_tear", + "yum", + "stuck_out_tongue", + "stuck_out_tongue_winking_eye", + "zany_face", + "stuck_out_tongue_closed_eyes", + "money_mouth_face", + "hugs", + "hand_over_mouth", + "shushing_face", + "thinking", + "zipper_mouth_face", + "raised_eyebrow", + "neutral_face", + "expressionless", + "no_mouth", + "smirk", + "unamused", + "roll_eyes", + "grimacing", + "lying_face", + "relieved", + "pensive", + "sleepy", + "drooling_face", + "sleeping", + "mask", + "face_with_thermometer", + "face_with_head_bandage", + "nauseated_face", + "vomiting_face", + "sneezing_face", + "hot_face", + "cold_face", + "woozy_face", + "dizzy_face", + "exploding_head", + "cowboy_hat_face", + "partying_face", + "disguised_face", + "sunglasses", + "nerd_face", + "monocle_face", + "confused", + "worried", + "slightly_frowning_face", + "frowning_face", + "open_mouth", + "hushed", + "astonished", + "flushed", + "pleading_face", + "frowning", + "anguished", + "fearful", + "cold_sweat", + "disappointed_relieved", + "cry", + "sob", + "scream", + "confounded", + "persevere", + "disappointed", + "sweat", + "weary", + "tired_face", + "yawning_face", + "triumph", + "rage", + "pout", + "angry", + "cursing_face", + "smiling_imp", + "imp", + "skull", + "skull_and_crossbones", + "hankey", + "poop", + "shit", + 
"clown_face", + "japanese_ogre", + "japanese_goblin", + "ghost", + "alien", + "space_invader", + "robot", + "smiley_cat", + "smile_cat", + "joy_cat", + "heart_eyes_cat", + "smirk_cat", + "kissing_cat", + "scream_cat", + "crying_cat_face", + "pouting_cat", + "see_no_evil", + "hear_no_evil", + "speak_no_evil", + "kiss", + "love_letter", + "cupid", + "gift_heart", + "sparkling_heart", + "heartpulse", + "heartbeat", + "revolving_hearts", + "two_hearts", + "heart_decoration", + "heavy_heart_exclamation", + "broken_heart", + "heart", + "orange_heart", + "yellow_heart", + "green_heart", + "blue_heart", + "purple_heart", + "brown_heart", + "black_heart", + "white_heart", + "100", + "anger", + "boom", + "collision", + "dizzy", + "sweat_drops", + "dash", + "hole", + "bomb", + "speech_balloon", + "eye_speech_bubble", + "left_speech_bubble", + "right_anger_bubble", + "thought_balloon", + "zzz", + "wave", + "raised_back_of_hand", + "raised_hand_with_fingers_splayed", + "hand", + "raised_hand", + "vulcan_salute", + "ok_hand", + "pinched_fingers", + "pinching_hand", + "v", + "crossed_fingers", + "love_you_gesture", + "metal", + "call_me_hand", + "point_left", + "point_right", + "point_up_2", + "middle_finger", + "fu", + "point_down", + "point_up", + "+1", + "thumbsup", + "-1", + "thumbsdown", + "fist_raised", + "fist", + "fist_oncoming", + "facepunch", + "punch", + "fist_left", + "fist_right", + "clap", + "raised_hands", + "open_hands", + "palms_up_together", + "handshake", + "pray", + "writing_hand", + "nail_care", + "selfie", + "muscle", + "mechanical_arm", + "mechanical_leg", + "leg", + "foot", + "ear", + "ear_with_hearing_aid", + "nose", + "brain", + "anatomical_heart", + "lungs", + "tooth", + "bone", + "eyes", + "eye", + "tongue", + "lips", + "baby", + "child", + "boy", + "girl", + "adult", + "blond_haired_person", + "man", + "bearded_person", + "red_haired_man", + "curly_haired_man", + "white_haired_man", + "bald_man", + "woman", + "red_haired_woman", + "person_red_hair", + "curly_haired_woman", + "person_curly_hair", + "white_haired_woman", + "person_white_hair", + "bald_woman", + "person_bald", + "blond_haired_woman", + "blonde_woman", + "blond_haired_man", + "older_adult", + "older_man", + "older_woman", + "frowning_person", + "frowning_man", + "frowning_woman", + "pouting_face", + "pouting_man", + "pouting_woman", + "no_good", + "no_good_man", + "ng_man", + "no_good_woman", + "ng_woman", + "ok_person", + "ok_man", + "ok_woman", + "tipping_hand_person", + "information_desk_person", + "tipping_hand_man", + "sassy_man", + "tipping_hand_woman", + "sassy_woman", + "raising_hand", + "raising_hand_man", + "raising_hand_woman", + "deaf_person", + "deaf_man", + "deaf_woman", + "bow", + "bowing_man", + "bowing_woman", + "facepalm", + "man_facepalming", + "woman_facepalming", + "shrug", + "man_shrugging", + "woman_shrugging", + "health_worker", + "man_health_worker", + "woman_health_worker", + "student", + "man_student", + "woman_student", + "teacher", + "man_teacher", + "woman_teacher", + "judge", + "man_judge", + "woman_judge", + "farmer", + "man_farmer", + "woman_farmer", + "cook", + "man_cook", + "woman_cook", + "mechanic", + "man_mechanic", + "woman_mechanic", + "factory_worker", + "man_factory_worker", + "woman_factory_worker", + "office_worker", + "man_office_worker", + "woman_office_worker", + "scientist", + "man_scientist", + "woman_scientist", + "technologist", + "man_technologist", + "woman_technologist", + "singer", + "man_singer", + "woman_singer", + "artist", + "man_artist", + 
"woman_artist", + "pilot", + "man_pilot", + "woman_pilot", + "astronaut", + "man_astronaut", + "woman_astronaut", + "firefighter", + "man_firefighter", + "woman_firefighter", + "police_officer", + "cop", + "policeman", + "policewoman", + "detective", + "male_detective", + "female_detective", + "guard", + "guardsman", + "guardswoman", + "ninja", + "construction_worker", + "construction_worker_man", + "construction_worker_woman", + "prince", + "princess", + "person_with_turban", + "man_with_turban", + "woman_with_turban", + "man_with_gua_pi_mao", + "woman_with_headscarf", + "person_in_tuxedo", + "man_in_tuxedo", + "woman_in_tuxedo", + "person_with_veil", + "man_with_veil", + "woman_with_veil", + "bride_with_veil", + "pregnant_woman", + "breast_feeding", + "woman_feeding_baby", + "man_feeding_baby", + "person_feeding_baby", + "angel", + "santa", + "mrs_claus", + "mx_claus", + "superhero", + "superhero_man", + "superhero_woman", + "supervillain", + "supervillain_man", + "supervillain_woman", + "mage", + "mage_man", + "mage_woman", + "fairy", + "fairy_man", + "fairy_woman", + "vampire", + "vampire_man", + "vampire_woman", + "merperson", + "merman", + "mermaid", + "elf", + "elf_man", + "elf_woman", + "genie", + "genie_man", + "genie_woman", + "zombie", + "zombie_man", + "zombie_woman", + "massage", + "massage_man", + "massage_woman", + "haircut", + "haircut_man", + "haircut_woman", + "walking", + "walking_man", + "walking_woman", + "standing_person", + "standing_man", + "standing_woman", + "kneeling_person", + "kneeling_man", + "kneeling_woman", + "person_with_probing_cane", + "man_with_probing_cane", + "woman_with_probing_cane", + "person_in_motorized_wheelchair", + "man_in_motorized_wheelchair", + "woman_in_motorized_wheelchair", + "person_in_manual_wheelchair", + "man_in_manual_wheelchair", + "woman_in_manual_wheelchair", + "runner", + "running", + "running_man", + "running_woman", + "woman_dancing", + "dancer", + "man_dancing", + "business_suit_levitating", + "dancers", + "dancing_men", + "dancing_women", + "sauna_person", + "sauna_man", + "sauna_woman", + "climbing", + "climbing_man", + "climbing_woman", + "person_fencing", + "horse_racing", + "skier", + "snowboarder", + "golfing", + "golfing_man", + "golfing_woman", + "surfer", + "surfing_man", + "surfing_woman", + "rowboat", + "rowing_man", + "rowing_woman", + "swimmer", + "swimming_man", + "swimming_woman", + "bouncing_ball_person", + "bouncing_ball_man", + "basketball_man", + "bouncing_ball_woman", + "basketball_woman", + "weight_lifting", + "weight_lifting_man", + "weight_lifting_woman", + "bicyclist", + "biking_man", + "biking_woman", + "mountain_bicyclist", + "mountain_biking_man", + "mountain_biking_woman", + "cartwheeling", + "man_cartwheeling", + "woman_cartwheeling", + "wrestling", + "men_wrestling", + "women_wrestling", + "water_polo", + "man_playing_water_polo", + "woman_playing_water_polo", + "handball_person", + "man_playing_handball", + "woman_playing_handball", + "juggling_person", + "man_juggling", + "woman_juggling", + "lotus_position", + "lotus_position_man", + "lotus_position_woman", + "bath", + "sleeping_bed", + "people_holding_hands", + "two_women_holding_hands", + "couple", + "two_men_holding_hands", + "couplekiss", + "couplekiss_man_woman", + "couplekiss_man_man", + "couplekiss_woman_woman", + "couple_with_heart", + "couple_with_heart_woman_man", + "couple_with_heart_man_man", + "couple_with_heart_woman_woman", + "family", + "family_man_woman_boy", + "family_man_woman_girl", + "family_man_woman_girl_boy", + 
"family_man_woman_boy_boy", + "family_man_woman_girl_girl", + "family_man_man_boy", + "family_man_man_girl", + "family_man_man_girl_boy", + "family_man_man_boy_boy", + "family_man_man_girl_girl", + "family_woman_woman_boy", + "family_woman_woman_girl", + "family_woman_woman_girl_boy", + "family_woman_woman_boy_boy", + "family_woman_woman_girl_girl", + "family_man_boy", + "family_man_boy_boy", + "family_man_girl", + "family_man_girl_boy", + "family_man_girl_girl", + "family_woman_boy", + "family_woman_boy_boy", + "family_woman_girl", + "family_woman_girl_boy", + "family_woman_girl_girl", + "speaking_head", + "bust_in_silhouette", + "busts_in_silhouette", + "people_hugging", + "footprints", + "monkey_face", + "monkey", + "gorilla", + "orangutan", + "dog", + "dog2", + "guide_dog", + "service_dog", + "poodle", + "wolf", + "fox_face", + "raccoon", + "cat", + "cat2", + "black_cat", + "lion", + "tiger", + "tiger2", + "leopard", + "horse", + "racehorse", + "unicorn", + "zebra", + "deer", + "bison", + "cow", + "ox", + "water_buffalo", + "cow2", + "pig", + "pig2", + "boar", + "pig_nose", + "ram", + "sheep", + "goat", + "dromedary_camel", + "camel", + "llama", + "giraffe", + "elephant", + "mammoth", + "rhinoceros", + "hippopotamus", + "mouse", + "mouse2", + "rat", + "hamster", + "rabbit", + "rabbit2", + "chipmunk", + "beaver", + "hedgehog", + "bat", + "bear", + "polar_bear", + "koala", + "panda_face", + "sloth", + "otter", + "skunk", + "kangaroo", + "badger", + "feet", + "paw_prints", + "turkey", + "chicken", + "rooster", + "hatching_chick", + "baby_chick", + "hatched_chick", + "bird", + "penguin", + "dove", + "eagle", + "duck", + "swan", + "owl", + "dodo", + "feather", + "flamingo", + "peacock", + "parrot", + "frog", + "crocodile", + "turtle", + "lizard", + "snake", + "dragon_face", + "dragon", + "sauropod", + "t-rex", + "whale", + "whale2", + "dolphin", + "flipper", + "seal", + "fish", + "tropical_fish", + "blowfish", + "shark", + "octopus", + "shell", + "snail", + "butterfly", + "bug", + "ant", + "bee", + "honeybee", + "beetle", + "lady_beetle", + "cricket", + "cockroach", + "spider", + "spider_web", + "scorpion", + "mosquito", + "fly", + "worm", + "microbe", + "bouquet", + "cherry_blossom", + "white_flower", + "rosette", + "rose", + "wilted_flower", + "hibiscus", + "sunflower", + "blossom", + "tulip", + "seedling", + "potted_plant", + "evergreen_tree", + "deciduous_tree", + "palm_tree", + "cactus", + "ear_of_rice", + "herb", + "shamrock", + "four_leaf_clover", + "maple_leaf", + "fallen_leaf", + "leaves", + "grapes", + "melon", + "watermelon", + "tangerine", + "orange", + "mandarin", + "lemon", + "banana", + "pineapple", + "mango", + "apple", + "green_apple", + "pear", + "peach", + "cherries", + "strawberry", + "blueberries", + "kiwi_fruit", + "tomato", + "olive", + "coconut", + "avocado", + "eggplant", + "potato", + "carrot", + "corn", + "hot_pepper", + "bell_pepper", + "cucumber", + "leafy_green", + "broccoli", + "garlic", + "onion", + "mushroom", + "peanuts", + "chestnut", + "bread", + "croissant", + "baguette_bread", + "flatbread", + "pretzel", + "bagel", + "pancakes", + "waffle", + "cheese", + "meat_on_bone", + "poultry_leg", + "cut_of_meat", + "bacon", + "hamburger", + "fries", + "pizza", + "hotdog", + "sandwich", + "taco", + "burrito", + "tamale", + "stuffed_flatbread", + "falafel", + "egg", + "fried_egg", + "shallow_pan_of_food", + "stew", + "fondue", + "bowl_with_spoon", + "green_salad", + "popcorn", + "butter", + "salt", + "canned_food", + "bento", + "rice_cracker", + "rice_ball", + 
"rice", + "curry", + "ramen", + "spaghetti", + "sweet_potato", + "oden", + "sushi", + "fried_shrimp", + "fish_cake", + "moon_cake", + "dango", + "dumpling", + "fortune_cookie", + "takeout_box", + "crab", + "lobster", + "shrimp", + "squid", + "oyster", + "icecream", + "shaved_ice", + "ice_cream", + "doughnut", + "cookie", + "birthday", + "cake", + "cupcake", + "pie", + "chocolate_bar", + "candy", + "lollipop", + "custard", + "honey_pot", + "baby_bottle", + "milk_glass", + "coffee", + "teapot", + "tea", + "sake", + "champagne", + "wine_glass", + "cocktail", + "tropical_drink", + "beer", + "beers", + "clinking_glasses", + "tumbler_glass", + "cup_with_straw", + "bubble_tea", + "beverage_box", + "mate", + "ice_cube", + "chopsticks", + "plate_with_cutlery", + "fork_and_knife", + "spoon", + "hocho", + "knife", + "amphora", + "earth_africa", + "earth_americas", + "earth_asia", + "globe_with_meridians", + "world_map", + "japan", + "compass", + "mountain_snow", + "mountain", + "volcano", + "mount_fuji", + "camping", + "beach_umbrella", + "desert", + "desert_island", + "national_park", + "stadium", + "classical_building", + "building_construction", + "bricks", + "rock", + "wood", + "hut", + "houses", + "derelict_house", + "house", + "house_with_garden", + "office", + "post_office", + "european_post_office", + "hospital", + "bank", + "hotel", + "love_hotel", + "convenience_store", + "school", + "department_store", + "factory", + "japanese_castle", + "european_castle", + "wedding", + "tokyo_tower", + "statue_of_liberty", + "church", + "mosque", + "hindu_temple", + "synagogue", + "shinto_shrine", + "kaaba", + "fountain", + "tent", + "foggy", + "night_with_stars", + "cityscape", + "sunrise_over_mountains", + "sunrise", + "city_sunset", + "city_sunrise", + "bridge_at_night", + "hotsprings", + "carousel_horse", + "ferris_wheel", + "roller_coaster", + "barber", + "circus_tent", + "steam_locomotive", + "railway_car", + "bullettrain_side", + "bullettrain_front", + "train2", + "metro", + "light_rail", + "station", + "tram", + "monorail", + "mountain_railway", + "train", + "bus", + "oncoming_bus", + "trolleybus", + "minibus", + "ambulance", + "fire_engine", + "police_car", + "oncoming_police_car", + "taxi", + "oncoming_taxi", + "car", + "red_car", + "oncoming_automobile", + "blue_car", + "pickup_truck", + "truck", + "articulated_lorry", + "tractor", + "racing_car", + "motorcycle", + "motor_scooter", + "manual_wheelchair", + "motorized_wheelchair", + "auto_rickshaw", + "bike", + "kick_scooter", + "skateboard", + "roller_skate", + "busstop", + "motorway", + "railway_track", + "oil_drum", + "fuelpump", + "rotating_light", + "traffic_light", + "vertical_traffic_light", + "stop_sign", + "construction", + "anchor", + "boat", + "sailboat", + "canoe", + "speedboat", + "passenger_ship", + "ferry", + "motor_boat", + "ship", + "airplane", + "small_airplane", + "flight_departure", + "flight_arrival", + "parachute", + "seat", + "helicopter", + "suspension_railway", + "mountain_cableway", + "aerial_tramway", + "artificial_satellite", + "rocket", + "flying_saucer", + "bellhop_bell", + "luggage", + "hourglass", + "hourglass_flowing_sand", + "watch", + "alarm_clock", + "stopwatch", + "timer_clock", + "mantelpiece_clock", + "clock12", + "clock1230", + "clock1", + "clock130", + "clock2", + "clock230", + "clock3", + "clock330", + "clock4", + "clock430", + "clock5", + "clock530", + "clock6", + "clock630", + "clock7", + "clock730", + "clock8", + "clock830", + "clock9", + "clock930", + "clock10", + "clock1030", + "clock11", + 
"clock1130", + "new_moon", + "waxing_crescent_moon", + "first_quarter_moon", + "moon", + "waxing_gibbous_moon", + "full_moon", + "waning_gibbous_moon", + "last_quarter_moon", + "waning_crescent_moon", + "crescent_moon", + "new_moon_with_face", + "first_quarter_moon_with_face", + "last_quarter_moon_with_face", + "thermometer", + "sunny", + "full_moon_with_face", + "sun_with_face", + "ringed_planet", + "star", + "star2", + "stars", + "milky_way", + "cloud", + "partly_sunny", + "cloud_with_lightning_and_rain", + "sun_behind_small_cloud", + "sun_behind_large_cloud", + "sun_behind_rain_cloud", + "cloud_with_rain", + "cloud_with_snow", + "cloud_with_lightning", + "tornado", + "fog", + "wind_face", + "cyclone", + "rainbow", + "closed_umbrella", + "open_umbrella", + "umbrella", + "parasol_on_ground", + "zap", + "snowflake", + "snowman_with_snow", + "snowman", + "comet", + "fire", + "droplet", + "ocean", + "jack_o_lantern", + "christmas_tree", + "fireworks", + "sparkler", + "firecracker", + "sparkles", + "balloon", + "tada", + "confetti_ball", + "tanabata_tree", + "bamboo", + "dolls", + "flags", + "wind_chime", + "rice_scene", + "red_envelope", + "ribbon", + "gift", + "reminder_ribbon", + "tickets", + "ticket", + "medal_military", + "trophy", + "medal_sports", + "1st_place_medal", + "2nd_place_medal", + "3rd_place_medal", + "soccer", + "baseball", + "softball", + "basketball", + "volleyball", + "football", + "rugby_football", + "tennis", + "flying_disc", + "bowling", + "cricket_game", + "field_hockey", + "ice_hockey", + "lacrosse", + "ping_pong", + "badminton", + "boxing_glove", + "martial_arts_uniform", + "goal_net", + "golf", + "ice_skate", + "fishing_pole_and_fish", + "diving_mask", + "running_shirt_with_sash", + "ski", + "sled", + "curling_stone", + "dart", + "yo_yo", + "kite", + "8ball", + "crystal_ball", + "magic_wand", + "nazar_amulet", + "video_game", + "joystick", + "slot_machine", + "game_die", + "jigsaw", + "teddy_bear", + "pi_ata", + "nesting_dolls", + "spades", + "hearts", + "diamonds", + "clubs", + "chess_pawn", + "black_joker", + "mahjong", + "flower_playing_cards", + "performing_arts", + "framed_picture", + "art", + "thread", + "sewing_needle", + "yarn", + "knot", + "eyeglasses", + "dark_sunglasses", + "goggles", + "lab_coat", + "safety_vest", + "necktie", + "shirt", + "tshirt", + "jeans", + "scarf", + "gloves", + "coat", + "socks", + "dress", + "kimono", + "sari", + "one_piece_swimsuit", + "swim_brief", + "shorts", + "bikini", + "womans_clothes", + "purse", + "handbag", + "pouch", + "shopping", + "school_satchel", + "thong_sandal", + "mans_shoe", + "shoe", + "athletic_shoe", + "hiking_boot", + "flat_shoe", + "high_heel", + "sandal", + "ballet_shoes", + "boot", + "crown", + "womans_hat", + "tophat", + "mortar_board", + "billed_cap", + "military_helmet", + "rescue_worker_helmet", + "prayer_beads", + "lipstick", + "ring", + "gem", + "mute", + "speaker", + "sound", + "loud_sound", + "loudspeaker", + "mega", + "postal_horn", + "bell", + "no_bell", + "musical_score", + "musical_note", + "notes", + "studio_microphone", + "level_slider", + "control_knobs", + "microphone", + "headphones", + "radio", + "saxophone", + "accordion", + "guitar", + "musical_keyboard", + "trumpet", + "violin", + "banjo", + "drum", + "long_drum", + "iphone", + "calling", + "phone", + "telephone", + "telephone_receiver", + "pager", + "fax", + "battery", + "electric_plug", + "computer", + "desktop_computer", + "printer", + "keyboard", + "computer_mouse", + "trackball", + "minidisc", + "floppy_disk", + "cd", + "dvd", 
+ "abacus", + "movie_camera", + "film_strip", + "film_projector", + "clapper", + "tv", + "camera", + "camera_flash", + "video_camera", + "vhs", + "mag", + "mag_right", + "candle", + "bulb", + "flashlight", + "izakaya_lantern", + "lantern", + "diya_lamp", + "notebook_with_decorative_cover", + "closed_book", + "book", + "open_book", + "green_book", + "blue_book", + "orange_book", + "books", + "notebook", + "ledger", + "page_with_curl", + "scroll", + "page_facing_up", + "newspaper", + "newspaper_roll", + "bookmark_tabs", + "bookmark", + "label", + "moneybag", + "coin", + "yen", + "dollar", + "euro", + "pound", + "money_with_wings", + "credit_card", + "receipt", + "chart", + "email", + "envelope", + "e-mail", + "incoming_envelope", + "envelope_with_arrow", + "outbox_tray", + "inbox_tray", + "package", + "mailbox", + "mailbox_closed", + "mailbox_with_mail", + "mailbox_with_no_mail", + "postbox", + "ballot_box", + "pencil2", + "black_nib", + "fountain_pen", + "pen", + "paintbrush", + "crayon", + "memo", + "pencil", + "briefcase", + "file_folder", + "open_file_folder", + "card_index_dividers", + "date", + "calendar", + "spiral_notepad", + "spiral_calendar", + "card_index", + "chart_with_upwards_trend", + "chart_with_downwards_trend", + "bar_chart", + "clipboard", + "pushpin", + "round_pushpin", + "paperclip", + "paperclips", + "straight_ruler", + "triangular_ruler", + "scissors", + "card_file_box", + "file_cabinet", + "wastebasket", + "lock", + "unlock", + "lock_with_ink_pen", + "closed_lock_with_key", + "key", + "old_key", + "hammer", + "axe", + "pick", + "hammer_and_pick", + "hammer_and_wrench", + "dagger", + "crossed_swords", + "gun", + "boomerang", + "bow_and_arrow", + "shield", + "carpentry_saw", + "wrench", + "screwdriver", + "nut_and_bolt", + "gear", + "clamp", + "balance_scale", + "probing_cane", + "link", + "chains", + "hook", + "toolbox", + "magnet", + "ladder", + "alembic", + "test_tube", + "petri_dish", + "dna", + "microscope", + "telescope", + "satellite", + "syringe", + "drop_of_blood", + "pill", + "adhesive_bandage", + "stethoscope", + "door", + "elevator", + "mirror", + "window", + "bed", + "couch_and_lamp", + "chair", + "toilet", + "plunger", + "shower", + "bathtub", + "mouse_trap", + "razor", + "lotion_bottle", + "safety_pin", + "broom", + "basket", + "roll_of_paper", + "bucket", + "soap", + "toothbrush", + "sponge", + "fire_extinguisher", + "shopping_cart", + "smoking", + "coffin", + "headstone", + "funeral_urn", + "moyai", + "placard", + "atm", + "put_litter_in_its_place", + "potable_water", + "wheelchair", + "mens", + "womens", + "restroom", + "baby_symbol", + "wc", + "passport_control", + "customs", + "baggage_claim", + "left_luggage", + "warning", + "children_crossing", + "no_entry", + "no_entry_sign", + "no_bicycles", + "no_smoking", + "do_not_litter", + "non-potable_water", + "no_pedestrians", + "no_mobile_phones", + "underage", + "radioactive", + "biohazard", + "arrow_up", + "arrow_upper_right", + "arrow_right", + "arrow_lower_right", + "arrow_down", + "arrow_lower_left", + "arrow_left", + "arrow_upper_left", + "arrow_up_down", + "left_right_arrow", + "leftwards_arrow_with_hook", + "arrow_right_hook", + "arrow_heading_up", + "arrow_heading_down", + "arrows_clockwise", + "arrows_counterclockwise", + "back", + "end", + "on", + "soon", + "top", + "place_of_worship", + "atom_symbol", + "om", + "star_of_david", + "wheel_of_dharma", + "yin_yang", + "latin_cross", + "orthodox_cross", + "star_and_crescent", + "peace_symbol", + "menorah", + "six_pointed_star", + "aries", + 
"taurus", + "gemini", + "cancer", + "leo", + "virgo", + "libra", + "scorpius", + "sagittarius", + "capricorn", + "aquarius", + "pisces", + "ophiuchus", + "twisted_rightwards_arrows", + "repeat", + "repeat_one", + "arrow_forward", + "fast_forward", + "next_track_button", + "play_or_pause_button", + "arrow_backward", + "rewind", + "previous_track_button", + "arrow_up_small", + "arrow_double_up", + "arrow_down_small", + "arrow_double_down", + "pause_button", + "stop_button", + "record_button", + "eject_button", + "cinema", + "low_brightness", + "high_brightness", + "signal_strength", + "vibration_mode", + "mobile_phone_off", + "female_sign", + "male_sign", + "transgender_symbol", + "heavy_multiplication_x", + "heavy_plus_sign", + "heavy_minus_sign", + "heavy_division_sign", + "infinity", + "bangbang", + "interrobang", + "question", + "grey_question", + "grey_exclamation", + "exclamation", + "heavy_exclamation_mark", + "wavy_dash", + "currency_exchange", + "heavy_dollar_sign", + "medical_symbol", + "recycle", + "fleur_de_lis", + "trident", + "name_badge", + "beginner", + "o", + "white_check_mark", + "ballot_box_with_check", + "heavy_check_mark", + "x", + "negative_squared_cross_mark", + "curly_loop", + "loop", + "part_alternation_mark", + "eight_spoked_asterisk", + "eight_pointed_black_star", + "sparkle", + "copyright", + "registered", + "tm", + "hash", + "asterisk", + "zero", + "one", + "two", + "three", + "four", + "five", + "six", + "seven", + "eight", + "nine", + "keycap_ten", + "capital_abcd", + "abcd", + "1234", + "symbols", + "abc", + "a", + "ab", + "b", + "cl", + "cool", + "free", + "information_source", + "id", + "m", + "new", + "ng", + "o2", + "ok", + "parking", + "sos", + "up", + "vs", + "koko", + "sa", + "u6708", + "u6709", + "u6307", + "ideograph_advantage", + "u5272", + "u7121", + "u7981", + "accept", + "u7533", + "u5408", + "u7a7a", + "congratulations", + "secret", + "u55b6", + "u6e80", + "red_circle", + "orange_circle", + "yellow_circle", + "green_circle", + "large_blue_circle", + "purple_circle", + "brown_circle", + "black_circle", + "white_circle", + "red_square", + "orange_square", + "yellow_square", + "green_square", + "blue_square", + "purple_square", + "brown_square", + "black_large_square", + "white_large_square", + "black_medium_square", + "white_medium_square", + "black_medium_small_square", + "white_medium_small_square", + "black_small_square", + "white_small_square", + "large_orange_diamond", + "large_blue_diamond", + "small_orange_diamond", + "small_blue_diamond", + "small_red_triangle", + "small_red_triangle_down", + "diamond_shape_with_a_dot_inside", + "radio_button", + "white_square_button", + "black_square_button", + "checkered_flag", + "triangular_flag_on_post", + "crossed_flags", + "black_flag", + "white_flag", + "rainbow_flag", + "transgender_flag", + "pirate_flag", + "ascension_island", + "andorra", + "united_arab_emirates", + "afghanistan", + "antigua_barbuda", + "anguilla", + "albania", + "armenia", + "angola", + "antarctica", + "argentina", + "american_samoa", + "austria", + "australia", + "aruba", + "aland_islands", + "azerbaijan", + "bosnia_herzegovina", + "barbados", + "bangladesh", + "belgium", + "burkina_faso", + "bulgaria", + "bahrain", + "burundi", + "benin", + "st_barthelemy", + "bermuda", + "brunei", + "bolivia", + "caribbean_netherlands", + "brazil", + "bahamas", + "bhutan", + "bouvet_island", + "botswana", + "belarus", + "belize", + "canada", + "cocos_islands", + "congo_kinshasa", + "central_african_republic", + "congo_brazzaville", + 
"switzerland", + "cote_divoire", + "cook_islands", + "chile", + "cameroon", + "cn", + "colombia", + "clipperton_island", + "costa_rica", + "cuba", + "cape_verde", + "curacao", + "christmas_island", + "cyprus", + "czech_republic", + "de", + "diego_garcia", + "djibouti", + "denmark", + "dominica", + "dominican_republic", + "algeria", + "ceuta_melilla", + "ecuador", + "estonia", + "egypt", + "western_sahara", + "eritrea", + "es", + "ethiopia", + "eu", + "european_union", + "finland", + "fiji", + "falkland_islands", + "micronesia", + "faroe_islands", + "fr", + "gabon", + "gb", + "uk", + "grenada", + "georgia", + "french_guiana", + "guernsey", + "ghana", + "gibraltar", + "greenland", + "gambia", + "guinea", + "guadeloupe", + "equatorial_guinea", + "greece", + "south_georgia_south_sandwich_islands", + "guatemala", + "guam", + "guinea_bissau", + "guyana", + "hong_kong", + "heard_mcdonald_islands", + "honduras", + "croatia", + "haiti", + "hungary", + "canary_islands", + "indonesia", + "ireland", + "israel", + "isle_of_man", + "india", + "british_indian_ocean_territory", + "iraq", + "iran", + "iceland", + "it", + "jersey", + "jamaica", + "jordan", + "jp", + "kenya", + "kyrgyzstan", + "cambodia", + "kiribati", + "comoros", + "st_kitts_nevis", + "north_korea", + "kr", + "kuwait", + "cayman_islands", + "kazakhstan", + "laos", + "lebanon", + "st_lucia", + "liechtenstein", + "sri_lanka", + "liberia", + "lesotho", + "lithuania", + "luxembourg", + "latvia", + "libya", + "morocco", + "monaco", + "moldova", + "montenegro", + "st_martin", + "madagascar", + "marshall_islands", + "macedonia", + "mali", + "myanmar", + "mongolia", + "macau", + "northern_mariana_islands", + "martinique", + "mauritania", + "montserrat", + "malta", + "mauritius", + "maldives", + "malawi", + "mexico", + "malaysia", + "mozambique", + "namibia", + "new_caledonia", + "niger", + "norfolk_island", + "nigeria", + "nicaragua", + "netherlands", + "norway", + "nepal", + "nauru", + "niue", + "new_zealand", + "oman", + "panama", + "peru", + "french_polynesia", + "papua_new_guinea", + "philippines", + "pakistan", + "poland", + "st_pierre_miquelon", + "pitcairn_islands", + "puerto_rico", + "palestinian_territories", + "portugal", + "palau", + "paraguay", + "qatar", + "reunion", + "romania", + "serbia", + "ru", + "rwanda", + "saudi_arabia", + "solomon_islands", + "seychelles", + "sudan", + "sweden", + "singapore", + "st_helena", + "slovenia", + "svalbard_jan_mayen", + "slovakia", + "sierra_leone", + "san_marino", + "senegal", + "somalia", + "suriname", + "south_sudan", + "sao_tome_principe", + "el_salvador", + "sint_maarten", + "syria", + "swaziland", + "tristan_da_cunha", + "turks_caicos_islands", + "chad", + "french_southern_territories", + "togo", + "thailand", + "tajikistan", + "tokelau", + "timor_leste", + "turkmenistan", + "tunisia", + "tonga", + "tr", + "trinidad_tobago", + "tuvalu", + "taiwan", + "tanzania", + "ukraine", + "uganda", + "us_outlying_islands", + "united_nations", + "us", + "uruguay", + "uzbekistan", + "vatican_city", + "st_vincent_grenadines", + "venezuela", + "british_virgin_islands", + "us_virgin_islands", + "vietnam", + "vanuatu", + "wallis_futuna", + "samoa", + "kosovo", + "yemen", + "mayotte", + "south_africa", + "zambia", + "zimbabwe", + "england", + "scotland", + "wales", + }, + "tag": { + "smile", + "happy", + "joy", + "haha", + "laugh", + "pleased", + "hot", + "lol", + "laughing", + "tears", + "flirt", + "proud", + "angel", + "love", + "crush", + "eyes", + "blush", + "tongue", + "lick", + "prank", + "silly", + 
"goofy", + "wacky", + "rich", + "quiet", + "whoops", + "silence", + "hush", + "suspicious", + "meh", + "mute", + "smug", + "liar", + "whew", + "tired", + "zzz", + "sick", + "ill", + "hurt", + "barf", + "disgusted", + "achoo", + "heat", + "sweating", + "freezing", + "ice", + "groggy", + "mind", + "blown", + "celebration", + "birthday", + "cool", + "geek", + "glasses", + "nervous", + "surprise", + "impressed", + "wow", + "speechless", + "amazed", + "gasp", + "puppy", + "stunned", + "scared", + "shocked", + "oops", + "phew", + "sweat", + "sad", + "tear", + "cry", + "bawling", + "horror", + "struggling", + "upset", + "whine", + "angry", + "mad", + "annoyed", + "foul", + "devil", + "evil", + "horns", + "dead", + "danger", + "poison", + "pirate", + "crap", + "monster", + "halloween", + "ufo", + "game", + "retro", + "monkey", + "blind", + "ignore", + "deaf", + "lipstick", + "email", + "envelope", + "heart", + "chocolates", + "score", + "perfect", + "explode", + "star", + "water", + "workout", + "wind", + "blow", + "fast", + "boom", + "comment", + "thinking", + "sleeping", + "goodbye", + "highfive", + "stop", + "prosper", + "spock", + "victory", + "peace", + "luck", + "hopeful", + "approve", + "ok", + "disapprove", + "bury", + "power", + "attack", + "praise", + "applause", + "hooray", + "deal", + "please", + "hope", + "wish", + "beauty", + "manicure", + "flex", + "bicep", + "strong", + "hear", + "sound", + "listen", + "smell", + "look", + "see", + "watch", + "taste", + "kiss", + "child", + "newborn", + "mustache", + "father", + "dad", + "girls", + "halt", + "denied", + "information", + "respect", + "thanks", + "doctor", + "nurse", + "graduation", + "school", + "professor", + "justice", + "chef", + "business", + "research", + "coder", + "rockstar", + "painter", + "space", + "law", + "cop", + "sleuth", + "helmet", + "crown", + "royal", + "hijab", + "groom", + "marriage", + "wedding", + "nursing", + "christmas", + "santa", + "wizard", + "spa", + "exercise", + "marathon", + "dress", + "dancer", + "bunny", + "steamy", + "bouldering", + "basketball", + "gym", + "meditation", + "shower", + "couple", + "date", + "home", + "parents", + "user", + "users", + "group", + "team", + "feet", + "tracks", + "pet", + "dog", + "speed", + "desert", + "thanksgiving", + "slow", + "dinosaur", + "sea", + "beach", + "bug", + "germ", + "flowers", + "flower", + "spring", + "plant", + "wood", + "canada", + "autumn", + "leaf", + "fruit", + "aubergine", + "spicy", + "toast", + "meat", + "chicken", + "burger", + "breakfast", + "paella", + "curry", + "noodle", + "pasta", + "tempura", + "party", + "dessert", + "sweet", + "milk", + "cafe", + "espresso", + "green", + "bottle", + "bubbly", + "drink", + "summer", + "vacation", + "drinks", + "cheers", + "whisky", + "dining", + "dinner", + "cutlery", + "cut", + "chop", + "globe", + "world", + "international", + "global", + "travel", + "camping", + "karl", + "skyline", + "train", + "bicycle", + "911", + "emergency", + "semaphore", + "wip", + "ship", + "cruise", + "flight", + "orbit", + "launch", + "time", + "morning", + "night", + "weather", + "cloud", + "swirl", + "rain", + "beach_umbrella", + "lightning", + "thunder", + "winter", + "cold", + "burn", + "festival", + "shiny", + "present", + "award", + "contest", + "winner", + "gold", + "silver", + "bronze", + "sports", + "skating", + "target", + "pool", + "billiards", + "fortune", + "play", + "controller", + "console", + "dice", + "gambling", + "theater", + "drama", + "design", + "paint", + "shirt", + "formal", + "pants", + "bag", + 
"bags", + "sneaker", + "sport", + "running", + "shoe", + "king", + "queen", + "hat", + "classy", + "education", + "college", + "university", + "makeup", + "engaged", + "diamond", + "volume", + "announcement", + "notification", + "off", + "music", + "podcast", + "sing", + "earphones", + "rock", + "piano", + "smartphone", + "mobile", + "call", + "incoming", + "phone", + "desktop", + "screen", + "save", + "film", + "video", + "photo", + "search", + "zoom", + "idea", + "light", + "library", + "document", + "press", + "tag", + "dollar", + "cream", + "money", + "subscription", + "letter", + "shipping", + "note", + "directory", + "calendar", + "schedule", + "graph", + "metrics", + "stats", + "location", + "trash", + "security", + "private", + "lock", + "password", + "tool", + "shoot", + "weapon", + "archery", + "science", + "laboratory", + "investigate", + "signal", + "health", + "hospital", + "needle", + "medicine", + "wc", + "bath", + "toilet", + "cigarette", + "funeral", + "stone", + "accessibility", + "restroom", + "airport", + "limit", + "block", + "forbidden", + "return", + "sync", + "shuffle", + "loop", + "movie", + "wifi", + "confused", + "bang", + "environment", + "trademark", + "number", + "letters", + "numbers", + "alphabet", + "fresh", + "yes", + "help", + "milestone", + "finish", + "pride", + "keeling", + "ivory", + "china", + "flag", + "germany", + "spain", + "france", + "french", + "british", + "italy", + "japan", + "korea", + "burma", + "russia", + "turkey", + "united", + "america", + }, +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/data/errors.go b/vendor/github.com/brianvoe/gofakeit/v7/data/errors.go new file mode 100644 index 0000000000..75647cf81c --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/data/errors.go @@ -0,0 +1,122 @@ +package data + +var Error = map[string][]string{ + "object": { + "argument", + "buffer", + "connection", + "database", + "header", + "hostname", + "method", + "object", + "parameter", + "pointer", + "port", + "protocol", + "request", + "response", + "server", + "service", + "signature", + "tag", + "undefined", + "url", + "uri", + "variable", + }, + "generic": { + "error", + "syntax error", + "requested {errorobject} is unavailable", + "failed to {hackerverb} {errorobject}", + "expected {errorobject} is undefined", + "[object Object]", + "no such variable", + "{errorobject} not initialized", + "variable assigned before declaration", + }, + "database": { + "sql error", + "database connection error", + "table does not exist", + "unique key constraint", + "table migration failed", + "bad connection", + "destination pointer is nil", + }, + "grpc": { + "connection refused", + "connection closed", + "connection is shut down", + "client protocol error", + }, + "http": { + "cross-origin-resource-policy error", + "feature not supported", + "trailer header without chunked transfer encoding", + "no multipart boundary param in Content-Type", + "request Content-Type isn't multipart/form-data", + "header too long", + "entity body too short", + "missing ContentLength in HEAD response", + "named cookie not present", + "invalid method", + "connection has been hijacked", + "request method or response status code does not allow body", + "wrote more than the declared Content-Length", + "{httpmethod} not allowed", + }, + "http_client": { // 400s + "bad request", // 400 + "unauthorized", // 401 + "payment required", // 402 + "forbidden", // 403 + "not found", // 404 + "method not allowed", // 405 + "not acceptable", // 406 + "proxy authentication 
required", // 407 + "request timeout", // 408 + "conflict", // 409 + "gone", // 410 + "length required", // 411 + "precondition failed", // 412 + "payload too large", // 413 + "URI too long", // 414 + "unsupported media type", // 415 + "range not satisfiable", // 416 + "expectation failed", // 417 + "im a teapot", // 418 + }, + "http_server": { // 500s + "internal server error", // 500 + "not implemented", // 501 + "bad gateway", // 502 + "service unavailable", // 503 + "gateway timeout", // 504 + "http version not supported", // 505 + "variant also negotiates", // 506 + "insufficient storage", // 507 + "loop detected", // 508 + "not extended", // 510 + "network authentication required", // 511 + }, + "runtime": { + "panic: runtime error: invalid memory address or nil pointer dereference", + "address out of bounds", + "undefined has no such property 'length'", + "not enough arguments", + "expected 2 arguments, got 3", + }, + "validation": { + "invalid format", + "missing required field", + "{inputname} is required", + "{inputname} max length exceeded", + "{inputname} must be at exactly 16 characters", + "{inputname} must be at exactly 32 bytes", + "failed to parse {inputname}", + "date is in the past", + "payment details cannot be verified", + }, +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/data/files.go b/vendor/github.com/brianvoe/gofakeit/v7/data/files.go new file mode 100644 index 0000000000..363b840017 --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/data/files.go @@ -0,0 +1,7 @@ +package data + +// Files consists of file information +var Files = map[string][]string{ + "mime_type": {"x-world/x-3dmf", "application/octet-stream", "application/x-authorware-bin", "application/x-authorware-map", "application/x-authorware-seg", "text/vnd.abc", "text/html", "video/animaflex", "application/postscript", "audio/aiff", "audio/x-aiff", "audio/aiff", "audio/x-aiff", "audio/aiff", "audio/x-aiff", "application/x-aim", "text/x-audiosoft-intra", "application/x-navi-animation", "application/x-nokia-9000-communicator-add-on-software", "application/mime", "application/octet-stream", "application/arj", "application/octet-stream", "image/x-jg", "video/x-ms-asf", "text/x-asm", "text/asp", "application/x-mplayer2", "video/x-ms-asf", "video/x-ms-asf-plugin", "audio/basic", "audio/x-au", "application/x-troff-msvideo", "video/avi", "video/msvideo", "video/x-msvideo", "video/avs-video", "application/x-bcpio", "application/mac-binary", "application/macbinary", "application/octet-stream", "application/x-binary", "application/x-macbinary", "image/bmp", "image/bmp", "image/x-windows-bmp", "application/book", "application/book", "application/x-bzip2", "application/x-bsh", "application/x-bzip", "application/x-bzip2", "text/plain", "text/x-c", "text/plain", "application/vnd.ms-pki.seccat", "text/plain", "text/x-c", "application/clariscad", "application/x-cocoa", "application/cdf", "application/x-cdf", "application/x-netcdf", "application/pkix-cert", "application/x-x509-ca-cert", "application/x-chat", "application/x-chat", "application/java", "application/java-byte-code", "application/x-java-class", "application/octet-stream", "text/plain", "text/plain", "application/x-cpio", "text/x-c", "application/mac-compactpro", "application/x-compactpro", "application/x-cpt", "application/pkcs-crl", "application/pkix-crl", "application/pkix-cert", "application/x-x509-ca-cert", "application/x-x509-user-cert", "application/x-csh", "text/x-script.csh", "application/x-pointplus", "text/css", "text/plain", 
"application/x-director", "application/x-deepv", "text/plain", "application/x-x509-ca-cert", "video/x-dv", "application/x-director", "video/dl", "video/x-dl", "application/msword", "application/msword", "application/commonground", "application/drafting", "application/octet-stream", "video/x-dv", "application/x-dvi", "drawing/x-dwf (old)", "model/vnd.dwf", "application/acad", "image/vnd.dwg", "image/x-dwg", "application/dxf", "image/vnd.dwg", "image/x-dwg", "application/x-director", "text/x-script.elisp", "application/x-bytecode.elisp (compiled elisp)", "application/x-elc", "application/x-envoy", "application/postscript", "application/x-esrehber", "text/x-setext", "application/envoy", "application/x-envoy", "application/octet-stream", "text/plain", "text/x-fortran", "text/x-fortran", "text/plain", "text/x-fortran", "application/vnd.fdf", "application/fractals", "image/fif", "video/fli", "video/x-fli", "image/florian", "text/vnd.fmi.flexstor", "video/x-atomic3d-feature", "text/plain", "text/x-fortran", "image/vnd.fpx", "image/vnd.net-fpx", "application/freeloader", "audio/make", "text/plain", "image/g3fax", "image/gif", "video/gl", "video/x-gl", "audio/x-gsm", "audio/x-gsm", "application/x-gsp", "application/x-gss", "application/x-gtar", "application/x-compressed", "application/x-gzip", "application/x-gzip", "multipart/x-gzip", "text/plain", "text/x-h", "application/x-hdf", "application/x-helpfile", "application/vnd.hp-hpgl", "text/plain", "text/x-h", "text/x-script", "application/hlp", "application/x-helpfile", "application/x-winhelp", "application/vnd.hp-hpgl", "application/vnd.hp-hpgl", "application/binhex", "application/binhex4", "application/mac-binhex", "application/mac-binhex40", "application/x-binhex40", "application/x-mac-binhex40", "application/hta", "text/x-component", "text/html", "text/html", "text/html", "text/webviewhtml", "text/html", "x-conference/x-cooltalk", "image/x-icon", "text/plain", "image/ief", "image/ief", "application/iges", "model/iges", "application/iges", "model/iges", "application/x-ima", "application/x-httpd-imap", "application/inf", "application/x-internett-signup", "application/x-ip2", "video/x-isvideo", "audio/it", "application/x-inventor", "i-world/i-vrml", "application/x-livescreen", "audio/x-jam", "text/plain", "text/x-java-source", "text/plain", "text/x-java-source", "application/x-java-commerce", "image/jpeg", "image/pjpeg", "image/jpeg", "image/jpeg", "image/pjpeg", "image/jpeg", "image/pjpeg", "image/jpeg", "image/pjpeg", "image/x-jps", "application/x-javascript", "image/jutvision", "audio/midi", "music/x-karaoke", "application/x-ksh", "text/x-script.ksh", "audio/nspaudio", "audio/x-nspaudio", "audio/x-liveaudio", "application/x-latex", "application/lha", "application/octet-stream", "application/x-lha", "application/octet-stream", "text/plain", "audio/nspaudio", "audio/x-nspaudio", "text/plain", "application/x-lisp", "text/x-script.lisp", "text/plain", "text/x-la-asf", "application/x-latex", "application/octet-stream", "application/x-lzh", "application/lzx", "application/octet-stream", "application/x-lzx", "text/plain", "text/x-m", "video/mpeg", "audio/mpeg", "video/mpeg", "audio/x-mpequrl", "application/x-troff-man", "application/x-navimap", "text/plain", "application/mbedlet", "application/mcad", "application/x-mathcad", "image/vasa", "text/mcf", "application/netmc", "application/x-troff-me", "message/rfc822", "message/rfc822", "application/x-midi", "audio/midi", "audio/x-mid", "audio/x-midi", "music/crescendo", "x-music/x-midi", 
"application/x-midi", "audio/midi", "audio/x-mid", "audio/x-midi", "music/crescendo", "x-music/x-midi", "application/x-frame", "application/x-mif", "message/rfc822", "www/mime", "video/x-motion-jpeg", "application/base64", "application/x-meme", "application/base64", "audio/mod", "audio/x-mod", "video/quicktime", "video/quicktime", "video/x-sgi-movie", "audio/mpeg", "audio/x-mpeg", "video/mpeg", "video/x-mpeg", "video/x-mpeq2a", "audio/mpeg3", "audio/x-mpeg-3", "video/mpeg", "video/x-mpeg", "audio/mpeg", "video/mpeg", "application/x-project", "video/mpeg", "video/mpeg", "audio/mpeg", "video/mpeg", "audio/mpeg", "application/vnd.ms-project", "application/x-project", "application/x-project", "application/x-project", "application/marc", "application/x-troff-ms", "video/x-sgi-movie", "audio/make", "application/x-vnd.audioexplosion.mzz", "image/naplps", "image/naplps", "application/x-netcdf", "application/vnd.nokia.configuration-message", "image/x-niff", "image/x-niff", "application/x-mix-transfer", "application/x-conference", "application/x-navidoc", "application/octet-stream", "application/oda", "application/x-omc", "application/x-omcdatamaker", "application/x-omcregerator", "text/x-pascal", "application/pkcs10", "application/x-pkcs10", "application/pkcs-12", "application/x-pkcs12", "application/x-pkcs7-signature", "application/pkcs7-mime", "application/x-pkcs7-mime", "application/pkcs7-mime", "application/x-pkcs7-mime", "application/x-pkcs7-certreqresp", "application/pkcs7-signature", "application/pro_eng", "text/pascal", "image/x-portable-bitmap", "application/vnd.hp-pcl", "application/x-pcl", "image/x-pict", "image/x-pcx", "chemical/x-pdb", "application/pdf", "audio/make", "audio/make.my.funk", "image/x-portable-graymap", "image/x-portable-greymap", "image/pict", "image/pict", "application/x-newton-compatible-pkg", "application/vnd.ms-pki.pko", "text/plain", "text/x-script.perl", "application/x-pixclscript", "image/x-xpixmap", "text/x-script.perl-module", "application/x-pagemaker", "application/x-pagemaker", "image/png", "application/x-portable-anymap", "image/x-portable-anymap", "application/mspowerpoint", "application/vnd.ms-powerpoint", "model/x-pov", "application/vnd.ms-powerpoint", "image/x-portable-pixmap", "application/mspowerpoint", "application/vnd.ms-powerpoint", "application/mspowerpoint", "application/powerpoint", "application/vnd.ms-powerpoint", "application/x-mspowerpoint", "application/mspowerpoint", "application/x-freelance", "application/pro_eng", "application/postscript", "application/octet-stream", "paleovu/x-pv", "application/vnd.ms-powerpoint", "text/x-script.phyton", "application/x-bytecode.python", "audio/vnd.qcelp", "x-world/x-3dmf", "x-world/x-3dmf", "image/x-quicktime", "video/quicktime", "video/x-qtc", "image/x-quicktime", "image/x-quicktime", "audio/x-pn-realaudio", "audio/x-pn-realaudio-plugin", "audio/x-realaudio", "audio/x-pn-realaudio", "application/x-cmu-raster", "image/cmu-raster", "image/x-cmu-raster", "image/cmu-raster", "text/x-script.rexx", "image/vnd.rn-realflash", "image/x-rgb", "application/vnd.rn-realmedia", "audio/x-pn-realaudio", "audio/mid", "audio/x-pn-realaudio", "audio/x-pn-realaudio", "audio/x-pn-realaudio-plugin", "application/ringing-tones", "application/vnd.nokia.ringing-tone", "application/vnd.rn-realplayer", "application/x-troff", "image/vnd.rn-realpix", "audio/x-pn-realaudio-plugin", "text/richtext", "text/vnd.rn-realtext", "application/rtf", "application/x-rtf", "text/richtext", "application/rtf", "text/richtext", 
"video/vnd.rn-realvideo", "text/x-asm", "audio/s3m", "application/octet-stream", "application/x-tbook", "application/x-lotusscreencam", "text/x-script.guile", "text/x-script.scheme", "video/x-scm", "text/plain", "application/sdp", "application/x-sdp", "application/sounder", "application/sea", "application/x-sea", "application/set", "text/sgml", "text/x-sgml", "text/sgml", "text/x-sgml", "application/x-bsh", "application/x-sh", "application/x-shar", "text/x-script.sh", "application/x-bsh", "application/x-shar", "text/html", "text/x-server-parsed-html", "audio/x-psid", "application/x-sit", "application/x-stuffit", "application/x-koan", "application/x-koan", "application/x-koan", "application/x-koan", "application/x-seelogo", "application/smil", "application/smil", "audio/basic", "audio/x-adpcm", "application/solids", "application/x-pkcs7-certificates", "text/x-speech", "application/futuresplash", "application/x-sprite", "application/x-sprite", "application/x-wais-source", "text/x-server-parsed-html", "application/streamingmedia", "application/vnd.ms-pki.certstore", "application/step", "application/sla", "application/vnd.ms-pki.stl", "application/x-navistyle", "application/step", "application/x-sv4cpio", "application/x-sv4crc", "image/vnd.dwg", "image/x-dwg", "application/x-world", "x-world/x-svr", "application/x-shockwave-flash", "application/x-troff", "text/x-speech", "application/x-tar", "application/toolbook", "application/x-tbook", "application/x-tcl", "text/x-script.tcl", "text/x-script.tcsh", "application/x-tex", "application/x-texinfo", "application/x-texinfo", "application/plain", "text/plain", "application/gnutar", "application/x-compressed", "image/tiff", "image/x-tiff", "image/tiff", "image/x-tiff", "application/x-troff", "audio/tsp-audio", "application/dsptype", "audio/tsplayer", "text/tab-separated-values", "image/florian", "text/plain", "text/x-uil", "text/uri-list", "text/uri-list", "application/i-deas", "text/uri-list", "text/uri-list", "application/x-ustar", "multipart/x-ustar", "application/octet-stream", "text/x-uuencode", "text/x-uuencode", "application/x-cdlink", "text/x-vcalendar", "application/vda", "video/vdo", "application/groupwise", "video/vivo", "video/vnd.vivo", "video/vivo", "video/vnd.vivo", "application/vocaltec-media-desc", "application/vocaltec-media-file", "audio/voc", "audio/x-voc", "video/vosaic", "audio/voxware", "audio/x-twinvq-plugin", "audio/x-twinvq", "audio/x-twinvq-plugin", "application/x-vrml", "model/vrml", "x-world/x-vrml", "x-world/x-vrt", "application/x-visio", "application/x-visio", "application/x-visio", "application/wordperfect6.0", "application/wordperfect6.1", "application/msword", "audio/wav", "audio/x-wav", "application/x-qpro", "image/vnd.wap.wbmp", "application/vnd.xara", "application/msword", "application/x-123", "windows/metafile", "text/vnd.wap.wml", "application/vnd.wap.wmlc", "text/vnd.wap.wmlscript", "application/vnd.wap.wmlscriptc", "application/msword", "application/wordperfect", "application/wordperfect", "application/wordperfect6.0", "application/wordperfect", "application/wordperfect", "application/x-wpwin", "application/x-lotus", "application/mswrite", "application/x-wri", "application/x-world", "model/vrml", "x-world/x-vrml", "model/vrml", "x-world/x-vrml", "text/scriplet", "application/x-wais-source", "application/x-wintalk", "image/x-xbitmap", "image/x-xbm", "image/xbm", "video/x-amt-demorun", "xgl/drawing", "image/vnd.xiff", "application/excel", "application/excel", "application/x-excel", "application/x-msexcel", 
"application/excel", "application/vnd.ms-excel", "application/x-excel", "application/excel", "application/vnd.ms-excel", "application/x-excel", "application/excel", "application/x-excel", "application/excel", "application/x-excel", "application/excel", "application/vnd.ms-excel", "application/x-excel", "application/excel", "application/vnd.ms-excel", "application/x-excel", "application/excel", "application/vnd.ms-excel", "application/x-excel", "application/x-msexcel", "application/excel", "application/x-excel", "application/excel", "application/x-excel", "application/excel", "application/vnd.ms-excel", "application/x-excel", "application/x-msexcel", "audio/xm", "application/xml", "text/xml", "xgl/movie", "application/x-vnd.ls-xpix", "image/x-xpixmap", "image/xpm", "image/png", "video/x-amt-showrun", "image/x-xwd", "image/x-xwindowdump", "chemical/x-pdb", "application/x-compress", "application/x-compressed", "application/x-compressed", "application/x-zip-compressed", "application/zip", "multipart/x-zip", "application/octet-stream", "text/x-script.zsh"}, + "extension": {"doc", "docx", "log", "msg", "odt", "pages", "rtf", "tex", "txt", "wpd", "wps", "csv", "dat", "gbr", "ged", "key", "keychain", "pps", "ppt", "pptx", "sdf", "tar", "vcf", "xml", "aif", "iff", "mid", "mpa", "ra", "wav", "wma", "asf", "asx", "avi", "flv", "mov", "mpg", "rm", "srt", "swf", "vob", "wmv", "max", "obj", "bmp", "dds", "gif", "jpg", "png", "psd", "pspimage", "tga", "thm", "tif", "tiff", "yuv", "ai", "eps", "ps", "svg", "indd", "pct", "pdf", "xlr", "xls", "xlsx", "accdb", "db", "dbf", "mdb", "pdb", "sql", "apk", "app", "bat", "cgi", "com", "exe", "gadget", "jar", "pif", "vb", "wsf", "dem", "gam", "nes", "rom", "sav", "dwg", "dxf", "gpx", "kml", "kmz", "asp", "aspx", "cer", "cfm", "csr", "css", "htm", "html", "js", "jsp", "php", "rss", "xhtml", "crx", "plugin", "fnt", "fon", "otf", "ttf", "cab", "cpl", "cur", "deskthemepack", "dll", "dmp", "drv", "icns", "ico", "lnk", "sys", "cfg", "ini", "prf", "hqx", "mim", "uue", "cbr", "deb", "gz", "pkg", "rar", "rpm", "sitx", "gz", "zip", "zipx", "bin", "cue", "dmg", "iso", "mdf", "toast", "vcd", "class", "cpp", "cs", "dtd", "fla", "java", "lua", "pl", "py", "sh", "sln", "swift", "vcxproj", "xcodeproj", "bak", "tmp", "crdownload", "ics", "msi", "part", "torrent"}, +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/data/food.go b/vendor/github.com/brianvoe/gofakeit/v7/data/food.go new file mode 100644 index 0000000000..0726c17e9b --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/data/food.go @@ -0,0 +1,13 @@ +package data + +// Food consists of food information +var Food = map[string][]string{ + "fruit": {"Apple", "Apricot", "Avocado", "Banana", "Bilberry", "Blackberry", "Blackcurrant", "Blueberry", "Currant", "Cherry", "Cherimoya", "Clementine", "Date", "Damson", "Durian", "Eggplant", "Elderberry", "Feijoa", "Gooseberry", "Grape", "Grapefruit", "Guava", "Huckleberry", "Jackfruit", "Jambul", "Kiwi", "Kumquat", "Legume", "Lemon", "Lime", "Lychee", "Mango", "Mangostine", "Melon", "Cantaloupe", "Honeydew", "Watermelon", "Rock melon", "Nectarine", "Orange", "Peach", "Pear", "Pitaya", "Physalis", "Plum", "Pineapple", "Pomegranate", "Raisin", "Raspberry", "Rambutan", "Redcurrant", "Satsuma", "Strawberry", "Tangerine", "Tomato", "Watermelon"}, + "vegetable": {"Amaranth Leaves", "Arrowroot", "Artichoke", "Arugula", "Asparagus", "Bamboo Shoots", "Beans, Green", "Beets", "Belgian Endive", "Bitter Melon*", "Bok Choy", "Broadbeans", "Broccoli", "Broccoli Rabe", "Brussel Sprouts", 
"Cabbage", "Carrot", "Cassava", "Cauliflower", "Celeriac", "Celery", "Chicory", "Collards", "Corn", "Crookneck", "Cucumber", "Daikon", "Dandelion Greens", "Eggplant", "Fennel", "Fiddleheads", "Ginger Root", "Horseradish", "Jicama", "Kale", "Kohlrabi", "Leeks", "Lettuce", "Mushrooms", "Mustard Greens", "Okra", "Onion", "Parsnip", "Peas", "Pepper", "Potato", "Pumpkin", "Radicchio", "Radishes", "Rutabaga", "Salsify", "Shallots", "Snow Peas", "Sorrel", "Soybeans", "Spaghetti Squash", "Spinach", "Squash", "Sugar Snap Peas", "Sweet Potato", "Swiss Chard", "Tomato", "Turnip", "Watercress", "Yam Root", "Zucchini"}, + "breakfast": {"berry cream cheese coffee cake", "broiled cinnamon toast", "breakfast casserole seasoned with country gravy", "mamas fruit cobbler", "shirleys plain or blueberry muffins", "toasted sunny side up egg and cheese sandwiches", "3 meat breakfast pizza", "moms cheat doughnuts", "old fashioned banana muffins", "blackberry breakfast bars", "pikelets australian pancakes", "pumpkin ginger scones with cinnamon chips", "tomato and mushroom omelette", "asparagus omelette wraps", "poached eggs technique", "scrambled egg sandwiches with onions and red peppers", "cheesecake kugel", "chicken and egg on rice oyako donburi", "bacon egg casserole", "ginger lemon muffins", "lizs morning glory muffins", "scrambled eggs oeufs brouills", "nats cucumber cream cheese bagel", "easy breakfast casserole", "6 week bran muffins auntie annes muffins", "awesome orange chocolate muffins", "baked swiss cheese omelet", "melt in your mouth blueberry muffins", "baked pears", "flaeskeaeggekage danish bacon egg pancake omelet", "sleepy twisted sisters g n g breakfast ramekin", "lemon buttercream pancakes with blueberries", "chef flowers simple sunday brunch omelette", "blueberry bakery muffins", "cardamom sour cream waffles", "sausage gravy for biscuits and gravy", "creamy scrambled eggs in the microwave", "english muffins with bacon butter", "original praline bacon recipe", "christmas caramel rolls easy", "blueberry banana happy face pancakes", "whole grain pancake mix", "fresh mango bread", "canadian bacon cheese omelet", "pumpkin french toast with toasted walnuts", "green mountain granola", "italian eggs with bacon", "a faster egg muffin", "country scrambled eggs", "everyday french breakfast baguette and jam with chocolate milk", "mexi eggs in a hole", "fruited irish oatmeal", "ham omelet deluxe", "danish bubble", "best buttermilk pancakes", "egg flowers", "vanilla fruit dip", "eggs in a basket", "grandmas swedish thin pancakes", "cinnamon maple granola", "wake up stuffed french breakfast panini", "quinoa muffins", "grilled cheese on raisin bread", "castillian hot chocolate", "banana blueberry oatmeal bread", "caramel pull aparts", "purple cow", "chili jack oven omelet", "cheery cherry muffins", "israeli breakfast salad", "muffin toppings", "migas lite for 2", "easy danish kringle", "oatmeal cookie granola"}, + "lunch": {"no bake hersheys bar pie", "worm sandwiches", "quesadillas for one or two", "pearls sesame noodles", "patty melt", "fresh tomato sandwiches saturday lunch on longmeadow farm", "onion burgers by john t edge the longmeadow farm", "fresh tomato and cucumber salad", "hoisin marinated wing pieces", "feta marinated", "spicy roasted butternut seeds pumpkin seeds", "honey chipotle pecans", "baked ham glazed with pineapple and chipotle peppers", "reuben sandwich our way", "toasted sunny side up egg and cheese sandwiches", "mrs allens date loaf", "3 meat breakfast pizza", "body and soul health 
muffins", "grilled blue cheese burgers", "kittencals beef burritos", "spinach and mandarin orange salad", "coconut pound cake", "scallop saute", "open faced crab sandwiches", "the traditional cyprus sandwich with halloumi onions and tomato", "toasted ham and cheese supreme", "scrambled egg sandwiches with onions and red peppers", "cucumber open faced sandwiches", "chicken and egg on rice oyako donburi", "blt sandwich", "grilled chicken pesto panini", "mushroom and chicken grilled quesadillas", "delicious cheesy bacon and green onion potato skins", "grilled chili lime chicken", "fried almonds", "the greatful bread sandwich", "egg salad club sandwiches or shrimp salad club", "nifs peanut butter banana muffins", "parmesan fish in the oven", "caramelized onion focaccia bread machine", "nats cucumber cream cheese bagel", "chicken with cashews", "lemon parsley popcorn", "not your ordinary chocolate chip cookies liqueur laced", "katos tasty salmon cream cheese surprise", "greek inspired salad", "tomato basil american cheese sandwich", "club sandwich", "bacon and egg salad sandwiches", "apple cheese bites", "two cheese panini with tomato olive pesto", "delicious and simple fruit dip", "tex mex 7 layer salad", "grilled peanut butter and jelly sandwich", "simply simple cucumber slices in vinegar dressing longmeadow", "ww greek inspired scrambled egg wraps", "baby greens with mustard vinaigrette", "patty melts", "ribs", "chocolate angel food cake", "spinach with lemon garlic", "green goddess dressing", "leftover rice muffins", "cajun garlic fingers", "fresh mango bread", "california crab salad", "hot salty nuts", "beef for tacos", "hidden valley wraps", "omas boterkoek dutch buttercake", "apple butterflies", "don t burn your fingers garlic bread", "beer wisconsin bratwurst", "salmon with bourbon and brown sugar glaze", "lemon coconut muffins", "the godfather of grilled cheese sandwiches", "green mountain granola", "tuna red onion and parsley salad", "tortellini skewers", "italian meatball hoagies", "crispy fried chicken spring rolls", "rotisserie style chicken in the crock pot", "creamed peas on toast", "bergy dim sum 5 steamed shrimp dumplings", "chocolate almond roca bar", "number 400 seafood casserole", "chocolate rainbow krispies treats", "spinach salad with blue cheese", "hash", "fake crab salad sandwiches", "guacamole stuffed deviled eggs", "weight watchers veggie barley soup 1 pt for 1 cup", "hummus with a twist", "bellissimo panini", "carls jr western bacon cheeseburger copycat by todd wilbur", "salami havarti and cole slaw sandwiches", "garlic herbed roasted red skin potatoes", "grilled cheese on raisin bread", "hearty grilled cheese", "italian deli wraps", "strammer max german warm sandwich", "quick elephant ears", "salata marouli romaine lettuce salad", "goat cheese black olive mashed potatoes", "tomato cucumber avocado sandwich", "purple cow", "chocolate coconut dream bars", "homemade popsicles", "ginger soy salmon", "sweet and sour pork balls", "spicy chicken soup with hints of lemongrass and coconut milk", "another buffalo wings recipe", "famous white wings", "amazing sweet italian sausage pasta soup", "sausage sandwich italian style", "copycat taco bell chicken enchilada bowl", "simple pan fried chicken breasts", "1 2 3 black bean salsa dip", "quick chile relleno casserole", "bacon spaghetti squash", "fantastic banana bran muffins", "garbanzo vegetarian burgers", "mediterranean tuna stuffed tomato", "sugared cinnamon almonds", "queen margherita pizza", "insanely easy chickpea salad", 
"habit forming shrimp dip", "turkey swiss panini", "pumpkin chocolate chip muffins", "grilled havarti and avocado sandwiches", "english muffin pizzas", "oatmeal cookie granola"}, + "dinner": {"kittencals caesar tortellini salad", "no bake hersheys bar pie", "lindas special potato salad", "kittencals parmesan orzo", "pearls sesame noodles", "roasted potatoes and green beans", "kittencals really great old fashioned lemonade", "lindas chunky garlic mashed potatoes", "kittencals pan fried asparagus", "cafe mocha latte", "fresh tomato and cucumber salad", "peanut butter gooey cake", "foolproof standing prime rib roast paula deen", "mamas fruit cobbler", "hoisin marinated wing pieces", "feta marinated", "the realtors cream cheese corn", "savory pita chips", "jalapeno pepper jelly chicken", "kashmir lamb with spinach", "oven fried zucchini sticks", "best ever bruschetta", "maple cinnamon coffee", "kick a fried onion rings", "guava mojito", "confit d oignon french onion marmalade", "flounder stuffed with shrimp and crabmeat", "mrs allens date loaf", "swedish cucumber salad pressgurka", "authentic pork lo mein chinese", "golden five spice sticky chicken", "basil tomato salad", "white chocolate cheesecake", "celery and blue cheese salad", "kittencals crock pot french dip roast", "lindas asian salmon", "spinach and mandarin orange salad", "coconut pound cake", "scallop saute", "spicy catfish tenders with cajun tartar sauce", "just like deweys candied walnut and grape salad", "strawberry pavlova", "grilled pork chops with lime cilantro garlic", "smoky barbecue beef brisket crock pot", "quick and easy chicken in cream sauce", "fried chorizo with garlic", "cucumber open faced sandwiches", "rachael rays mimosa", "tortellini bow tie pasta salad", "tonkatsu japanese pork cutlet", "mushroom and chicken grilled quesadillas", "delicious cheesy bacon and green onion potato skins", "roasted beet salad with horseradish cream dressing", "islands bananas foster", "apricot glazed roasted asparagus low fat", "frozen kahlua creme", "fried almonds", "just peachy grillin ribs rsc", "death by chocolate cake", "parmesan fish in the oven", "calico peas", "creamy cucumber dill dip", "emerils stewed black eyed peas", "german style eiskaffee iced coffee drink", "strawberry angel trifle", "spinach salad with feta cheese", "french napoleons", "ultimate crab and spinach manicotti with parmesan cheese sauce", "sweet and sour stir fry shrimp with broccoli and red bell pepper", "crispy noodle salad with sweet and sour dressing", "crunchy rosemary potatoes", "roasted cherry or grape tomatoes", "blackened skillet shrimp", "parslied new potatoes", "tropical baked chicken", "sweet and sour kielbasa kabobs", "fantastic mushrooms with garlic butter and parmesan", "asparagus with lemon butter crumbs", "creamy garlic prawns", "kittencals banana almond muffins with almond streusel", "ww shrimp scampi", "kittencals tender microwave corn with husks on", "nude beach", "kittencals greek garden salad with greek style dressing", "roasted broccoli with cherry tomatoes", "kittencals chicken cacciatore", "buttermilk mashed potatoes with country mustard", "tilapia in thai sauce", "cream cheese potato soup", "brown sugar roasted salmon with maple mustard dill sauce", "baby greens with mustard vinaigrette", "ribs", "new england roasted cornish game hens", "chocolate angel food cake", "creamy strawberries", "spinach with lemon garlic", "green goddess dressing", "jamaican pork tenderloin", "awesome twice baked potatoes", "sausage mushroom appetizers", 
"roasted garlic soup with parmesan", "crushed red potatoes with garlic", "15 minute no fry chicken enchiladas honest", "uncle bills caesar canadian style", "raspberry cranberry salad with sour cream cream cheese topping", "hot salty nuts", "acorn squash for 2", "pumpkin knot yeast rolls", "caramelized onion dip spread", "roasted asparagus with sage and lemon butter", "spanish garlic shrimp taverna", "baby greens with pears gorgonzola and pecans", "grilled or baked salmon with lavender", "ruth walls german apple cake", "healthy italian breadsticks or pizza crust", "strawberry and cream cheese parfait", "marinated grilled tuna steak", "kittencals extra crispy fried chicken breast", "de constructed chicken cordon bleu", "moroccan cinnamon coffee with orange flower water", "lemon and parsley potatoes", "bergy dim sum 5 steamed shrimp dumplings", "chocolate almond roca bar", "garlic mashed potatoes and cashew gravy", "number 400 seafood casserole", "sherry buttered shrimp", "spinach salad with blue cheese", "cookie monster fruit salad", "asian broccoli salad", "pink poodle", "butterflied leg of lamb with lots of garlic and rosemary", "gorgonzola and toasted walnut salad", "maple coffee", "chocolate chip bundt cake with chocolate glaze", "crock pot caramelized onion pot roast", "mashed potatoes with bacon and cheddar", "provencal olives", "creole potato salad", "wild addicting dip", "baby shower pink cloud punch", "i did it my way tossed salad", "lubys cafeteria butternut brownie pie", "spiced poached pears", "lemon cajun stir fry", "iced banana cream", "potato ham onion chipotle soup", "chicken and penne casserole", "kahlua hot chocolate", "chicken and yoghurt curry", "oriental asparagus and mushrooms", "guacamole stuffed deviled eggs", "orzo with tomatoes feta and green onions", "kathy dessert baked bananas zwt ii asia", "hummus with pine nuts turkish style", "caramel delight", "whipped cream cream cheese frosting", "broccoli and cranberry salad", "raspberry lemonade", "pan broiled steak with whiskey sauce", "t g i fridays mudslide", "herb crusted fish fillets", "agua de valencia knock your socks off spanish cava punch", "orange brownie", "jiffy punch", "steak balmoral and whisky sauce from the witchery by the castle", "julies alabama white sauce", "ww potato gratin 5 points", "bo kaap cape malay curry powder south african spice mixture", "garlic herbed roasted red skin potatoes", "tasty broccoli salad", "risotto with pesto and mascarpone", "red potato and green bean saute", "caribbean sunset", "sriracha honey roasted broccoli", "salata marouli romaine lettuce salad", "goat cheese black olive mashed potatoes", "swirled cranberry cheesecake", "curried pea soup", "long island iced tea applebees tgi fridays style", "chocolate coconut dream bars", "bbq salmon filet", "blue margaritas", "sweet and sour pork balls", "spanish shrimp", "orange glazed pork chops", "heavenly lemon bread pudding", "spicy chicken soup with hints of lemongrass and coconut milk", "sweet onion and mashed potato bake", "smoky clam chowder", "cornish game hens with peach glaze", "garlic prime rib", "german apple cake with cream cheese frosting", "amazing sweet italian sausage pasta soup", "fresh orange slices with honey and cinnamon", "blackened tuna bites with cajun mustard", "tuna cobb salad", "greek shrimp with rigatoni", "creamy beet salad", "caponata eggplant and lots of good things", "lemon and oregano lamb loin chops", "pork chops with apples stuffing", "bacon spaghetti squash", "layered bean taco dip", "creamy lemon 
tarts", "strawberry and baileys fool", "italian style roast", "sourdough rosemary potato bread", "cracker barrel baby carrots", "portuguese tomato rice", "chocolate covered dipped strawberries", "caf a la russe chocolate coffee", "herbed potato with cottage cheese", "your basic tossed salad", "panzanella salad with bacon tomato and basil"}, + "drink": {"water", "tea", "milk", "juice", "coffee", "soda", "smoothie", "beer", "wine"}, + "snack": {"hoisin marinated wing pieces", "feta marinated", "spicy roasted butternut seeds pumpkin seeds", "honey chipotle pecans", "best ever bruschetta", "body and soul health muffins", "kittencals beef burritos", "the traditional cyprus sandwich with halloumi onions and tomato", "delicious cheesy bacon and green onion potato skins", "fried almonds", "nifs peanut butter banana muffins", "lemon parsley popcorn", "not your ordinary chocolate chip cookies liqueur laced", "delicious and simple fruit dip", "fresh mango bread", "hot salty nuts", "omas boterkoek dutch buttercake", "apple butterflies", "lemon coconut muffins", "green mountain granola", "crispy fried chicken spring rolls", "guacamole stuffed deviled eggs", "hummus with a twist", "quick elephant ears", "homemade popsicles", "1 2 3 black bean salsa dip", "fantastic banana bran muffins", "sugared cinnamon almonds", "pumpkin chocolate chip muffins", "oatmeal cookie granola"}, + "dessert": {"no bake hersheys bar pie", "big ol cowboy cookies", "crackle top molasses cookies", "old fashion oatmeal pie", "cranberry nut swirls", "butter balls", "peanut butter gooey cake", "mamas fruit cobbler", "pink stuff cherry pie filling pineapple dessert", "chocolate star cookies", "midsummer swedish strawberry compote jordgubbskrm", "foolproof one bowl banana cake", "creamy apple dessert", "walnut chews", "yummy bread pudding", "white chocolate cheesecake", "hersheys kiss peanut butter cookies", "coconut pound cake", "frosted rhubarb cookies", "strawberry pavlova", "cookies n cream ice cream", "perfect pumpkin pie", "gluten free dutch sugar cookies", "raw apple crumble no bake", "cheesecake kugel", "moo less chocolate pie", "chocolate macadamia nut brownies", "disneyland snickerdoodles", "islands bananas foster", "frozen kahlua creme", "nifs peanut butter banana muffins", "peach cobbler with oatmeal cookie topping", "christmas cardamom butter cookies", "death by chocolate cake", "moms southern pecan pie", "the best brownies ever", "jerrys chocolate ice cream", "strawberry angel trifle", "zucchini mock apple pie", "low fat chocolate peanut butter dessert", "creamy raspberry mallow pie", "french napoleons", "pie crust cinnamon rolls", "not your ordinary chocolate chip cookies liqueur laced", "foolproof dark chocolate fudge", "whole wheat sugar cookies", "awesome kahlua cake", "up those antioxidants with blueberry sauce", "grammie millers swedish apple pie", "glendas flourless peanut butter cookies", "my best banana pudding dessert", "viskos praline sauce", "perfect purple punch", "reindeer bark", "lindas bloodshot eyeballs", "moroccan fruit salad", "apple dumpling bake", "simons pumpkin bread pudding", "baileys flourless peanut butter cookies", "a 1 cherry cobbler tart a1", "monkey balls", "chocolate angel food cake", "creamy strawberries", "harvest cake", "deep dark chocolate moist cake", "spooktacular halloween graveyard cake", "cream cheese walnut drop cookies", "omas boterkoek dutch buttercake", "kates basic crepes", "banana spice bars", "ruth walls german apple cake", "low fat low cholesterol chocolate cake cupcakes", 
"lower fat peanut butter rice krispies bars", "nutella rolls", "fruit salad pudding", "strawberry and cream cheese parfait", "apple dessert quick", "betty crocker chocolate chip cookies 1971 mens favorites 22", "so there reeses peanut butter bars", "moms buttery apple cake", "chocolate almond roca bar", "turtles", "sesame toffee", "chocolate rainbow krispies treats", "dirt cups for kids", "ultimate seven layer bars", "raisin oat cookies", "snickers bar cookies", "french pie pastry", "sour cream pumpkin bundt cake", "microwave nut brittle", "cinnamon rolls buns", "nutella mousse", "blueberry sour cream cake", "angelic strawberry frozen yogurt", "chocolate chip bundt cake with chocolate glaze", "creole cake", "apricot banana squares", "banana snack cake with delicious cream cheese frosting", "pineapple coconut empanadas", "awesome chocolate butterscotch chip cookies", "easy homemade almond roca", "sonic strawberry cheesecake shake", "lubys cafeteria butternut brownie pie", "spiced poached pears", "chocolate mocha pudding low carb", "iced banana cream", "kathy dessert baked bananas zwt ii asia", "whipped cream cream cheese frosting", "italian biscotti al la syd", "died and went to heaven chocolate cake diabetic version", "coffee and chocolate pudding", "mimis maine blueberry cobbler", "cherry cola float", "linzer bars", "confectioners sugar cookies", "double chocolate mint chip cookies", "quick elephant ears", "swirled cranberry cheesecake", "mexican rice pudding", "eclair torte", "spiced pumpkin pie", "caramel breakfast cake", "lime granita", "chocolate coconut dream bars", "blueberry banana pie", "grannys gingersnaps", "homemade popsicles", "heavenly lemon bread pudding", "pizzelles", "mckinley tea cakes", "lazy day cobbler", "old school deja vu chocolate peanut butter squares", "cheesecake pie", "aunt zanas amish sugar cookies eggless", "amish cream pie", "chocolate chip cookie dough ice cream", "snickerdoodles dream", "chocolate cheese fudge", "german apple cake with cream cheese frosting", "fresh orange slices with honey and cinnamon", "frozen oreo cookie dessert", "blueberry crunch", "amaretto bon bon balls", "red cherry pie", "creamy lemon tarts", "brownie truffles", "strawberry and baileys fool", "easy danish kringle", "chocolate covered dipped strawberries", "caf a la russe chocolate coffee"}, +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/data/hacker.go b/vendor/github.com/brianvoe/gofakeit/v7/data/hacker.go new file mode 100644 index 0000000000..08a5f86ad5 --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/data/hacker.go @@ -0,0 +1,20 @@ +package data + +// Hacker consists of random hacker phrases +var Hacker = map[string][]string{ + "abbreviation": {"TCP", "HTTP", "SDD", "RAM", "GB", "CSS", "SSL", "AGP", "SQL", "FTP", "PCI", "AI", "ADP", "RSS", "XML", "EXE", "COM", "HDD", "THX", "SMTP", "SMS", "USB", "PNG", "SAS", "IB", "SCSI", "JSON", "XSS", "JBOD"}, + "adjective": {"auxiliary", "primary", "back-end", "digital", "open-source", "virtual", "cross-platform", "redundant", "online", "haptic", "multi-byte", "bluetooth", "wireless", "1080p", "neural", "optical", "solid state", "mobile"}, + "noun": {"driver", "protocol", "bandwidth", "panel", "microchip", "program", "port", "card", "array", "interface", "system", "sensor", "firewall", "hard drive", "pixel", "alarm", "feed", "monitor", "application", "transmitter", "bus", "circuit", "capacitor", "matrix"}, + "verb": {"back up", "bypass", "hack", "override", "compress", "copy", "navigate", "index", "connect", "generate", 
"quantify", "calculate", "synthesize", "input", "transmit", "program", "reboot", "parse", "read", "write", "load", "render", "validate", "verify", "sign", "decrypt", "encrypt", "construct", "deconstruct", "compile", "transpile", "bundle", "lock", "unlock", "buffer", "format"}, + "ingverb": {"backing up", "bypassing", "hacking", "overriding", "compressing", "copying", "navigating", "indexing", "connecting", "generating", "quantifying", "calculating", "synthesizing", "transmitting", "programming", "parsing"}, + "phrase": { + "If we {hackerverb} the {hackernoun}, we can get to the {hackerabbreviation} {hackernoun} through the {hackeradjective} {hackerabbreviation} {hackernoun}!", + "We need to {hackerverb} the {hackeradjective} {hackerabbreviation} {hackernoun}!", + "Try to {hackerverb} the {hackerabbreviation} {hackernoun}, maybe it will {hackerverb} the {hackeradjective} {hackernoun}!", + "You can't {hackerverb} the {hackernoun} without {hackeringverb} the {hackeradjective} {hackerabbreviation} {hackernoun}!", + "Use the {hackeradjective} {hackerabbreviation} {hackernoun}, then you can {hackerverb} the {hackeradjective} {hackernoun}!", + "The {hackerabbreviation} {hackernoun} is down, {hackerverb} the {hackeradjective} {hackernoun} so we can {hackerverb} the {hackerabbreviation} {hackernoun}!", + "{hackeringverb} the {hackernoun} won't do anything, we need to {hackerverb} the {hackeradjective} {hackerabbreviation} {hackernoun}!", + "I'll {hackerverb} the {hackeradjective} {hackerabbreviation} {hackernoun}, that should {hackerverb} the {hackerabbreviation} {hackernoun}!", + }, +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/data/hipster.go b/vendor/github.com/brianvoe/gofakeit/v7/data/hipster.go new file mode 100644 index 0000000000..f036f4639b --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/data/hipster.go @@ -0,0 +1,6 @@ +package data + +// Hipster consists of random hipster words +var Hipster = map[string][]string{ + "word": {"Wes Anderson", "chicharrones", "narwhal", "food truck", "marfa", "aesthetic", "keytar", "art party", "sustainable", "forage", "mlkshk", "gentrify", "locavore", "swag", "hoodie", "microdosing", "VHS", "before they sold out", "pabst", "plaid", "Thundercats", "freegan", "scenester", "hella", "occupy", "truffaut", "raw denim", "beard", "post-ironic", "photo booth", "twee", "90's", "pitchfork", "cray", "cornhole", "kale chips", "pour-over", "yr", "five dollar toast", "kombucha", "you probably haven't heard of them", "mustache", "fixie", "try-hard", "franzen", "kitsch", "austin", "stumptown", "keffiyeh", "whatever", "tumblr", "DIY", "shoreditch", "biodiesel", "vegan", "pop-up", "banjo", "kogi", "cold-pressed", "letterpress", "chambray", "butcher", "synth", "trust fund", "hammock", "farm-to-table", "intelligentsia", "loko", "ugh", "offal", "poutine", "gastropub", "Godard", "jean shorts", "sriracha", "dreamcatcher", "leggings", "fashion axe", "church-key", "meggings", "tote bag", "disrupt", "readymade", "helvetica", "flannel", "meh", "roof", "hashtag", "knausgaard", "cronut", "schlitz", "green juice", "waistcoat", "normcore", "viral", "ethical", "actually", "fingerstache", "humblebrag", "deep v", "wayfarers", "tacos", "taxidermy", "selvage", "put a bird on it", "ramps", "portland", "retro", "kickstarter", "bushwick", "brunch", "distillery", "migas", "flexitarian", "XOXO", "small batch", "messenger bag", "heirloom", "tofu", "bicycle rights", "bespoke", "salvia", "wolf", "selfies", "echo", "park", "listicle", "craft beer", "chartreuse", "sartorial", 
"pinterest", "mumblecore", "kinfolk", "vinyl", "etsy", "umami", "8-bit", "polaroid", "banh mi", "crucifix", "bitters", "brooklyn", "PBR&B", "drinking", "vinegar", "squid", "tattooed", "skateboard", "vice", "authentic", "literally", "lomo", "celiac", "health", "goth", "artisan", "chillwave", "blue bottle", "pickled", "next level", "neutra", "organic", "Yuccie", "paleo", "blog", "single-origin coffee", "seitan", "street", "gluten-free", "mixtape", "venmo", "irony", "everyday", "carry", "slow-carb", "3 wolf moon", "direct trade", "lo-fi", "tousled", "tilde", "semiotics", "cred", "chia", "master", "cleanse", "ennui", "quinoa", "pug", "iPhone", "fanny pack", "cliche", "cardigan", "asymmetrical", "meditation", "YOLO", "typewriter", "pork belly", "shabby chic", "+1", "lumbersexual", "williamsburg"}, +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/data/html.go b/vendor/github.com/brianvoe/gofakeit/v7/data/html.go new file mode 100644 index 0000000000..5787edd8ae --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/data/html.go @@ -0,0 +1,7 @@ +package data + +// Html consists of various html information +var Html = map[string][]string{ + "svg": {"rect", "circle", "ellipse", "line", "polyline", "polygon"}, + "input_name": {"title", "first_name", "last_name", "suffix", "address", "postal_code", "city", "state", "country", "date_of_birth", "card_number", "description", "message", "status"}, +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/data/internet.go b/vendor/github.com/brianvoe/gofakeit/v7/data/internet.go new file mode 100644 index 0000000000..ae7561af91 --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/data/internet.go @@ -0,0 +1,11 @@ +package data + +// Internet consists of various internet information +var Internet = map[string][]string{ + "browser": {"firefox", "chrome", "internetExplorer", "opera", "safari"}, + "domain_suffix": {"com", "biz", "info", "name", "net", "org", "io"}, + "http_method": {"HEAD", "GET", "POST", "PUT", "PATCH", "DELETE"}, + "http_version": {"HTTP/1.0", "HTTP/1.1", "HTTP/2.0"}, + "http_status_simple": {"200", "301", "302", "400", "404", "500"}, + "http_status_general": {"100", "200", "201", "203", "204", "205", "301", "302", "304", "400", "401", "403", "404", "405", "406", "416", "500", "501", "502", "503", "504"}, +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/data/isbn.go b/vendor/github.com/brianvoe/gofakeit/v7/data/isbn.go new file mode 100644 index 0000000000..ba9fd3eee4 --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/data/isbn.go @@ -0,0 +1,58 @@ +package data + +// Prefixes for ISBN standards +const ( + ISBN13Prefix = "978" + ISBN10Prefix = "979" +) + +// ISBNRule defines a registrant rule range and its length +type ISBNRule struct { + Min string + Max string + Length int +} + +// ISBNRules maps prefix -> registration group -> registrant rules +var ISBNRules = map[string]map[string][]ISBNRule{ + ISBN13Prefix: { + "0": { + {Min: "0000000", Max: "1999999", Length: 2}, + {Min: "2000000", Max: "2279999", Length: 3}, + {Min: "2280000", Max: "2289999", Length: 4}, + {Min: "2290000", Max: "6479999", Length: 3}, + {Min: "6480000", Max: "6489999", Length: 7}, + {Min: "6490000", Max: "6999999", Length: 3}, + {Min: "7000000", Max: "8499999", Length: 4}, + {Min: "8500000", Max: "8999999", Length: 5}, + {Min: "9000000", Max: "9499999", Length: 6}, + {Min: "9500000", Max: "9999999", Length: 7}, + }, + "1": { + {Min: "0000000", Max: "0999999", Length: 2}, + {Min: "1000000", Max: "3999999", Length: 3}, + {Min: "4000000", Max: 
"5499999", Length: 4}, + {Min: "5500000", Max: "7319999", Length: 5}, + {Min: "7320000", Max: "7399999", Length: 7}, + {Min: "7400000", Max: "8697999", Length: 5}, + {Min: "8698000", Max: "9729999", Length: 6}, + {Min: "9730000", Max: "9877999", Length: 4}, + {Min: "9878000", Max: "9989999", Length: 6}, + {Min: "9990000", Max: "9999999", Length: 7}, + }, + }, + ISBN10Prefix: { + "8": { + {Min: "0000000", Max: "1999999", Length: 2}, + {Min: "2000000", Max: "2279999", Length: 3}, + {Min: "2280000", Max: "2289999", Length: 4}, + {Min: "2290000", Max: "6479999", Length: 3}, + {Min: "6480000", Max: "6489999", Length: 7}, + {Min: "6490000", Max: "6999999", Length: 3}, + {Min: "7000000", Max: "8499999", Length: 4}, + {Min: "8500000", Max: "8999999", Length: 5}, + {Min: "9000000", Max: "9499999", Length: 6}, + {Min: "9500000", Max: "9999999", Length: 7}, + }, + }, +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/data/job.go b/vendor/github.com/brianvoe/gofakeit/v7/data/job.go new file mode 100644 index 0000000000..905dd74ee0 --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/data/job.go @@ -0,0 +1,8 @@ +package data + +// Job consists of job data +var Job = map[string][]string{ + "title": {"Administrator", "Agent", "Analyst", "Architect", "Assistant", "Associate", "Consultant", "Coordinator", "Designer", "Developer", "Director", "Engineer", "Executive", "Facilitator", "Liaison", "Manager", "Officer", "Orchestrator", "Planner", "Producer", "Representative", "Specialist", "Strategist", "Supervisor", "Technician"}, + "descriptor": {"Central", "Chief", "Corporate", "Customer", "Direct", "District", "Dynamic", "Dynamic", "Forward", "Future", "Global", "Human", "Internal", "International", "Investor", "Lead", "Legacy", "National", "Principal", "Product", "Regional", "Senior"}, + "level": {"Accountability", "Accounts", "Applications", "Assurance", "Brand", "Branding", "Communications", "Configuration", "Creative", "Data", "Directives", "Division", "Factors", "Functionality", "Group", "Identity", "Implementation", "Infrastructure", "Integration", "Interactions", "Intranet", "Marketing", "Markets", "Metrics", "Mobility", "Operations", "Optimization", "Paradigm", "Program", "Quality", "Research", "Response", "Security", "Solutions", "Tactics", "Usability", "Web"}, +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/data/languages.go b/vendor/github.com/brianvoe/gofakeit/v7/data/languages.go new file mode 100644 index 0000000000..5fdfc9e6fe --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/data/languages.go @@ -0,0 +1,9 @@ +package data + +// Languages consists of address information +var Languages = map[string][]string{ + "short": {"aa", "ab", "ae", "af", "ak", "am", "an", "ar", "as", "av", "ay", "az", "ba", "be", "bg", "bh", "bi", "bm", "bn", "bo", "br", "bs", "ca", "ce", "ch", "co", "cr", "cs", "cv", "cy", "da", "de", "dv", "dz", "ee", "en", "eo", "es", "et", "eu", "fa", "ff", "fi", "fj", "fo", "fr", "fy", "ga", "gd", "gl", "gn", "gu", "gv", "ha", "he", "hi", "ho", "hr", "ht", "hu", "hy", "hz", "ia", "id", "ie", "ig", "ii", "ik", "io", "is", "it", "iu", "ja", "jv", "ka", "kg", "ki", "kj", "kk", "kl", "km", "kn", "ko", "kr", "ks", "ku", "kv", "kw", "ky", "la", "lb", "lg", "li", "ln", "lo", "lt", "lu", "lv", "mg", "mh", "mi", "mk", "ml", "mn", "mr", "ms", "mt", "my", "na", "ne", "ng", "nl", "no", "nv", "ny", "oc", "oj", "om", "or", "os", "pa", "pi", "pl", "ps", "pt", "qu", "rm", "rn", "ro", "ru", "rw", "sa", "sc", "sd", "se", "sg", "si", "sk", "sl", "sm", "sn", "so", "sq", "sr", 
"ss", "st", "su", "sv", "sw", "ta", "te", "tg", "th", "ti", "tk", "tl", "tn", "to", "tr", "ts", "tt", "tw", "ty", "ug", "uk", "ur", "uz", "ve", "vi", "wa", "wo", "xh", "yi", "yo", "za", "zh", "zu"}, + "long": {"Afar", "Abkhazian", "Avestan", "Afrikaans", "Akan", "Amharic", "Aragonese", "Arabic", "Assamese", "Avaric", "Aymara", "Azerbaijani", "Bashkir", "Belarusian", "Bulgarian", "Bihari", "Bislama", "Bambara", "Bengali", "Tibetan", "Breton", "Bosnian", "Catalan", "Chechen", "Chamorro", "Corsican", "Cree", "Czech", "Chuvash", "Welsh", "Danish", "German", "Divehi", "Dzongkha", "Ewe", "English", "Esperanto", "Spanish", "Estonian", "Basque", "Persian", "Fulah", "Finnish", "Fijian", "Faroese", "French", "Western Frisian", "Irish", "Gaelic", "Galician", "Guarani", "Gujarati", "Manx", "Hausa", "Hebrew", "Hindi", "Hiri Motu", "Croatian", "Haitian", "Hungarian", "Armenian", "Herero", "Interlingua", "Indonesian", "Interlingue", "Igbo", "Sichuan Yi", "Inupiaq", "Ido", "Icelandic", "Italian", "Inuktitut", "Japanese", "Javanese", "Georgian", "Kongo", "Kikuyu", "Kuanyama", "Kazakh", "Kalaallisut", "Central Khmer", "Kannada", "Korean", "Kanuri", "Kashmiri", "Kurdish", "Komi", "Cornish", "Kirghiz", "Latin", "Luxembourgish", "Ganda", "Limburgan", "Lingala", "Lao", "Lithuanian", "Luba-Katanga", "Latvian", "Malagasy", "Marshallese", "Maori", "Macedonian", "Malayalam", "Mongolian", "Marathi", "Malay", "Maltese", "Burmese", "Nauru", "Nepali", "Ndonga", "Dutch", "Norwegian", "Navajo", "Chichewa", "Occitan", "Ojibwa", "Oromo", "Oriya", "Ossetian", "Panjabi", "Pali", "Polish", "Pushto", "Portuguese", "Quechua", "Romansh", "Rundi", "Romanian", "Russian", "Kinyarwanda", "Sanskrit", "Sardinian", "Sindhi", "Northern Sami", "Sango", "Sinhala", "Slovak", "Slovenian", "Samoan", "Shona", "Somali", "Albanian", "Serbian", "Swati", "Sotho", "Sundanese", "Swedish", "Swahili", "Tamil", "Telugu", "Tajik", "Thai", "Tigrinya", "Turkmen", "Tagalog", "Tswana", "Tonga", "Turkish", "Tsonga", "Tatar", "Twi", "Tahitian", "Uighur", "Ukrainian", "Urdu", "Uzbek", "Venda", "Vietnamese", "Walloon", "Wolof", "Xhosa", "Yiddish", "Yoruba", "Zhuang", "Chinese", "Zulu"}, + "bcp": {"ar-SA", "cs-CZ", "da-DK", "de-DE", "el-GR", "en-AU", "en-GB", "en-IE", "en-US", "en-ZA", "es-ES", "es-MX", "fi-FI", "fr-CA", "fr-FR", "he-IL", "hi-IN", "hu-HU", "id-ID", "it-IT", "ja-JP", "ko-KR", "nl-BE", "nl-NL", "no-NO", "pl-PL", "pt-BR", "pt-PT", "ro-RO", "ru-RU", "sk-SK", "sv-SE", "th-TH", "tr-TR", "zh-CN", "zh-HK", "zh-TW"}, + "programming": {"A# .NET", "A# (Axiom)", "A-0 System", "A+", "A++", "ABAP", "ABC", "ABC ALGOL", "ABLE", "ABSET", "ABSYS", "ACC", "Accent", "Ace DASL", "ACL2", "ACT-III", "Action!", "ActionScript", "Ada", "Adenine", "Agda", "Agilent VEE", "Agora", "AIMMS", "Alef", "ALF", "ALGOL 58", "ALGOL 60", "ALGOL 68", "ALGOL W", "Alice", "Alma-0", "AmbientTalk", "Amiga E", "AMOS", "AMPL", "APL", "App Inventor for Android's visual block language", "AppleScript", "Arc", "ARexx", "Argus", "AspectJ", "Assembly language", "ATS", "Ateji PX", "AutoHotkey", "Autocoder", "AutoIt", "AutoLISP / Visual LISP", "Averest", "AWK", "Axum", "B", "Babbage", "Bash", "BASIC", "bc", "BCPL", "BeanShell", "Batch (Windows/Dos)", "Bertrand", "BETA", "Bigwig", "Bistro", "BitC", "BLISS", "Blue", "Bon", "Boo", "Boomerang", "Bourne shell", "bash", "ksh", "BREW", "BPEL", "C", "C--", "C++", "C#", "C/AL", "Caché ObjectScript", "C Shell", "Caml", "Candle", "Cayenne", "CDuce", "Cecil", "Cel", "Cesil", "Ceylon", "CFEngine", "CFML", "Cg", "Ch", "Chapel", "CHAIN", "Charity", "Charm", 
"Chef", "CHILL", "CHIP-8", "chomski", "ChucK", "CICS", "Cilk", "CL", "Claire", "Clarion", "Clean", "Clipper", "CLIST", "Clojure", "CLU", "CMS-2", "COBOL", "Cobra", "CODE", "CoffeeScript", "Cola", "ColdC", "ColdFusion", "COMAL", "Combined Programming Language", "COMIT", "Common Intermediate Language", "Common Lisp", "COMPASS", "Component Pascal", "Constraint Handling Rules", "Converge", "Cool", "Coq", "Coral 66", "Corn", "CorVision", "COWSEL", "CPL", "csh", "CSP", "Csound", "CUDA", "Curl", "Curry", "Cyclone", "Cython", "D", "DASL", "DASL", "Dart", "DataFlex", "Datalog", "DATATRIEVE", "dBase", "dc", "DCL", "Deesel", "Delphi", "DCL", "DinkC", "DIBOL", "Dog", "Draco", "DRAKON", "Dylan", "DYNAMO", "E", "E#", "Ease", "Easy PL/I", "Easy Programming Language", "EASYTRIEVE PLUS", "ECMAScript", "Edinburgh IMP", "EGL", "Eiffel", "ELAN", "Elixir", "Elm", "Emacs Lisp", "Emerald", "Epigram", "EPL", "Erlang", "es", "Escapade", "Escher", "ESPOL", "Esterel", "Etoys", "Euclid", "Euler", "Euphoria", "EusLisp Robot Programming Language", "CMS EXEC", "EXEC 2", "Executable UML", "F", "F#", "Factor", "Falcon", "Fancy", "Fantom", "FAUST", "Felix", "Ferite", "FFP", "Fjölnir", "FL", "Flavors", "Flex", "FLOW-MATIC", "FOCAL", "FOCUS", "FOIL", "FORMAC", "@Formula", "Forth", "Fortran", "Fortress", "FoxBase", "FoxPro", "FP", "FPr", "Franz Lisp", "F-Script", "FSProg", "G", "Google Apps Script", "Game Maker Language", "GameMonkey Script", "GAMS", "GAP", "G-code", "Genie", "GDL", "Gibiane", "GJ", "GEORGE", "GLSL", "GNU E", "GM", "Go", "Go!", "GOAL", "Gödel", "Godiva", "GOM (Good Old Mad)", "Goo", "Gosu", "GOTRAN", "GPSS", "GraphTalk", "GRASS", "Groovy", "Hack (programming language)", "HAL/S", "Hamilton C shell", "Harbour", "Hartmann pipelines", "Haskell", "Haxe", "High Level Assembly", "HLSL", "Hop", "Hope", "Hugo", "Hume", "HyperTalk", "IBM Basic assembly language", "IBM HAScript", "IBM Informix-4GL", "IBM RPG", "ICI", "Icon", "Id", "IDL", "Idris", "IMP", "Inform", "Io", "Ioke", "IPL", "IPTSCRAE", "ISLISP", "ISPF", "ISWIM", "J", "J#", "J++", "JADE", "Jako", "JAL", "Janus", "JASS", "Java", "JavaScript", "JCL", "JEAN", "Join Java", "JOSS", "Joule", "JOVIAL", "Joy", "JScript", "JScript .NET", "JavaFX Script", "Julia", "Jython", "K", "Kaleidoscope", "Karel", "Karel++", "KEE", "Kixtart", "KIF", "Kojo", "Kotlin", "KRC", "KRL", "KUKA", "KRYPTON", "ksh", "L", "L# .NET", "LabVIEW", "Ladder", "Lagoona", "LANSA", "Lasso", "LaTeX", "Lava", "LC-3", "Leda", "Legoscript", "LIL", "LilyPond", "Limbo", "Limnor", "LINC", "Lingo", "Linoleum", "LIS", "LISA", "Lisaac", "Lisp", "Lite-C", "Lithe", "Little b", "Logo", "Logtalk", "LPC", "LSE", "LSL", "LiveCode", "LiveScript", "Lua", "Lucid", "Lustre", "LYaPAS", "Lynx", "M2001", "M4", "Machine code", "MAD", "MAD/I", "Magik", "Magma", "make", "Maple", "MAPPER", "MARK-IV", "Mary", "MASM Microsoft Assembly x86", "Mathematica", "MATLAB", "Maxima", "Macsyma", "Max", "MaxScript", "Maya (MEL)", "MDL", "Mercury", "Mesa", "Metacard", "Metafont", "MetaL", "Microcode", "MicroScript", "MIIS", "MillScript", "MIMIC", "Mirah", "Miranda", "MIVA Script", "ML", "Moby", "Model 204", "Modelica", "Modula", "Modula-2", "Modula-3", "Mohol", "MOO", "Mortran", "Mouse", "MPD", "CIL", "MSL", "MUMPS", "NASM", "NATURAL", "Napier88", "Neko", "Nemerle", "nesC", "NESL", "Net.Data", "NetLogo", "NetRexx", "NewLISP", "NEWP", "Newspeak", "NewtonScript", "NGL", "Nial", "Nice", "Nickle", "NPL", "Not eXactly C", "Not Quite C", "NSIS", "Nu", "NWScript", "NXT-G", "o:XML", "Oak", "Oberon", "Obix", "OBJ2", "Object Lisp", "ObjectLOGO", 
"Object REXX", "Object Pascal", "Objective-C", "Objective-J", "Obliq", "Obol", "OCaml", "occam", "occam-π", "Octave", "OmniMark", "Onyx", "Opa", "Opal", "OpenCL", "OpenEdge ABL", "OPL", "OPS5", "OptimJ", "Orc", "ORCA/Modula-2", "Oriel", "Orwell", "Oxygene", "Oz", "P#", "ParaSail (programming language)", "PARI/GP", "Pascal", "Pawn", "PCASTL", "PCF", "PEARL", "PeopleCode", "Perl", "PDL", "PHP", "Phrogram", "Pico", "Picolisp", "Pict", "Pike", "PIKT", "PILOT", "Pipelines", "Pizza", "PL-11", "PL/0", "PL/B", "PL/C", "PL/I", "PL/M", "PL/P", "PL/SQL", "PL360", "PLANC", "Plankalkül", "Planner", "PLEX", "PLEXIL", "Plus", "POP-11", "PostScript", "PortablE", "Powerhouse", "PowerBuilder", "PowerShell", "PPL", "Processing", "Processing.js", "Prograph", "PROIV", "Prolog", "PROMAL", "Promela", "PROSE modeling language", "PROTEL", "ProvideX", "Pro*C", "Pure", "Python", "Q (equational programming language)", "Q (programming language from Kx Systems)", "Qalb", "Qi", "QtScript", "QuakeC", "QPL", "R", "R++", "Racket", "RAPID", "Rapira", "Ratfiv", "Ratfor", "rc", "REBOL", "Red", "Redcode", "REFAL", "Reia", "Revolution", "rex", "REXX", "Rlab", "RobotC", "ROOP", "RPG", "RPL", "RSL", "RTL/2", "Ruby", "RuneScript", "Rust", "S", "S2", "S3", "S-Lang", "S-PLUS", "SA-C", "SabreTalk", "SAIL", "SALSA", "SAM76", "SAS", "SASL", "Sather", "Sawzall", "SBL", "Scala", "Scheme", "Scilab", "Scratch", "Script.NET", "Sed", "Seed7", "Self", "SenseTalk", "SequenceL", "SETL", "Shift Script", "SIMPOL", "Shakespeare", "SIGNAL", "SiMPLE", "SIMSCRIPT", "Simula", "Simulink", "SISAL", "SLIP", "SMALL", "Smalltalk", "Small Basic", "SML", "Snap!", "SNOBOL", "SPITBOL", "Snowball", "SOL", "Span", "SPARK", "SPIN", "SP/k", "SPS", "Squeak", "Squirrel", "SR", "S/SL", "Stackless Python", "Starlogo", "Strand", "Stata", "Stateflow", "Subtext", "SuperCollider", "SuperTalk", "Swift (Apple programming language)", "Swift (parallel scripting language)", "SYMPL", "SyncCharts", "SystemVerilog", "T", "TACL", "TACPOL", "TADS", "TAL", "Tcl", "Tea", "TECO", "TELCOMP", "TeX", "TEX", "TIE", "Timber", "TMG", "Tom", "TOM", "Topspeed", "TPU", "Trac", "TTM", "T-SQL", "TTCN", "Turing", "TUTOR", "TXL", "TypeScript", "Turbo C++", "Ubercode", "UCSD Pascal", "Umple", "Unicon", "Uniface", "UNITY", "Unix shell", "UnrealScript", "Vala", "VBA", "VBScript", "Verilog", "VHDL", "Visual Basic", "Visual Basic .NET", "Visual DataFlex", "Visual DialogScript", "Visual Fortran", "Visual FoxPro", "Visual J++", "Visual J#", "Visual Objects", "Visual Prolog", "VSXu", "Vvvv", "WATFIV, WATFOR", "WebDNA", "WebQL", "Windows PowerShell", "Winbatch", "Wolfram", "Wyvern", "X++", "X#", "X10", "XBL", "XC", "XMOS architecture", "xHarbour", "XL", "Xojo", "XOTcl", "XPL", "XPL0", "XQuery", "XSB", "XSLT", "XPath", "Xtend", "Yorick", "YQL", "Z notation", "Zeno", "ZOPL", "ZPL"}, +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/data/log_level.go b/vendor/github.com/brianvoe/gofakeit/v7/data/log_level.go new file mode 100644 index 0000000000..01d98b63c6 --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/data/log_level.go @@ -0,0 +1,8 @@ +package data + +// LogLevels consists of log levels for several types +var LogLevels = map[string][]string{ + "general": {"error", "warning", "info", "fatal", "trace", "debug"}, + "syslog": {"emerg", "alert", "crit", "err", "warning", "notice", "info", "debug"}, + "apache": {"emerg", "alert", "crit", "error", "warn", "notice", "info", "debug", "trace1-8"}, +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/data/lorem.go 
b/vendor/github.com/brianvoe/gofakeit/v7/data/lorem.go new file mode 100644 index 0000000000..b0a8f8a137 --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/data/lorem.go @@ -0,0 +1,6 @@ +package data + +// Lorem consists of lorem ipsum information +var Lorem = map[string][]string{ + "word": {"alias", "consequatur", "aut", "perferendis", "sit", "voluptatem", "accusantium", "doloremque", "aperiam", "eaque", "ipsa", "quae", "ab", "illo", "inventore", "veritatis", "et", "quasi", "architecto", "beatae", "vitae", "dicta", "sunt", "explicabo", "aspernatur", "aut", "odit", "aut", "fugit", "sed", "quia", "consequuntur", "magni", "dolores", "eos", "qui", "ratione", "voluptatem", "sequi", "nesciunt", "neque", "dolorem", "ipsum", "quia", "dolor", "sit", "amet", "consectetur", "adipisci", "velit", "sed", "quia", "non", "numquam", "eius", "modi", "tempora", "incidunt", "ut", "labore", "et", "dolore", "magnam", "aliquam", "quaerat", "voluptatem", "ut", "enim", "ad", "minima", "veniam", "quis", "nostrum", "exercitationem", "ullam", "corporis", "nemo", "enim", "ipsam", "voluptatem", "quia", "voluptas", "sit", "suscipit", "laboriosam", "nisi", "ut", "aliquid", "ex", "ea", "commodi", "consequatur", "quis", "autem", "vel", "eum", "iure", "reprehenderit", "qui", "in", "ea", "voluptate", "velit", "esse", "quam", "nihil", "molestiae", "et", "iusto", "odio", "dignissimos", "ducimus", "qui", "blanditiis", "praesentium", "laudantium", "totam", "rem", "voluptatum", "deleniti", "atque", "corrupti", "quos", "dolores", "et", "quas", "molestias", "excepturi", "sint", "occaecati", "cupiditate", "non", "provident", "sed", "ut", "perspiciatis", "unde", "omnis", "iste", "natus", "error", "similique", "sunt", "in", "culpa", "qui", "officia", "deserunt", "mollitia", "animi", "id", "est", "laborum", "et", "dolorum", "fuga", "et", "harum", "quidem", "rerum", "facilis", "est", "et", "expedita", "distinctio", "nam", "libero", "tempore", "cum", "soluta", "nobis", "est", "eligendi", "optio", "cumque", "nihil", "impedit", "quo", "porro", "quisquam", "est", "qui", "minus", "id", "quod", "maxime", "placeat", "facere", "possimus", "omnis", "voluptas", "assumenda", "est", "omnis", "dolor", "repellendus", "temporibus", "autem", "quibusdam", "et", "aut", "consequatur", "vel", "illum", "qui", "dolorem", "eum", "fugiat", "quo", "voluptas", "nulla", "pariatur", "at", "vero", "eos", "et", "accusamus", "officiis", "debitis", "aut", "rerum", "necessitatibus", "saepe", "eveniet", "ut", "et", "voluptates", "repudiandae", "sint", "et", "molestiae", "non", "recusandae", "itaque", "earum", "rerum", "hic", "tenetur", "a", "sapiente", "delectus", "ut", "aut", "reiciendis", "voluptatibus", "maiores", "doloribus", "asperiores", "repellat"}, +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/data/minecraft.go b/vendor/github.com/brianvoe/gofakeit/v7/data/minecraft.go new file mode 100644 index 0000000000..015de8af48 --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/data/minecraft.go @@ -0,0 +1,23 @@ +package data + +// Minecraft consists of various minecraft items +var Minecraft = map[string][]string{ + "ore": {"coal", "copper", "iron", "gold", "redstone", "lapis", "diamond", "emerald"}, + "wood": {"oak", "spruce", "birch", "jungle", "acacia", "dark oak"}, + "armortier": {"leather", "chainmail", "iron", "gold", "diamond", "netherite"}, + "armorpart": {"helmet", "chestplate", "leggings", "boots"}, + "weapon": {"sword", "bow", "arrow", "trident", "shield"}, + "tool": {"pickaxe", "axe", "shovel", "hoe", "fishing rod"}, + "dye": 
{"white", "orange", "magenta", "light blue", "yellow", "lime", "pink", "gray", "light gray", "cyan", "purple", "blue", "brown", "green", "red", "black"}, + "food": {"apple", "baked potato", "beetroot", "beetroot soup", "bread", "cake", "carrot", "chorus fruit", "cooked chicken", "cooked cod", "cooked mutton", "cooked salmon", "cookie", "enchanted golden apple", "golden apple", "glow berry", "golden carrot", "honey bottle", "melon slice", "mushroom stew", "poisonous potato", "potato", "pufferfish", "pumpkin pie", "rabbit stew", "raw beef", "raw chicken", "raw cod", "raw mutton", "raw porkchop", "raw rabbit", "raw salmon", "rotten flesh", "spider eye", "steak", "suspicous stew", "sweet berry", "tropical fish"}, + "animal": {"chicken", "cow", "pig", "rabbit", "sheep", "wolf"}, + "villagerjob": {"armourer", "butcher", "carpenter", "cleric", "farmer", "fisherman", "fletcher", "leatherworker", "librarian", "mason", "nitwit", "shepherd", "toolsmith", "weaponsmith"}, + "villagerstation": {"composter", "smoker", "barrel", "loom", "blast furnace", "brewing stand", "cauldron", "fletching table", "cartography table", "lectern", "smithing table", "stonecutter", "grindstone"}, + "villagerlevel": {"novice", "apprentice", "journeyman", "expert", "master"}, + "mobpassive": {"axolotl", "bat", "cat", "chicken", "cod", "cow", "donkey", "fox", "glow squid", "horse", "mooshroom", "mule", "ocelot", "parrot", "pig", "pufferfish", "rabbit", "salmon", "sheep", "skeleton horse", "snow golem", "squid", "strider", "tropical fish", "turtle", "villager", "wandering trader"}, + "mobneutral": {"bee", "cave spider", "dolphin", "enderman", "goat", "iron golem", "llama", "panda", "piglin", "polar bear", "spider", "trader llama", "wolf", "zombified piglin"}, + "mobhostile": {"blaze", "chicken jockey", "creeper", "drowned", "elder guardian", "endermite", "evoker", "ghast", "guardian", "hoglin phantom", "husk", "magma cube", "phantom", "piglin brute", "pillager", "ravager", "shulker", "silverfish", "skeleton", "skeleton horseman", "slime", "spider jockey", "stray", "vex", "vindicator", "witch", "wither skeleton", "zoglin", "zombie", "zombie villager"}, + "mobboss": {"ender dragon", "wither"}, + "biome": {"plain", "forest", "jungle", "mountain", "desert", "taiga", "snowy tundra", "ice spike", "swamp", "savannah", "badlands", "beach", "stone shore", "river", "ocean", "mushroom island", "the nether", "the end"}, + "weather": {"clear", "rain", "thunder"}, +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/data/movie.go b/vendor/github.com/brianvoe/gofakeit/v7/data/movie.go new file mode 100644 index 0000000000..9a381ac117 --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/data/movie.go @@ -0,0 +1,130 @@ +package data + +// From IMDB - Top 250 Movies subset to 100 +var Movies = map[string][]string{ + "name": { + "12 Years a Slave", + "1917", + "2001: A Space Odyssey", + "3 Idiots", + "A Beautiful Mind", + "A Clockwork Orange", + "Alien", + "American Beauty", + "American History X", + "Apocalypse Now", + "Avengers: Infinity War", + "Back to the Future", + "Batman Begins", + "Ben-Hur", + "Blade Runner", + "Casablanca", + "Casino", + "Catch Me If You Can", + "Das Leben der Anderen", + "Dead Poets Society", + "Die Hard", + "Django Unchained", + "Fight Club", + "Finding Nemo", + "Forrest Gump", + "Full Metal Jacket", + "Gandhi", + "Gladiator", + "Gone with the Wind", + "Good Will Hunting", + "Goodfellas", + "Green Book", + "Groundhog Day", + "Harry Potter and the Deathly Hallows - Part 2", + "Heat", + "Inception", + "Indiana 
Jones and the Last Crusade", + "Inglourious Basterds", + "Interstellar", + "Into the Wild", + "Intouchables", + "Joker", + "Judgment at Nuremberg", + "Jurassic Park", + "Kill Bill: Vol. 1", + "L.A. Confidential", + "La vita è bella", + "Lock, Stock and Two Smoking Barrels", + "Léon", + "Mad Max: Fury Road", + "Memento", + "Million Dollar Baby", + "Monsters, Inc.", + "Monty Python and the Holy Grail", + "No Country for Old Men", + "Once Upon a Time in America", + "One Flew Over the Cuckoo's Nest", + "Pirates of the Caribbean: The Curse of the Black Pearl", + "Platoon", + "Prisoners", + "Psycho", + "Pulp Fiction", + "Raiders of the Lost Ark", + "Ratatouille", + "Reservoir Dogs", + "Rocky", + "Saving Private Ryan", + "Scarface", + "Schindler's List", + "Se7en", + "Sherlock Jr.", + "Shutter Island", + "Snatch", + "Spider-Man: No Way Home", + "Star Wars: Episode VI - Return of the Jedi", + "Taxi Driver", + "Terminator 2: Judgment Day", + "The Big Lebowski", + "The Dark Knight", + "The Departed", + "The Empire Strikes Back", + "The Godfather", + "The Green Mile", + "The Lion King", + "The Lord of the Rings: The Fellowship of the Ring", + "The Matrix", + "The Pianist", + "The Prestige", + "The Shawshank Redemption", + "The Terminator", + "The Usual Suspects", + "The Wolf of Wall Street", + "Top Gun: Maverick", + "Toy Story", + "Unforgiven", + "Up", + "V for Vendetta", + "WALL·E", + "Warrior", + "Whiplash", + }, + "genre": { + "Action", + "Adventure", + "Animation", + "Biography", + "Comedy", + "Crime", + "Drama", + "Family", + "Fantasy", + "Film-Noir", + "History", + "Horror", + "Music", + "Musical", + "Mystery", + "Romance", + "Sci-Fi", + "Sport", + "Thriller", + "War", + "Western", + }, +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/data/payment.go b/vendor/github.com/brianvoe/gofakeit/v7/data/payment.go new file mode 100644 index 0000000000..77147cd87c --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/data/payment.go @@ -0,0 +1,211 @@ +package data + +// CreditCardInfo contains credit card info +type CreditCardInfo struct { + Display string + Patterns []uint + Gaps []uint + Lengths []uint + Code CreditCardCode +} + +// CreditCardCode contains code type and size +type CreditCardCode struct { + Name string + Size uint +} + +// CreditCardTypes is an array of credit card types +var CreditCardTypes = []string{"visa", "mastercard", "american-express", "diners-club", "discover", "jcb", "unionpay", "maestro", "elo", "hiper", "hipercard"} + +// CreditCards contains payment information +var CreditCards = map[string]CreditCardInfo{ + "visa": { + Display: "Visa", + Patterns: []uint{4}, + Gaps: []uint{4, 8, 12}, + Lengths: []uint{16}, + Code: CreditCardCode{ + Name: "CVV", + Size: 3, + }, + }, + "mastercard": { + Display: "Mastercard", + Patterns: []uint{ + 51, 55, + 2221, 2229, + 223, 229, + 23, 26, + 270, 271, + 2720, + }, + Gaps: []uint{4, 8, 12}, + Lengths: []uint{16}, + Code: CreditCardCode{ + Name: "CVC", + Size: 3, + }, + }, + "american-express": { + Display: "American Express", + Patterns: []uint{34, 37}, + Gaps: []uint{4, 10}, + Lengths: []uint{15}, + Code: CreditCardCode{ + Name: "CID", + Size: 4, + }, + }, + "diners-club": { + Display: "Diners Club", + Patterns: []uint{ + 300, 305, + 36, 38, 39, + }, + Gaps: []uint{4, 10}, + Lengths: []uint{14, 16, 19}, + Code: CreditCardCode{ + Name: "CVV", + Size: 3, + }, + }, + "discover": { + Display: "Discover", + Patterns: []uint{ + 6011, 644, 649, 65, + }, + Gaps: []uint{4, 8, 12}, + Lengths: []uint{16, 19}, + Code: CreditCardCode{ + 
Name: "CID", + Size: 3, + }, + }, + "jcb": { + Display: "JCB", + Patterns: []uint{ + 2131, 1800, 3528, 3589, + }, + Gaps: []uint{4, 8, 12}, + Lengths: []uint{16, 17, 18, 19}, + Code: CreditCardCode{ + Name: "CVV", + Size: 3, + }, + }, + "unionpay": { + Display: "UnionPay", + Patterns: []uint{ + 620, 624, 626, + 62100, 62182, + 62184, 62187, + 62185, 62197, + 62200, 62205, + 622010, 622999, + 622018, + 622019, 622999, + 62207, 62209, + 622126, 622925, + 623, 626, + 6270, 6272, 6276, + 627700, 627779, + 627781, 627799, + 6282, 6289, + 6291, 6292, + 810, + 8110, 8131, + 8132, 8151, + 8152, 8163, + 8164, 817, + }, + Gaps: []uint{4, 8, 12}, + Lengths: []uint{14, 15, 16, 17, 18, 19}, + Code: CreditCardCode{ + Name: "CVN", + Size: 3, + }, + }, + "maestro": { + Display: "Maestro", + Patterns: []uint{ + 493698, + 500000, 506698, + 506779, 508999, + 56, 59, + 6, 63, 67, + }, + Gaps: []uint{4, 8, 12}, + Lengths: []uint{12, 13, 14, 15, 16, 17, 18, 19}, + Code: CreditCardCode{ + Name: "CVC", + Size: 3, + }, + }, + "elo": { + Display: "Elo", + Patterns: []uint{ + 401178, 401179, + 438935, 457631, + 457632, 431274, + 451416, 457393, + 504175, 506699, + 506778, 509000, + 509999, 627780, + 636297, 636368, + 650031, 650033, + 650035, 650051, + 650405, 650439, + 650485, 650538, + 650541, 650598, + 650700, 650718, + 650720, 650727, + 650901, 650978, + 651652, 651679, + 655000, 655019, + 655021, 65505, + }, + Gaps: []uint{4, 8, 12}, + Lengths: []uint{16}, + Code: CreditCardCode{ + Name: "CVE", + Size: 3, + }, + }, + "mir": { + Display: "Mir", + Patterns: []uint{2200, 2204}, + Gaps: []uint{4, 8, 12}, + Lengths: []uint{16, 17, 18, 19}, + Code: CreditCardCode{ + Name: "CVP2", + Size: 3, + }, + }, + "hiper": { + Display: "Hiper", + Patterns: []uint{ + 637095, + 637568, + 637599, + 637609, + 637612, + }, + Gaps: []uint{4, 8, 12}, + Lengths: []uint{16}, + Code: CreditCardCode{ + Name: "CVC", + Size: 3, + }, + }, + "hipercard": { + Display: "Hipercard", + Patterns: []uint{606282}, + Gaps: []uint{4, 8, 12}, + Lengths: []uint{16}, + Code: CreditCardCode{ + Name: "CVC", + Size: 3, + }, + }, +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/data/person.go b/vendor/github.com/brianvoe/gofakeit/v7/data/person.go new file mode 100644 index 0000000000..8f65a16bf1 --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/data/person.go @@ -0,0 +1,12 @@ +package data + +// Person consists of a slice of people information +var Person = map[string][]string{ + "prefix": {"Mr.", "Mrs.", "Ms.", "Miss", "Dr."}, + "suffix": {"Jr.", "Sr.", "I", "II", "III", "IV", "V", "MD", "DDS", "PhD", "DVM"}, + "first": {"Aaliyah", "Aaron", "Abagail", "Abbey", "Abbie", "Abbigail", "Abby", "Abdiel", "Abdul", "Abdullah", "Abe", "Abel", "Abelardo", "Abigail", "Abigale", "Abigayle", "Abner", "Abraham", "Ada", "Adah", "Adalberto", "Adaline", "Adam", "Adan", "Addie", "Addison", "Adela", "Adelbert", "Adele", "Adelia", "Adeline", "Adell", "Adella", "Adelle", "Aditya", "Adolf", "Adolfo", "Adolph", "Adolphus", "Adonis", "Adrain", "Adrian", "Adriana", "Adrianna", "Adriel", "Adrien", "Adrienne", "Afton", "Aglae", "Agnes", "Agustin", "Agustina", "Ahmad", "Ahmed", "Aida", "Aidan", "Aiden", "Aileen", "Aimee", "Aisha", "Aiyana", "Akeem", "Al", "Alaina", "Alan", "Alana", "Alanis", "Alanna", "Alayna", "Alba", "Albert", "Alberta", "Albertha", "Alberto", "Albin", "Albina", "Alda", "Alden", "Alec", "Aleen", "Alejandra", "Alejandrin", "Alek", "Alena", "Alene", "Alessandra", "Alessandro", "Alessia", "Aletha", "Alex", "Alexa", "Alexander", "Alexandra", "Alexandre", 
"Alexandrea", "Alexandria", "Alexandrine", "Alexandro", "Alexane", "Alexanne", "Alexie", "Alexis", "Alexys", "Alexzander", "Alf", "Alfonso", "Alfonzo", "Alford", "Alfred", "Alfreda", "Alfredo", "Ali", "Alia", "Alice", "Alicia", "Alisa", "Alisha", "Alison", "Alivia", "Aliya", "Aliyah", "Aliza", "Alize", "Allan", "Allen", "Allene", "Allie", "Allison", "Ally", "Alphonso", "Alta", "Althea", "Alva", "Alvah", "Alvena", "Alvera", "Alverta", "Alvina", "Alvis", "Alyce", "Alycia", "Alysa", "Alysha", "Alyson", "Alysson", "Amalia", "Amanda", "Amani", "Amara", "Amari", "Amaya", "Amber", "Ambrose", "Amelia", "Amelie", "Amely", "America", "Americo", "Amie", "Amina", "Amir", "Amira", "Amiya", "Amos", "Amparo", "Amy", "Amya", "Ana", "Anabel", "Anabelle", "Anahi", "Anais", "Anastacio", "Anastasia", "Anderson", "Andre", "Andreane", "Andreanne", "Andres", "Andrew", "Andy", "Angel", "Angela", "Angelica", "Angelina", "Angeline", "Angelita", "Angelo", "Angie", "Angus", "Anibal", "Anika", "Anissa", "Anita", "Aniya", "Aniyah", "Anjali", "Anna", "Annabel", "Annabell", "Annabelle", "Annalise", "Annamae", "Annamarie", "Anne", "Annetta", "Annette", "Annie", "Ansel", "Ansley", "Anthony", "Antoinette", "Antone", "Antonetta", "Antonette", "Antonia", "Antonietta", "Antonina", "Antonio", "Antwan", "Antwon", "Anya", "April", "Ara", "Araceli", "Aracely", "Arch", "Archibald", "Ardella", "Arden", "Ardith", "Arely", "Ari", "Ariane", "Arianna", "Aric", "Ariel", "Arielle", "Arjun", "Arlene", "Arlie", "Arlo", "Armand", "Armando", "Armani", "Arnaldo", "Arne", "Arno", "Arnold", "Arnoldo", "Arnulfo", "Aron", "Art", "Arthur", "Arturo", "Arvel", "Arvid", "Arvilla", "Aryanna", "Asa", "Asha", "Ashlee", "Ashleigh", "Ashley", "Ashly", "Ashlynn", "Ashton", "Ashtyn", "Asia", "Assunta", "Astrid", "Athena", "Aubree", "Aubrey", "Audie", "Audra", "Audreanne", "Audrey", "August", "Augusta", "Augustine", "Augustus", "Aurelia", "Aurelie", "Aurelio", "Aurore", "Austen", "Austin", "Austyn", "Autumn", "Ava", "Avery", "Avis", "Axel", "Ayana", "Ayden", "Ayla", "Aylin", "Baby", "Bailee", "Bailey", "Barbara", "Barney", "Baron", "Barrett", "Barry", "Bart", "Bartholome", "Barton", "Baylee", "Beatrice", "Beau", "Beaulah", "Bell", "Bella", "Belle", "Ben", "Benedict", "Benjamin", "Bennett", "Bennie", "Benny", "Benton", "Berenice", "Bernadette", "Bernadine", "Bernard", "Bernardo", "Berneice", "Bernhard", "Bernice", "Bernie", "Berniece", "Bernita", "Berry", "Bert", "Berta", "Bertha", "Bertram", "Bertrand", "Beryl", "Bessie", "Beth", "Bethany", "Bethel", "Betsy", "Bette", "Bettie", "Betty", "Bettye", "Beulah", "Beverly", "Bianka", "Bill", "Billie", "Billy", "Birdie", "Blair", "Blaise", "Blake", "Blanca", "Blanche", "Blaze", "Bo", "Bobbie", "Bobby", "Bonita", "Bonnie", "Boris", "Boyd", "Brad", "Braden", "Bradford", "Bradley", "Bradly", "Brady", "Braeden", "Brain", "Brandi", "Brando", "Brandon", "Brandt", "Brandy", "Brandyn", "Brannon", "Branson", "Brant", "Braulio", "Braxton", "Brayan", "Breana", "Breanna", "Breanne", "Brenda", "Brendan", "Brenden", "Brendon", "Brenna", "Brennan", "Brennon", "Brent", "Bret", "Brett", "Bria", "Brian", "Briana", "Brianne", "Brice", "Bridget", "Bridgette", "Bridie", "Brielle", "Brigitte", "Brionna", "Brisa", "Britney", "Brittany", "Brock", "Broderick", "Brody", "Brook", "Brooke", "Brooklyn", "Brooks", "Brown", "Bruce", "Bryana", "Bryce", "Brycen", "Bryon", "Buck", "Bud", "Buddy", "Buford", "Bulah", "Burdette", "Burley", "Burnice", "Buster", "Cade", "Caden", "Caesar", "Caitlyn", "Cale", "Caleb", "Caleigh", "Cali", "Calista", "Callie", 
"Camden", "Cameron", "Camila", "Camilla", "Camille", "Camren", "Camron", "Camryn", "Camylle", "Candace", "Candelario", "Candice", "Candida", "Candido", "Cara", "Carey", "Carissa", "Carlee", "Carleton", "Carley", "Carli", "Carlie", "Carlo", "Carlos", "Carlotta", "Carmel", "Carmela", "Carmella", "Carmelo", "Carmen", "Carmine", "Carol", "Carolanne", "Carole", "Carolina", "Caroline", "Carolyn", "Carolyne", "Carrie", "Carroll", "Carson", "Carter", "Cary", "Casandra", "Casey", "Casimer", "Casimir", "Casper", "Cassandra", "Cassandre", "Cassidy", "Cassie", "Catalina", "Caterina", "Catharine", "Catherine", "Cathrine", "Cathryn", "Cathy", "Cayla", "Ceasar", "Cecelia", "Cecil", "Cecile", "Cecilia", "Cedrick", "Celestine", "Celestino", "Celia", "Celine", "Cesar", "Chad", "Chadd", "Chadrick", "Chaim", "Chance", "Chandler", "Chanel", "Chanelle", "Charity", "Charlene", "Charles", "Charley", "Charlie", "Charlotte", "Chase", "Chasity", "Chauncey", "Chaya", "Chaz", "Chelsea", "Chelsey", "Chelsie", "Chesley", "Chester", "Chet", "Cheyanne", "Cheyenne", "Chloe", "Chris", "Christ", "Christa", "Christelle", "Christian", "Christiana", "Christina", "Christine", "Christop", "Christophe", "Christopher", "Christy", "Chyna", "Ciara", "Cicero", "Cielo", "Cierra", "Cindy", "Citlalli", "Clair", "Claire", "Clara", "Clarabelle", "Clare", "Clarissa", "Clark", "Claud", "Claude", "Claudia", "Claudie", "Claudine", "Clay", "Clemens", "Clement", "Clementina", "Clementine", "Clemmie", "Cleo", "Cleora", "Cleta", "Cletus", "Cleve", "Cleveland", "Clifford", "Clifton", "Clint", "Clinton", "Clotilde", "Clovis", "Cloyd", "Clyde", "Coby", "Cody", "Colby", "Cole", "Coleman", "Colin", "Colleen", "Collin", "Colt", "Colten", "Colton", "Columbus", "Concepcion", "Conner", "Connie", "Connor", "Conor", "Conrad", "Constance", "Constantin", "Consuelo", "Cooper", "Cora", "Coralie", "Corbin", "Cordelia", "Cordell", "Cordia", "Cordie", "Corene", "Corine", "Cornelius", "Cornell", "Corrine", "Cortez", "Cortney", "Cory", "Coty", "Courtney", "Coy", "Craig", "Crawford", "Creola", "Cristal", "Cristian", "Cristina", "Cristobal", "Cristopher", "Cruz", "Crystal", "Crystel", "Cullen", "Curt", "Curtis", "Cydney", "Cynthia", "Cyril", "Cyrus", "Dagmar", "Dahlia", "Daija", "Daisha", "Daisy", "Dakota", "Dale", "Dallas", "Dallin", "Dalton", "Damaris", "Dameon", "Damian", "Damien", "Damion", "Damon", "Dan", "Dana", "Dandre", "Dane", "Dangelo", "Dangelo", "Danial", "Daniela", "Daniella", "Danielle", "Danika", "Dannie", "Danny", "Dante", "Danyka", "Daphne", "Daphnee", "Daphney", "Darby", "Daren", "Darian", "Dariana", "Darien", "Dario", "Darion", "Darius", "Darlene", "Daron", "Darrel", "Darrell", "Darren", "Darrick", "Darrin", "Darrion", "Darron", "Darryl", "Darwin", "Daryl", "Dashawn", "Dasia", "Dave", "David", "Davin", "Davion", "Davon", "Davonte", "Dawn", "Dawson", "Dax", "Dayana", "Dayna", "Dayne", "Dayton", "Dean", "Deangelo", "Deanna", "Deborah", "Declan", "Dedric", "Dedrick", "Dee", "Deion", "Deja", "Dejah", "Dejon", "Dejuan", "Delaney", "Delbert", "Delfina", "Delia", "Delilah", "Dell", "Della", "Delmer", "Delores", "Delpha", "Delphia", "Delphine", "Delta", "Demarco", "Demarcus", "Demario", "Demetris", "Demetrius", "Demond", "Dena", "Denis", "Dennis", "Deon", "Deondre", "Deontae", "Deonte", "Dereck", "Derek", "Derick", "Deron", "Derrick", "Deshaun", "Deshawn", "Desiree", "Desmond", "Dessie", "Destany", "Destin", "Destinee", "Destiney", "Destini", "Destiny", "Devan", "Devante", "Deven", "Devin", "Devon", "Devonte", "Devyn", "Dewayne", "Dewitt", "Dexter", 
"Diamond", "Diana", "Dianna", "Diego", "Dillan", "Dillon", "Dimitri", "Dina", "Dino", "Dion", "Dixie", "Dock", "Dolly", "Dolores", "Domenic", "Domenica", "Domenick", "Domenico", "Domingo", "Dominic", "Dominique", "Don", "Donald", "Donato", "Donavon", "Donna", "Donnell", "Donnie", "Donny", "Dora", "Dorcas", "Dorian", "Doris", "Dorothea", "Dorothy", "Dorris", "Dortha", "Dorthy", "Doug", "Douglas", "Dovie", "Doyle", "Drake", "Drew", "Duane", "Dudley", "Dulce", "Duncan", "Durward", "Dustin", "Dusty", "Dwight", "Dylan", "Earl", "Earlene", "Earline", "Earnest", "Earnestine", "Easter", "Easton", "Ebba", "Ebony", "Ed", "Eda", "Edd", "Eddie", "Eden", "Edgar", "Edgardo", "Edison", "Edmond", "Edmund", "Edna", "Eduardo", "Edward", "Edwardo", "Edwin", "Edwina", "Edyth", "Edythe", "Effie", "Efrain", "Efren", "Eileen", "Einar", "Eino", "Eladio", "Elaina", "Elbert", "Elda", "Eldon", "Eldora", "Eldred", "Eldridge", "Eleanora", "Eleanore", "Eleazar", "Electa", "Elena", "Elenor", "Elenora", "Eleonore", "Elfrieda", "Eli", "Elian", "Eliane", "Elias", "Eliezer", "Elijah", "Elinor", "Elinore", "Elisa", "Elisabeth", "Elise", "Eliseo", "Elisha", "Elissa", "Eliza", "Elizabeth", "Ella", "Ellen", "Ellie", "Elliot", "Elliott", "Ellis", "Ellsworth", "Elmer", "Elmira", "Elmo", "Elmore", "Elna", "Elnora", "Elody", "Eloisa", "Eloise", "Elouise", "Eloy", "Elroy", "Elsa", "Else", "Elsie", "Elta", "Elton", "Elva", "Elvera", "Elvie", "Elvis", "Elwin", "Elwyn", "Elyse", "Elyssa", "Elza", "Emanuel", "Emelia", "Emelie", "Emely", "Emerald", "Emerson", "Emery", "Emie", "Emil", "Emile", "Emilia", "Emiliano", "Emilie", "Emilio", "Emily", "Emma", "Emmalee", "Emmanuel", "Emmanuelle", "Emmet", "Emmett", "Emmie", "Emmitt", "Emmy", "Emory", "Ena", "Enid", "Enoch", "Enola", "Enos", "Enrico", "Enrique", "Ephraim", "Era", "Eriberto", "Eric", "Erica", "Erich", "Erick", "Ericka", "Erik", "Erika", "Erin", "Erling", "Erna", "Ernest", "Ernestina", "Ernestine", "Ernesto", "Ernie", "Ervin", "Erwin", "Eryn", "Esmeralda", "Esperanza", "Esta", "Esteban", "Estefania", "Estel", "Estell", "Estella", "Estelle", "Estevan", "Esther", "Estrella", "Etha", "Ethan", "Ethel", "Ethelyn", "Ethyl", "Ettie", "Eudora", "Eugene", "Eugenia", "Eula", "Eulah", "Eulalia", "Euna", "Eunice", "Eusebio", "Eva", "Evalyn", "Evan", "Evangeline", "Evans", "Eve", "Eveline", "Evelyn", "Everardo", "Everett", "Everette", "Evert", "Evie", "Ewald", "Ewell", "Ezekiel", "Ezequiel", "Ezra", "Fabian", "Fabiola", "Fae", "Fannie", "Fanny", "Fatima", "Faustino", "Fausto", "Favian", "Fay", "Faye", "Federico", "Felicia", "Felicita", "Felicity", "Felipa", "Felipe", "Felix", "Felton", "Fermin", "Fern", "Fernando", "Ferne", "Fidel", "Filiberto", "Filomena", "Finn", "Fiona", "Flavie", "Flavio", "Fleta", "Fletcher", "Flo", "Florence", "Florencio", "Florian", "Florida", "Florine", "Flossie", "Floy", "Floyd", "Ford", "Forest", "Forrest", "Foster", "Frances", "Francesca", "Francesco", "Francis", "Francisca", "Francisco", "Franco", "Frank", "Frankie", "Franz", "Fred", "Freda", "Freddie", "Freddy", "Frederic", "Frederick", "Frederik", "Frederique", "Fredrick", "Fredy", "Freeda", "Freeman", "Freida", "Frida", "Frieda", "Friedrich", "Fritz", "Furman", "Gabe", "Gabriel", "Gabriella", "Gabrielle", "Gaetano", "Gage", "Gail", "Gardner", "Garett", "Garfield", "Garland", "Garnet", "Garnett", "Garret", "Garrett", "Garrick", "Garrison", "Garry", "Garth", "Gaston", "Gavin", "Gay", "Gayle", "Gaylord", "Gene", "General", "Genesis", "Genevieve", "Gennaro", "Genoveva", "Geo", "Geoffrey", "George", "Georgette", 
"Georgiana", "Georgianna", "Geovanni", "Geovanny", "Geovany", "Gerald", "Geraldine", "Gerard", "Gerardo", "Gerda", "Gerhard", "Germaine", "German", "Gerry", "Gerson", "Gertrude", "Gia", "Gianni", "Gideon", "Gilbert", "Gilberto", "Gilda", "Giles", "Gillian", "Gina", "Gino", "Giovani", "Giovanna", "Giovanni", "Giovanny", "Gisselle", "Giuseppe", "Gladyce", "Gladys", "Glen", "Glenda", "Glenna", "Glennie", "Gloria", "Godfrey", "Golda", "Golden", "Gonzalo", "Gordon", "Grace", "Gracie", "Graciela", "Grady", "Graham", "Grant", "Granville", "Grayce", "Grayson", "Green", "Greg", "Gregg", "Gregoria", "Gregorio", "Gregory", "Greta", "Gretchen", "Greyson", "Griffin", "Grover", "Guadalupe", "Gudrun", "Guido", "Guillermo", "Guiseppe", "Gunnar", "Gunner", "Gus", "Gussie", "Gust", "Gustave", "Guy", "Gwen", "Gwendolyn", "Hadley", "Hailee", "Hailey", "Hailie", "Hal", "Haleigh", "Haley", "Halie", "Halle", "Hallie", "Hank", "Hanna", "Hannah", "Hans", "Hardy", "Harley", "Harmon", "Harmony", "Harold", "Harrison", "Harry", "Harvey", "Haskell", "Hassan", "Hassie", "Hattie", "Haven", "Hayden", "Haylee", "Hayley", "Haylie", "Hazel", "Hazle", "Heath", "Heather", "Heaven", "Heber", "Hector", "Heidi", "Helen", "Helena", "Helene", "Helga", "Hellen", "Helmer", "Heloise", "Henderson", "Henri", "Henriette", "Henry", "Herbert", "Herman", "Hermann", "Hermina", "Herminia", "Herminio", "Hershel", "Herta", "Hertha", "Hester", "Hettie", "Hilario", "Hilbert", "Hilda", "Hildegard", "Hillard", "Hillary", "Hilma", "Hilton", "Hipolito", "Hiram", "Hobart", "Holden", "Hollie", "Hollis", "Holly", "Hope", "Horace", "Horacio", "Hortense", "Hosea", "Houston", "Howard", "Howell", "Hoyt", "Hubert", "Hudson", "Hugh", "Hulda", "Humberto", "Hunter", "Hyman", "Ian", "Ibrahim", "Icie", "Ida", "Idell", "Idella", "Ignacio", "Ignatius", "Ike", "Ila", "Ilene", "Iliana", "Ima", "Imani", "Imelda", "Immanuel", "Imogene", "Ines", "Irma", "Irving", "Irwin", "Isaac", "Isabel", "Isabell", "Isabella", "Isabelle", "Isac", "Isadore", "Isai", "Isaiah", "Isaias", "Isidro", "Ismael", "Isobel", "Isom", "Israel", "Issac", "Itzel", "Iva", "Ivah", "Ivory", "Ivy", "Izabella", "Izaiah", "Jabari", "Jace", "Jacey", "Jacinthe", "Jacinto", "Jack", "Jackeline", "Jackie", "Jacklyn", "Jackson", "Jacky", "Jaclyn", "Jacquelyn", "Jacques", "Jacynthe", "Jada", "Jade", "Jaden", "Jadon", "Jadyn", "Jaeden", "Jaida", "Jaiden", "Jailyn", "Jaime", "Jairo", "Jakayla", "Jake", "Jakob", "Jaleel", "Jalen", "Jalon", "Jalyn", "Jamaal", "Jamal", "Jamar", "Jamarcus", "Jamel", "Jameson", "Jamey", "Jamie", "Jamil", "Jamir", "Jamison", "Jammie", "Jan", "Jana", "Janae", "Jane", "Janelle", "Janessa", "Janet", "Janice", "Janick", "Janie", "Janis", "Janiya", "Jannie", "Jany", "Jaquan", "Jaquelin", "Jaqueline", "Jared", "Jaren", "Jarod", "Jaron", "Jarred", "Jarrell", "Jarret", "Jarrett", "Jarrod", "Jarvis", "Jasen", "Jasmin", "Jason", "Jasper", "Jaunita", "Javier", "Javon", "Javonte", "Jay", "Jayce", "Jaycee", "Jayda", "Jayde", "Jayden", "Jaydon", "Jaylan", "Jaylen", "Jaylin", "Jaylon", "Jayme", "Jayne", "Jayson", "Jazlyn", "Jazmin", "Jazmyn", "Jazmyne", "Jean", "Jeanette", "Jeanie", "Jeanne", "Jed", "Jedediah", "Jedidiah", "Jeff", "Jefferey", "Jeffery", "Jeffrey", "Jeffry", "Jena", "Jenifer", "Jennie", "Jennifer", "Jennings", "Jennyfer", "Jensen", "Jerad", "Jerald", "Jeramie", "Jeramy", "Jerel", "Jeremie", "Jeremy", "Jermain", "Jermaine", "Jermey", "Jerod", "Jerome", "Jeromy", "Jerrell", "Jerrod", "Jerrold", "Jerry", "Jess", "Jesse", "Jessica", "Jessie", "Jessika", "Jessy", "Jessyca", "Jesus", 
"Jett", "Jettie", "Jevon", "Jewel", "Jewell", "Jillian", "Jimmie", "Jimmy", "Jo", "Joan", "Joana", "Joanie", "Joanne", "Joannie", "Joanny", "Joany", "Joaquin", "Jocelyn", "Jodie", "Jody", "Joe", "Joel", "Joelle", "Joesph", "Joey", "Johan", "Johann", "Johanna", "Johathan", "John", "Johnathan", "Johnathon", "Johnnie", "Johnny", "Johnpaul", "Johnson", "Jolie", "Jon", "Jonas", "Jonatan", "Jonathan", "Jonathon", "Jordan", "Jordane", "Jordi", "Jordon", "Jordy", "Jordyn", "Jorge", "Jose", "Josefa", "Josefina", "Joseph", "Josephine", "Josh", "Joshua", "Joshuah", "Josiah", "Josiane", "Josianne", "Josie", "Josue", "Jovan", "Jovani", "Jovanny", "Jovany", "Joy", "Joyce", "Juana", "Juanita", "Judah", "Judd", "Jude", "Judge", "Judson", "Judy", "Jules", "Julia", "Julian", "Juliana", "Julianne", "Julie", "Julien", "Juliet", "Julio", "Julius", "June", "Junior", "Junius", "Justen", "Justice", "Justina", "Justine", "Juston", "Justus", "Justyn", "Juvenal", "Juwan", "Kacey", "Kaci", "Kacie", "Kade", "Kaden", "Kadin", "Kaela", "Kaelyn", "Kaia", "Kailee", "Kailey", "Kailyn", "Kaitlin", "Kaitlyn", "Kale", "Kaleb", "Kaleigh", "Kaley", "Kali", "Kallie", "Kameron", "Kamille", "Kamren", "Kamron", "Kamryn", "Kane", "Kara", "Kareem", "Karelle", "Karen", "Kari", "Kariane", "Karianne", "Karina", "Karine", "Karl", "Karlee", "Karley", "Karli", "Karlie", "Karolann", "Karson", "Kasandra", "Kasey", "Kassandra", "Katarina", "Katelin", "Katelyn", "Katelynn", "Katharina", "Katherine", "Katheryn", "Kathleen", "Kathlyn", "Kathryn", "Kathryne", "Katlyn", "Katlynn", "Katrina", "Katrine", "Kattie", "Kavon", "Kay", "Kaya", "Kaycee", "Kayden", "Kayla", "Kaylah", "Kaylee", "Kayleigh", "Kayley", "Kayli", "Kaylie", "Kaylin", "Keagan", "Keanu", "Keara", "Keaton", "Keegan", "Keeley", "Keely", "Keenan", "Keira", "Keith", "Kellen", "Kelley", "Kelli", "Kellie", "Kelly", "Kelsi", "Kelsie", "Kelton", "Kelvin", "Ken", "Kendall", "Kendra", "Kendrick", "Kenna", "Kennedi", "Kennedy", "Kenneth", "Kennith", "Kenny", "Kenton", "Kenya", "Kenyatta", "Kenyon", "Keon", "Keshaun", "Keshawn", "Keven", "Kevin", "Kevon", "Keyon", "Keyshawn", "Khalid", "Khalil", "Kian", "Kiana", "Kianna", "Kiara", "Kiarra", "Kiel", "Kiera", "Kieran", "Kiley", "Kim", "Kimberly", "King", "Kip", "Kira", "Kirk", "Kirsten", "Kirstin", "Kitty", "Kobe", "Koby", "Kody", "Kolby", "Kole", "Korbin", "Korey", "Kory", "Kraig", "Kris", "Krista", "Kristian", "Kristin", "Kristina", "Kristofer", "Kristoffer", "Kristopher", "Kristy", "Krystal", "Krystel", "Krystina", "Kurt", "Kurtis", "Kyla", "Kyle", "Kylee", "Kyleigh", "Kyler", "Kylie", "Kyra", "Lacey", "Lacy", "Ladarius", "Lafayette", "Laila", "Laisha", "Lamar", "Lambert", "Lamont", "Lance", "Landen", "Lane", "Laney", "Larissa", "Laron", "Larry", "Larue", "Laura", "Laurel", "Lauren", "Laurence", "Lauretta", "Lauriane", "Laurianne", "Laurie", "Laurine", "Laury", "Lauryn", "Lavada", "Lavern", "Laverna", "Laverne", "Lavina", "Lavinia", "Lavon", "Lavonne", "Lawrence", "Lawson", "Layla", "Layne", "Lazaro", "Lea", "Leann", "Leanna", "Leanne", "Leatha", "Leda", "Lee", "Leif", "Leila", "Leilani", "Lela", "Lelah", "Leland", "Lelia", "Lempi", "Lemuel", "Lenna", "Lennie", "Lenny", "Lenora", "Lenore", "Leo", "Leola", "Leon", "Leonard", "Leonardo", "Leone", "Leonel", "Leonie", "Leonor", "Leonora", "Leopold", "Leopoldo", "Leora", "Lera", "Lesley", "Leslie", "Lesly", "Lessie", "Lester", "Leta", "Letha", "Letitia", "Levi", "Lew", "Lewis", "Lexi", "Lexie", "Lexus", "Lia", "Liam", "Liana", "Libbie", "Libby", "Lila", "Lilian", "Liliana", "Liliane", "Lilla", 
"Lillian", "Lilliana", "Lillie", "Lilly", "Lily", "Lilyan", "Lina", "Lincoln", "Linda", "Lindsay", "Lindsey", "Linnea", "Linnie", "Linwood", "Lionel", "Lisa", "Lisandro", "Lisette", "Litzy", "Liza", "Lizeth", "Lizzie", "Llewellyn", "Lloyd", "Logan", "Lois", "Lola", "Lolita", "Loma", "Lon", "London", "Lonie", "Lonnie", "Lonny", "Lonzo", "Lora", "Loraine", "Loren", "Lorena", "Lorenz", "Lorenza", "Lorenzo", "Lori", "Lorine", "Lorna", "Lottie", "Lou", "Louie", "Louisa", "Lourdes", "Louvenia", "Lowell", "Loy", "Loyal", "Loyce", "Lucas", "Luciano", "Lucie", "Lucienne", "Lucile", "Lucinda", "Lucio", "Lucious", "Lucius", "Lucy", "Ludie", "Ludwig", "Lue", "Luella", "Luigi", "Luis", "Luisa", "Lukas", "Lula", "Lulu", "Luna", "Lupe", "Lura", "Lurline", "Luther", "Luz", "Lyda", "Lydia", "Lyla", "Lynn", "Lyric", "Lysanne", "Mabel", "Mabelle", "Mable", "Mac", "Macey", "Maci", "Macie", "Mack", "Mackenzie", "Macy", "Madaline", "Madalyn", "Maddison", "Madeline", "Madelyn", "Madelynn", "Madge", "Madie", "Madilyn", "Madisen", "Madison", "Madisyn", "Madonna", "Madyson", "Mae", "Maegan", "Maeve", "Mafalda", "Magali", "Magdalen", "Magdalena", "Maggie", "Magnolia", "Magnus", "Maia", "Maida", "Maiya", "Major", "Makayla", "Makenna", "Makenzie", "Malachi", "Malcolm", "Malika", "Malinda", "Mallie", "Mallory", "Malvina", "Mandy", "Manley", "Manuel", "Manuela", "Mara", "Marc", "Marcel", "Marcelina", "Marcelino", "Marcella", "Marcelle", "Marcellus", "Marcelo", "Marcia", "Marco", "Marcos", "Marcus", "Margaret", "Margarete", "Margarett", "Margaretta", "Margarette", "Margarita", "Marge", "Margie", "Margot", "Margret", "Marguerite", "Maria", "Mariah", "Mariam", "Marian", "Mariana", "Mariane", "Marianna", "Marianne", "Mariano", "Maribel", "Marie", "Mariela", "Marielle", "Marietta", "Marilie", "Marilou", "Marilyne", "Marina", "Mario", "Marion", "Marisa", "Marisol", "Maritza", "Marjolaine", "Marjorie", "Marjory", "Mark", "Markus", "Marlee", "Marlen", "Marlene", "Marley", "Marlin", "Marlon", "Marques", "Marquis", "Marquise", "Marshall", "Marta", "Martin", "Martina", "Martine", "Marty", "Marvin", "Mary", "Maryam", "Maryjane", "Maryse", "Mason", "Mateo", "Mathew", "Mathias", "Mathilde", "Matilda", "Matilde", "Matt", "Matteo", "Mattie", "Maud", "Maude", "Maudie", "Maureen", "Maurice", "Mauricio", "Maurine", "Maverick", "Mavis", "Max", "Maxie", "Maxime", "Maximilian", "Maximillia", "Maximillian", "Maximo", "Maximus", "Maxine", "Maxwell", "May", "Maya", "Maybell", "Maybelle", "Maye", "Maymie", "Maynard", "Mayra", "Mazie", "Mckayla", "Mckenna", "Mckenzie", "Meagan", "Meaghan", "Meda", "Megane", "Meggie", "Meghan", "Mekhi", "Melany", "Melba", "Melisa", "Melissa", "Mellie", "Melody", "Melvin", "Melvina", "Melyna", "Melyssa", "Mercedes", "Meredith", "Merl", "Merle", "Merlin", "Merritt", "Mertie", "Mervin", "Meta", "Mia", "Micaela", "Micah", "Michael", "Michaela", "Michale", "Micheal", "Michel", "Michele", "Michelle", "Miguel", "Mikayla", "Mike", "Mikel", "Milan", "Miles", "Milford", "Miller", "Millie", "Milo", "Milton", "Mina", "Minerva", "Minnie", "Miracle", "Mireille", "Mireya", "Misael", "Missouri", "Misty", "Mitchel", "Mitchell", "Mittie", "Modesta", "Modesto", "Mohamed", "Mohammad", "Mohammed", "Moises", "Mollie", "Molly", "Mona", "Monica", "Monique", "Monroe", "Monserrat", "Monserrate", "Montana", "Monte", "Monty", "Morgan", "Moriah", "Morris", "Mortimer", "Morton", "Mose", "Moses", "Moshe", "Mossie", "Mozell", "Mozelle", "Muhammad", "Muriel", "Murl", "Murphy", "Murray", "Mustafa", "Mya", "Myah", "Mylene", "Myles", "Myra", 
"Myriam", "Myrl", "Myrna", "Myron", "Myrtice", "Myrtie", "Myrtis", "Myrtle", "Nadia", "Nakia", "Name", "Nannie", "Naomi", "Naomie", "Napoleon", "Narciso", "Nash", "Nasir", "Nat", "Natalia", "Natalie", "Natasha", "Nathan", "Nathanael", "Nathanial", "Nathaniel", "Nathen", "Nayeli", "Neal", "Ned", "Nedra", "Neha", "Neil", "Nelda", "Nella", "Nelle", "Nellie", "Nels", "Nelson", "Neoma", "Nestor", "Nettie", "Neva", "Newell", "Newton", "Nia", "Nicholas", "Nicholaus", "Nichole", "Nick", "Nicklaus", "Nickolas", "Nico", "Nicola", "Nicolas", "Nicole", "Nicolette", "Nigel", "Nikita", "Nikki", "Nikko", "Niko", "Nikolas", "Nils", "Nina", "Noah", "Noble", "Noe", "Noel", "Noelia", "Noemi", "Noemie", "Noemy", "Nola", "Nolan", "Nona", "Nora", "Norbert", "Norberto", "Norene", "Norma", "Norris", "Norval", "Norwood", "Nova", "Novella", "Nya", "Nyah", "Nyasia", "Obie", "Oceane", "Ocie", "Octavia", "Oda", "Odell", "Odessa", "Odie", "Ofelia", "Okey", "Ola", "Olaf", "Ole", "Olen", "Oleta", "Olga", "Olin", "Oliver", "Ollie", "Oma", "Omari", "Omer", "Ona", "Onie", "Opal", "Ophelia", "Ora", "Oral", "Oran", "Oren", "Orie", "Orin", "Orion", "Orland", "Orlando", "Orlo", "Orpha", "Orrin", "Orval", "Orville", "Osbaldo", "Osborne", "Oscar", "Osvaldo", "Oswald", "Oswaldo", "Otha", "Otho", "Otilia", "Otis", "Ottilie", "Ottis", "Otto", "Ova", "Owen", "Ozella", "Pablo", "Paige", "Palma", "Pamela", "Pansy", "Paolo", "Paris", "Parker", "Pascale", "Pasquale", "Pat", "Patience", "Patricia", "Patrick", "Patsy", "Pattie", "Paul", "Paula", "Pauline", "Paxton", "Payton", "Pearl", "Pearlie", "Pearline", "Pedro", "Peggie", "Penelope", "Percival", "Percy", "Perry", "Pete", "Peter", "Petra", "Peyton", "Philip", "Phoebe", "Phyllis", "Pierce", "Pierre", "Pietro", "Pink", "Pinkie", "Piper", "Polly", "Porter", "Precious", "Presley", "Preston", "Price", "Prince", "Princess", "Priscilla", "Providenci", "Prudence", "Queen", "Queenie", "Quentin", "Quincy", "Quinn", "Quinten", "Quinton", "Rachael", "Rachel", "Rachelle", "Rae", "Raegan", "Rafael", "Rafaela", "Raheem", "Rahsaan", "Rahul", "Raina", "Raleigh", "Ralph", "Ramiro", "Ramon", "Ramona", "Randal", "Randall", "Randi", "Randy", "Ransom", "Raoul", "Raphael", "Raphaelle", "Raquel", "Rashad", "Rashawn", "Rasheed", "Raul", "Raven", "Ray", "Raymond", "Raymundo", "Reagan", "Reanna", "Reba", "Rebeca", "Rebecca", "Rebeka", "Rebekah", "Reece", "Reed", "Reese", "Regan", "Reggie", "Reginald", "Reid", "Reilly", "Reina", "Reinhold", "Remington", "Rene", "Renee", "Ressie", "Reta", "Retha", "Retta", "Reuben", "Reva", "Rex", "Rey", "Reyes", "Reymundo", "Reyna", "Reynold", "Rhea", "Rhett", "Rhianna", "Rhiannon", "Rhoda", "Ricardo", "Richard", "Richie", "Richmond", "Rick", "Rickey", "Rickie", "Ricky", "Rico", "Rigoberto", "Riley", "Rita", "River", "Robb", "Robbie", "Robert", "Roberta", "Roberto", "Robin", "Robyn", "Rocio", "Rocky", "Rod", "Roderick", "Rodger", "Rodolfo", "Rodrick", "Rodrigo", "Roel", "Rogelio", "Roger", "Rogers", "Rolando", "Rollin", "Roma", "Romaine", "Roman", "Ron", "Ronaldo", "Ronny", "Roosevelt", "Rory", "Rosa", "Rosalee", "Rosalia", "Rosalind", "Rosalinda", "Rosalyn", "Rosamond", "Rosanna", "Rosario", "Roscoe", "Rose", "Rosella", "Roselyn", "Rosemarie", "Rosemary", "Rosendo", "Rosetta", "Rosie", "Rosina", "Roslyn", "Ross", "Rossie", "Rowan", "Rowena", "Rowland", "Roxane", "Roxanne", "Roy", "Royal", "Royce", "Rozella", "Ruben", "Rubie", "Ruby", "Rubye", "Rudolph", "Rudy", "Rupert", "Russ", "Russel", "Russell", "Rusty", "Ruth", "Ruthe", "Ruthie", "Ryan", "Ryann", "Ryder", "Rylan", "Rylee", 
"Ryleigh", "Ryley", "Sabina", "Sabrina", "Sabryna", "Sadie", "Sadye", "Sage", "Saige", "Sallie", "Sally", "Salma", "Salvador", "Salvatore", "Sam", "Samanta", "Samantha", "Samara", "Samir", "Sammie", "Sammy", "Samson", "Sandra", "Sandrine", "Sandy", "Sanford", "Santa", "Santiago", "Santina", "Santino", "Santos", "Sarah", "Sarai", "Sarina", "Sasha", "Saul", "Savanah", "Savanna", "Savannah", "Savion", "Scarlett", "Schuyler", "Scot", "Scottie", "Scotty", "Seamus", "Sean", "Sebastian", "Sedrick", "Selena", "Selina", "Selmer", "Serena", "Serenity", "Seth", "Shad", "Shaina", "Shakira", "Shana", "Shane", "Shanel", "Shanelle", "Shania", "Shanie", "Shaniya", "Shanna", "Shannon", "Shanny", "Shanon", "Shany", "Sharon", "Shaun", "Shawn", "Shawna", "Shaylee", "Shayna", "Shayne", "Shea", "Sheila", "Sheldon", "Shemar", "Sheridan", "Sherman", "Sherwood", "Shirley", "Shyann", "Shyanne", "Sibyl", "Sid", "Sidney", "Sienna", "Sierra", "Sigmund", "Sigrid", "Sigurd", "Silas", "Sim", "Simeon", "Simone", "Sincere", "Sister", "Skye", "Skyla", "Skylar", "Sofia", "Soledad", "Solon", "Sonia", "Sonny", "Sonya", "Sophia", "Sophie", "Spencer", "Stacey", "Stacy", "Stan", "Stanford", "Stanley", "Stanton", "Stefan", "Stefanie", "Stella", "Stephan", "Stephania", "Stephanie", "Stephany", "Stephen", "Stephon", "Sterling", "Steve", "Stevie", "Stewart", "Stone", "Stuart", "Summer", "Sunny", "Susan", "Susana", "Susanna", "Susie", "Suzanne", "Sven", "Syble", "Sydnee", "Sydney", "Sydni", "Sydnie", "Sylvan", "Sylvester", "Sylvia", "Tabitha", "Tad", "Talia", "Talon", "Tamara", "Tamia", "Tania", "Tanner", "Tanya", "Tara", "Taryn", "Tate", "Tatum", "Tatyana", "Taurean", "Tavares", "Taya", "Taylor", "Teagan", "Ted", "Telly", "Terence", "Teresa", "Terrance", "Terrell", "Terrence", "Terrill", "Terry", "Tess", "Tessie", "Tevin", "Thad", "Thaddeus", "Thalia", "Thea", "Thelma", "Theo", "Theodora", "Theodore", "Theresa", "Therese", "Theresia", "Theron", "Thomas", "Thora", "Thurman", "Tia", "Tiana", "Tianna", "Tiara", "Tierra", "Tiffany", "Tillman", "Timmothy", "Timmy", "Timothy", "Tina", "Tito", "Titus", "Tobin", "Toby", "Tod", "Tom", "Tomas", "Tomasa", "Tommie", "Toney", "Toni", "Tony", "Torey", "Torrance", "Torrey", "Toy", "Trace", "Tracey", "Tracy", "Travis", "Travon", "Tre", "Tremaine", "Tremayne", "Trent", "Trenton", "Tressa", "Tressie", "Treva", "Trever", "Trevion", "Trevor", "Trey", "Trinity", "Trisha", "Tristian", "Tristin", "Triston", "Troy", "Trudie", "Trycia", "Trystan", "Turner", "Twila", "Tyler", "Tyra", "Tyree", "Tyreek", "Tyrel", "Tyrell", "Tyrese", "Tyrique", "Tyshawn", "Tyson", "Ubaldo", "Ulices", "Ulises", "Una", "Unique", "Urban", "Uriah", "Uriel", "Ursula", "Vada", "Valentin", "Valentina", "Valentine", "Valerie", "Vallie", "Van", "Vance", "Vanessa", "Vaughn", "Veda", "Velda", "Vella", "Velma", "Velva", "Vena", "Verda", "Verdie", "Vergie", "Verla", "Verlie", "Vern", "Verna", "Verner", "Vernice", "Vernie", "Vernon", "Verona", "Veronica", "Vesta", "Vicenta", "Vicente", "Vickie", "Vicky", "Victor", "Victoria", "Vida", "Vidal", "Vilma", "Vince", "Vincent", "Vincenza", "Vincenzo", "Vinnie", "Viola", "Violet", "Violette", "Virgie", "Virgil", "Virginia", "Virginie", "Vita", "Vito", "Viva", "Vivian", "Viviane", "Vivianne", "Vivien", "Vivienne", "Vladimir", "Wade", "Waino", "Waldo", "Walker", "Wallace", "Walter", "Walton", "Wanda", "Ward", "Warren", "Watson", "Wava", "Waylon", "Wayne", "Webster", "Weldon", "Wellington", "Wendell", "Wendy", "Werner", "Westley", "Weston", "Whitney", "Wilber", "Wilbert", "Wilburn", "Wiley", "Wilford", 
"Wilfred", "Wilfredo", "Wilfrid", "Wilhelm", "Wilhelmine", "Will", "Willa", "Willard", "William", "Willie", "Willis", "Willow", "Willy", "Wilma", "Wilmer", "Wilson", "Wilton", "Winfield", "Winifred", "Winnifred", "Winona", "Winston", "Woodrow", "Wyatt", "Wyman", "Xander", "Xavier", "Xzavier", "Yadira", "Yasmeen", "Yasmin", "Yasmine", "Yazmin", "Yesenia", "Yessenia", "Yolanda", "Yoshiko", "Yvette", "Yvonne", "Zachariah", "Zachary", "Zachery", "Zack", "Zackary", "Zackery", "Zakary", "Zander", "Zane", "Zaria", "Zechariah", "Zelda", "Zella", "Zelma", "Zena", "Zetta", "Zion", "Zita", "Zoe", "Zoey", "Zoie", "Zoila", "Zola", "Zora", "Zula"}, + "middle": {"Abdul", "Abdullah", "Abigail", "Ada", "Adam", "Adelaide", "Adele", "Adelina", "Adrian", "Adriana", "Agnes", "Agnolo", "Ahmed", "Aida", "Aileen", "Aimee", "Akilesh", "Akio", "Alan", "Alana", "Alejandro", "Alex", "Ali", "Alice", "Alicia", "Alina", "Alison", "Alita", "Allegretta", "Alonzo", "Alyssa", "Aman", "Amara", "Amelda", "Amelia", "Amenra", "Amina", "Amir", "Amitabh", "Amy", "Ana", "Anastasia", "André", "Andrea", "Andrei", "Andrew", "Andy", "Angel", "Angela", "Anita", "Ann", "Anna", "Anne", "Annette", "Anthony", "Antioco", "Antonio", "Arduino", "Aria", "Ariana", "Ariel", "Aris", "Arjun", "Armando", "Asha", "Ashton", "Asong", "Athena", "Audrey", "August", "Aura", "Aurelia", "Austen", "Ava", "Avery", "Avril", "Badru", "Bailey", "Bakul", "Baldwin", "Bao", "Barack", "Bear", "Beatrice", "Beau", "Belinda", "Bella", "Belle", "Ben", "Benjamin", "Bertha", "Beverly", "Bharati", "Bhoja", "Bhuma", "Bianca", "Bird", "Birdie", "Bishvajit", "Bjorn", "Blair", "Blake", "Blanca", "Bliss", "Blue", "Bo", "Bobbie", "Bonnie", "Boris", "Bradley", "Brandt", "Braulia", "Breck", "Bree", "Brett", "Brianna", "Bridget", "Brie", "Brielle", "Brittany", "Brizio", "Brook", "Brooke", "Brooks", "Bruce", "Bryce", "Bryn", "Brynn", "Burke", "Cajetan", "Calvin", "Cameron", "Camilla", "Candice", "Carla", "Carlos", "Carmen", "Caroline", "Carson", "Casey", "Cash", "Cassandra", "Cassidy", "Catherine", "Cecelia", "Cecilia", "Cedric", "Celeste", "Celia", "Celso", "Chahna", "Chance", "Chander", "Chandler", "Chang", "Charles", "Charlie", "Charlotte", "Chen", "Chintak", "Chloe", "Chris", "Christine", "Chung", "Cimeron", "Cindy", "Ciprianna", "Ciro", "Claire", "Clara", "Clarissa", "Clark", "Clarke", "Claude", "Claudia", "Clay", "Clementine", "Clint", "Cody", "Cole", "Colette", "Cora", "Cordelia", "Corey", "Corinne", "Cory", "Cosme", "Courtney", "Cree", "Crew", "Cynthia", "Cyprienne", "Cyrus", "Daan", "Dada", "Daisy", "Dakota", "Dale", "Damodar", "Dan", "Dana", "Dane", "Daniel", "Danielle", "Danveer", "Daphne", "Darla", "David", "Davide", "Dawn", "Dax", "Dean", "Deborah", "Delilah", "Denise", "Denver", "Deshal", "Deshawn", "Dev", "Devin", "Dhavala", "Diana", "Diane", "Diego", "Dmitri", "Dolores", "Dolorita", "Donato", "Dong", "Donna", "Donte", "Donya", "Dora", "Doris", "Dorothy", "Drake", "Drew", "Dru", "Dylan", "Ean", "Edith", "Eduardo", "Edward", "Eila", "Eileen", "Elaine", "Elda", "Eleanor", "Elena", "Eliana", "Elias", "Elise", "Eliza", "Elizabeth", "Ella", "Elle", "Ellen", "Ellie", "Ellis", "Eloise", "Elsa", "Elsie", "Em", "Emerson", "Emery", "Emilie", "Emilio", "Emily", "Emma", "Emmett", "Enrico", "Enrique", "Epifania", "Erica", "Erik", "Erin", "Eroica", "Esperanza", "Estelle", "Esther", "Etta", "Ettore", "Eva", "Evan", "Eve", "Evelyn", "Everett", "Faith", "Farid", "Faye", "Federico", "Felicity", "Felipe", "Felix", "Fern", "Fernando", "Finley", "Finn", "Fiona", "Fitz", "Flint", "Flora", 
"Florence", "Flynn", "Folke", "Fonzo", "Fox", "Frances", "Francis", "Francisco", "Francois", "François", "Frank", "Frankie", "Freya", "Fumio", "Fynn", "Gabriel", "Gabriella", "Gael", "Gage", "Gail", "Gemma", "Genevieve", "George", "Georgia", "Geraldine", "Giannino", "Ginetta", "Gioia", "Giselle", "Giuseppe", "Giustino", "Glenn", "Gloria", "Glory", "Grace", "Grant", "Gray", "Greer", "Greta", "Guido", "Guillermo", "Gulshan", "Gus", "Gwen", "Gyula", "Hank", "Hannah", "Hans", "Harley", "Harper", "Harriet", "Harrison", "Harshad", "Haruki", "Hayden", "Hayes", "Haze", "Hazel", "Heath", "Heather", "Hector", "Helen", "Helena", "Henry", "Hideki", "Hidetoshi", "Himesh", "Hiro", "Hiroaki", "Hirofumi", "Hirokazu", "Hiroshi", "Hiroto", "Hiroyuki", "Holly", "Honor", "Hope", "Hugh", "Hugo", "Hunter", "Ida", "Ignacio", "Imogen", "Ingrid", "Irene", "Iris", "Isaac", "Isabel", "Isabella", "Isabelle", "Ivan", "Ivy", "Jace", "Jack", "Jacqueline", "Jade", "Jaden", "Jae", "Jai", "Jaime", "Jamal", "James", "Jamie", "Jan", "Janak", "Jane", "Janet", "Janice", "Jasmine", "Jasper", "Javier", "Jax", "Jay", "Jayden", "Jayne", "Jean", "Jeanne", "Jed", "Jenna", "Jennifer", "Jesse", "Jessica", "Jill", "Jin", "Joan", "Joanna", "João", "Jocelyn", "Jodi", "Jody", "Joe", "Joey", "Johanna", "Johar", "John", "Jolene", "Jordan", "Jorge", "Jose", "José", "Joseph", "Josephine", "Josie", "Joy", "Joyce", "Juan", "Juanita", "Judd", "Jude", "Judith", "Jules", "Julia", "Julian", "Juliana", "Julianne", "Julie", "June", "Justine", "Kael", "Kai", "Kane", "Karen", "Kate", "Katherine", "Kathleen", "Kathryn", "Katie", "Katrina", "Kay", "Kayla", "Kazuki", "Keira", "Kelly", "Kelsey", "Kendall", "Kendra", "Kennedy", "Kent", "Kenta", "Kerry", "Khaled", "Khloe", "Kiara", "Kim", "Kimberly", "Kit", "Kiyoshi", "Klaus", "Knight", "Knox", "Koen", "Koi", "Koichi", "Koji", "Kolt", "Kristen", "Kristina", "Kurt", "Kwame", "Kye", "Kylie", "Lacey", "Laine", "Lake", "Lakshman", "Lalika", "Lane", "Lark", "Lars", "Laurel", "Layne", "Lee", "Leif", "Lennon", "Leo", "Leon", "Leslie", "Liam", "Liberty", "Lilian", "Lillian", "Lillie", "Link", "Liz", "Locke", "Logan", "Lona", "Lorena", "Lorenzo", "Lou", "Louise", "Love", "Lucia", "Lucy", "Luis", "Luiz", "Luke", "Lupita", "Lux", "Luz", "Lydia", "Lynn", "Mabel", "Mac", "Mack", "Mackenzie", "Madeline", "Madison", "Madona", "Mae", "Mael", "Makoto", "Manuel", "Manuela", "Maple", "Marc", "Marco", "Margaret", "Margo", "Margot", "Maria", "Mariano", "Maricela", "Marilyn", "Mario", "Mark", "Marley", "Mars", "Marti", "Mary", "Mason", "Matthew", "Mavis", "Max", "May", "Mazie", "Mei", "Melody", "Mercy", "Merle", "Micah", "Michael", "Miguel", "Mina", "Ming", "Mohamed", "Mollie", "Monroe", "Morgan", "Muhammad", "Musetta", "Myra", "Nadine", "Naomi", "Nardo", "Nat", "Natalie", "Neal", "Neil", "Nellie", "Nerola", "Nevada", "Neve", "Nikolai", "Niles", "Noel", "Nola", "Nora", "Nuru", "Oakley", "Olive", "Oliver", "Opal", "Orazio", "Ortensa", "Ortensia", "Osamu", "Oscar", "Otto", "Pablo", "Paige", "Pancho", "Paris", "Parker", "Pat", "Patrick", "Paul", "Pauli", "Pax", "Peace", "Pearl", "Pedro", "Penelope", "Penn", "Penny", "Peter", "Petra", "Peyton", "Phoenix", "Pierce", "Pierre", "Pilar", "Porter", "Praise", "Pratap", "Presley", "Priscilla", "Quinn", "Rachanna", "Radames", "Rae", "Rafael", "Rain", "Raine", "Ramiro", "Ramon", "Ramona", "Raphael", "Raul", "Ravi", "Ray", "Rayne", "Reagan", "Reece", "Reed", "Reese", "Rei", "Reid", "Reilly", "Remy", "Ren", "Reyes", "Rhodes", "Ricardo", "Richard", "Riley", "Rita", "River", "Rivera", "Roan", 
"Robert", "Roberto", "Robin", "Robt", "Rodrigo", "Roma", "Romelia", "Rory", "Rosa", "Rosalee", "Rosalie", "Rosalynn", "Rosario", "Rose", "Ross", "Rowan", "Ruben", "Ruby", "Rue", "Rush", "Russell", "Ruth", "Ryan", "Saad", "Saariq", "Sade", "Sadie", "Sagara", "Sage", "Saige", "Saint", "Salvadora", "Sam", "Samir", "Samuel", "Sante", "Santiago", "Sara", "Sasha", "Satoshi", "Scott", "Sean", "Sebastian", "Sergei", "Sergio", "Seth", "Shae", "Shai", "Shane", "Shannon", "Shashi", "Shaun", "Shawn", "Shawnee", "Shay", "Shea", "Shelby", "Shin", "Sidney", "Simon", "Sky", "Skye", "Skyler", "Sol", "Sophie", "Spencer", "Star", "Starr", "Stella", "Steve", "Stevie", "Storm", "Susan", "Sven", "Sybil", "Sydney", "Tahj", "Takashi", "Takeshi", "Taryn", "Tatum", "Taylor", "Teagan", "Terry", "Tess", "Thea", "Theodore", "Thomas", "Tilly", "Timothy", "Tosca", "Trent", "Tripp", "Tristan", "Truth", "Tyler", "Tyrone", "Uberto", "Ursus", "Val", "Vandelia", "Vaughn", "Vera", "Vernon", "Verona", "Vianna", "Victoria", "Vida", "Vieda", "Vince", "Vincent", "Violet", "Virginia", "Vivian", "Vladimir", "Wade", "Wayne", "Wes", "Wesley", "West", "Whitney", "Will", "Willa", "William", "Willie", "Winston", "Winter", "Wolf", "Wren", "Wynn", "Xavier", "Yasuo", "Yoel", "Yolanda", "Yoshi", "Yoshiaki", "Yoshihiro", "Yoshiki", "Yoshinori", "Yoshio", "Yusuf", "Yutaka", "Zain", "Zane", "Zayd", "Zelda", "Zeus", "Zev", "Zhang", "Zhen", "Zola", "Zora", "Zuni"}, + "last": {"Abbott", "Abernathy", "Abshire", "Adams", "Altenwerth", "Anderson", "Ankunding", "Armstrong", "Auer", "Aufderhar", "Bahringer", "Bailey", "Balistreri", "Barrows", "Bartell", "Bartoletti", "Barton", "Bashirian", "Batz", "Bauch", "Baumbach", "Bayer", "Beahan", "Beatty", "Bechtelar", "Becker", "Bednar", "Beer", "Beier", "Berge", "Bergnaum", "Bergstrom", "Bernhard", "Bernier", "Bins", "Blanda", "Blick", "Block", "Bode", "Boehm", "Bogan", "Bogisich", "Borer", "Bosco", "Botsford", "Boyer", "Boyle", "Bradtke", "Brakus", "Braun", "Breitenberg", "Brekke", "Brown", "Bruen", "Buckridge", "Carroll", "Carter", "Cartwright", "Casper", "Cassin", "Champlin", "Christiansen", "Cole", "Collier", "Collins", "Conn", "Connelly", "Conroy", "Considine", "Corkery", "Cormier", "Corwin", "Cremin", "Crist", "Crona", "Cronin", "Crooks", "Cruickshank", "Cummerata", "Cummings", "Dach", "Damore", "Daniel", "Dare", "Daugherty", "Davis", "Deckow", "Denesik", "Dibbert", "Dickens", "Dicki", "Dickinson", "Dietrich", "Donnelly", "Dooley", "Douglas", "Doyle", "DuBuque", "Durgan", "Ebert", "Effertz", "Eichmann", "Emard", "Emmerich", "Erdman", "Ernser", "Fadel", "Fahey", "Farrell", "Fay", "Feeney", "Feest", "Feil", "Ferry", "Fisher", "Flatley", "Frami", "Franecki", "Friesen", "Fritsch", "Funk", "Gaylord", "Gerhold", "Gerlach", "Gibson", "Gislason", "Gleason", "Gleichner", "Glover", "Goldner", "Goodwin", "Gorczany", "Gottlieb", "Goyette", "Grady", "Graham", "Grant", "Green", "Greenfelder", "Greenholt", "Grimes", "Gulgowski", "Gusikowski", "Gutkowski", "Gutmann", "Haag", "Hackett", "Hagenes", "Hahn", "Haley", "Halvorson", "Hamill", "Hammes", "Hand", "Hane", "Hansen", "Harber", "Harris", "Hartmann", "Harvey", "Hauck", "Hayes", "Heaney", "Heathcote", "Hegmann", "Heidenreich", "Heller", "Herman", "Hermann", "Hermiston", "Herzog", "Hessel", "Hettinger", "Hickle", "Hilll", "Hills", "Hilpert", "Hintz", "Hirthe", "Hodkiewicz", "Hoeger", "Homenick", "Hoppe", "Howe", "Howell", "Hudson", "Huel", "Huels", "Hyatt", "Jacobi", "Jacobs", "Jacobson", "Jakubowski", "Jaskolski", "Jast", "Jenkins", "Jerde", "Jewess", "Johns", 
"Johnson", "Johnston", "Jones", "Kassulke", "Kautzer", "Keebler", "Keeling", "Kemmer", "Kerluke", "Kertzmann", "Kessler", "Kiehn", "Kihn", "Kilback", "King", "Kirlin", "Klein", "Kling", "Klocko", "Koch", "Koelpin", "Koepp", "Kohler", "Konopelski", "Koss", "Kovacek", "Kozey", "Krajcik", "Kreiger", "Kris", "Kshlerin", "Kub", "Kuhic", "Kuhlman", "Kuhn", "Kulas", "Kunde", "Kunze", "Kuphal", "Kutch", "Kuvalis", "Labadie", "Lakin", "Lang", "Langosh", "Langworth", "Larkin", "Larson", "Leannon", "Lebsack", "Ledner", "Leffler", "Legros", "Lehner", "Lemke", "Lesch", "Leuschke", "Lind", "Lindgren", "Littel", "Little", "Lockman", "Lowe", "Lubowitz", "Lueilwitz", "Luettgen", "Lynch", "Macejkovic", "Maggio", "Mann", "Mante", "Marks", "Marquardt", "Marvin", "Mayer", "Mayert", "McClure", "McCullough", "McDermott", "McGlynn", "McKenzie", "McLaughlin", "Medhurst", "Mertz", "Metz", "Miller", "Mills", "Mitchell", "Moen", "Mohr", "Monahan", "Moore", "Morar", "Morissette", "Mosciski", "Mraz", "Mueller", "Muller", "Murazik", "Murphy", "Murray", "Nader", "Nicolas", "Nienow", "Nikolaus", "Nitzsche", "Nolan", "Oberbrunner", "Okuneva", "Olson", "Ondricka", "OReilly", "Orn", "Ortiz", "Osinski", "Pacocha", "Padberg", "Pagac", "Parisian", "Parker", "Paucek", "Pfannerstill", "Pfeffer", "Pollich", "Pouros", "Powlowski", "Predovic", "Price", "Prohaska", "Prosacco", "Purdy", "Quigley", "Quitzon", "Rath", "Ratke", "Rau", "Raynor", "Reichel", "Reichert", "Reilly", "Reinger", "Rempel", "Renner", "Reynolds", "Rice", "Rippin", "Ritchie", "Robel", "Roberts", "Rodriguez", "Rogahn", "Rohan", "Rolfson", "Romaguera", "Roob", "Rosenbaum", "Rowe", "Ruecker", "Runolfsdottir", "Runolfsson", "Runte", "Russel", "Rutherford", "Ryan", "Sanford", "Satterfield", "Sauer", "Sawayn", "Schaden", "Schaefer", "Schamberger", "Schiller", "Schimmel", "Schinner", "Schmeler", "Schmidt", "Schmitt", "Schneider", "Schoen", "Schowalter", "Schroeder", "Schulist", "Schultz", "Schumm", "Schuppe", "Schuster", "Senger", "Shanahan", "Shields", "Simonis", "Sipes", "Skiles", "Smith", "Smitham", "Spencer", "Spinka", "Sporer", "Stamm", "Stanton", "Stark", "Stehr", "Steuber", "Stiedemann", "Stokes", "Stoltenberg", "Stracke", "Streich", "Stroman", "Strosin", "Swaniawski", "Swift", "Terry", "Thiel", "Thompson", "Tillman", "Torp", "Torphy", "Towne", "Toy", "Trantow", "Tremblay", "Treutel", "Tromp", "Turcotte", "Turner", "Ullrich", "Upton", "Vandervort", "Veum", "Volkman", "Von", "VonRueden", "Waelchi", "Walker", "Walsh", "Walter", "Ward", "Waters", "Watsica", "Weber", "Wehner", "Weimann", "Weissnat", "Welch", "West", "White", "Wiegand", "Wilderman", "Wilkinson", "Will", "Williamson", "Willms", "Windler", "Wintheiser", "Wisoky", "Wisozk", "Witting", "Wiza", "Wolf", "Wolff", "Wuckert", "Wunsch", "Wyman", "Yost", "Yundt", "Zboncak", "Zemlak", "Ziemann", "Zieme", "Zulauf"}, + "hobby": {"3D printing", "Acrobatics", "Acting", "Amateur radio", "Animation", "Aquascaping", "Astrology", "Astronomy", "Baking", "Baton twirling", "Blogging", "Building", "Board/tabletop games", "Book discussion clubs", "Book restoration", "Bowling", "Brazilian jiu-jitsu", "Breadmaking", "Bullet journaling", "Cabaret", "Calligraphy", "Candle making", "Candy making", "Car fixing & building", "Card games", "Cheesemaking", "Cleaning", "Clothesmaking", "Coffee roasting", "Collecting", "Coloring", "Computer programming", "Confectionery", "Cooking", "Cosplaying", "Couponing", "Craft", "Creative writing", "Crocheting", "Cross-stitch", "Crossword puzzles", "Cryptography", "Cue sports", "Dance", "Digital arts", 
"Distro Hopping", "DJing", "Do it yourself", "Drama", "Drawing", "Drink mixing", "Drinking", "Electronic games", "Electronics", "Embroidery", "Experimenting", "Fantasy sports", "Fashion", "Fashion design", "Fishkeeping", "Filmmaking", "Flower arranging", "Fly tying", "Foreign language learning", "Furniture building", "Gaming", "Genealogy", "Gingerbread house making", "Glassblowing", "Graphic design", "Gunsmithing", "Gymnastics", "Hacking", "Herp keeping", "Home improvement", "Homebrewing", "Houseplant care", "Hula hooping", "Humor", "Hydroponics", "Ice skating", "Jewelry making", "Jigsaw puzzles", "Journaling", "Juggling", "Karaoke", "Karate", "Kendama", "Knife making", "Knitting", "Knot tying", "Kombucha brewing", "Lace making", "Lapidary", "Leather crafting", "Lego building", "Lock picking", "Listening to music", "Listening to podcasts", "Machining", "Macrame", "Magic", "Makeup", "Mazes (indoor/outdoor)", "Metalworking", "Model building", "Model engineering", "Nail art", "Needlepoint", "Origami", "Painting", "Palmistry", "Pet adoption & fostering", "Philately", "Photography", "Practical jokes", "Pressed flower craft", "Playing musical instruments", "Poi", "Pottery", "Powerlifting", "Puzzles", "Quilling", "Quilting", "Quizzes", "Radio-controlled model", "Rail transport modeling", "Rapping", "Reading", "Refinishing", "Reiki", "Robot combat", "Rubik's Cube", "Scrapbooking", "Sculpting", "Sewing", "Shoemaking", "Singing", "Sketching", "Skipping rope", "Slot car", "Soapmaking", "Social media", "Spreadsheets", "Stand-up comedy", "Stamp collecting", "Table tennis", "Tarot", "Taxidermy", "Thrifting", "Video editing", "Video game developing", "Video gaming", "Watching movies", "Watching television", "Videography", "Virtual reality", "Waxing", "Weaving", "Weight training", "Welding", "Whittling", "Wikipedia editing", "Winemaking", "Wood carving", "Woodworking", "Worldbuilding", "Writing", "Word searches", "Yo-yoing", "Yoga", "Zumba", "Amusement park visiting", "Air sports", "Airsoft", "Amateur geology", "Archery", "Astronomy", "Backpacking", "Badminton", "BASE jumping", "Baseball", "Basketball", "Beekeeping", "Birdwatching", "Blacksmithing", "BMX", "Board sports", "Bodybuilding", "Bonsai", "Butterfly watching", "Bus riding", "Camping", "Canoeing", "Canyoning", "Car riding", "Caving", "Composting", "Cycling", "Dowsing", "Driving", "Farming", "Fishing", "Flag football", "Flower growing", "Flying", "Flying disc", "Foraging", "Fossicking", "Freestyle football", "Gardening", "Geocaching", "Ghost hunting", "Gold prospecting", "Graffiti", "Handball", "Herbalism", "Herping", "High-power rocketry", "Hiking", "Hobby horsing", "Hobby tunneling", "Hooping", "Horseback riding", "Hunting", "Inline skating", "Jogging", "Jumping rope", "Kayaking", "Kite flying", "Kitesurfing", "Lacrosse", "LARPing", "Letterboxing", "Longboarding", "Martial arts", "Metal detecting", "Meteorology", "Motor sports", "Mountain biking", "Mountaineering", "Museum visiting", "Mushroom hunting", "Netball", "Nordic skating", "Orienteering", "Paintball", "Parkour", "Photography", "Podcast hosting", "Polo", "Public transport riding", "Rafting", "Railway journeys", "Rappelling", "Road biking", "Rock climbing", "Roller skating", "Rugby", "Running", "Radio-controlled model", "Sailing", "Sand art", "Scouting", "Scuba diving", "Sculling", "Shooting", "Shopping", "Shuffleboard", "Skateboarding", "Skiing", "Skimboarding", "Skydiving", "Slacklining", "Snowboarding", "Snowmobiling", "Snowshoeing", "Soccer", "Stone skipping", "Sun bathing", "Surfing", 
"Survivalism", "Swimming", "Taekwondo", "Tai chi", "Tennis", "Topiary", "Tourism", "Thru-hiking", "Trade fair visiting", "Travel", "Urban exploration", "Vacation", "Vegetable farming", "Videography", "Vehicle restoration", "Walking", "Water sports", "Astronomy", "Biology", "Chemistry", "Electrochemistry", "Physics", "Psychology", "Sports science", "Geography", "History", "Mathematics", "Railway studies", "Action figure", "Antiquing", "Ant-keeping", "Art collecting", "Book collecting", "Button collecting", "Cartophily", "Coin collecting", "Comic book collecting", "Deltiology", "Die-cast toy", "Digital hoarding", "Dolls", "Element collecting", "Ephemera collecting", "Fusilately", "Knife collecting", "Lotology", "Movie and movie memorabilia collecting", "Fingerprint collecting", "Perfume", "Phillumeny", "Radio-controlled model", "Rail transport modelling", "Record collecting", "Rock tumbling", "Scutelliphily", "Shoes", "Slot car", "Sports memorabilia", "Stamp collecting", "Stuffed toy collecting", "Tea bag collecting", "Ticket collecting", "Toys", "Transit map collecting", "Video game collecting", "Vintage cars", "Vintage clothing", "Vinyl Records", "Antiquities", "Auto audiophilia", "Flower collecting and pressing", "Fossil hunting", "Insect collecting", "Magnet fishing", "Metal detecting", "Mineral collecting", "Rock balancing", "Sea glass collecting", "Seashell collecting", "Stone collecting", "Animal fancy", "Axe throwing", "Backgammon", "Badminton", "Baton twirling", "Beauty pageants", "Billiards", "Bowling", "Boxing", "Bridge", "Checkers (draughts)", "Cheerleading", "Chess", "Color guard", "Cribbage", "Curling", "Dancing", "Darts", "Debate", "Dominoes", "Eating", "Esports", "Fencing", "Go", "Gymnastics", "Ice hockey", "Ice skating", "Judo", "Jujitsu", "Kabaddi", "Knowledge/word games", "Laser tag", "Longboarding", "Mahjong", "Marbles", "Martial arts", "Model United Nations", "Poker", "Pool", "Role-playing games", "Shogi", "Slot car racing", "Speedcubing", "Sport stacking", "Table football", "Table tennis", "Volleyball", "Weightlifting", "Wrestling", "Airsoft", "Archery", "Association football", "Australian rules football", "Auto racing", "Baseball", "Beach volleyball", "Breakdancing", "Climbing", "Cricket", "Croquet", "Cycling", "Disc golf", "Dog sport", "Equestrianism", "Exhibition drill", "Field hockey", "Figure skating", "Fishing", "Footbag", "Frisbee", "Golfing", "Handball", "Horseback riding", "Horseshoes", "Iceboat racing", "Jukskei", "Kart racing", "Knife throwing", "Lacrosse", "Longboarding", "Long-distance running", "Marching band", "Model aircraft", "Orienteering", "Pickleball", "Quidditch", "Race walking", "Racquetball", "Radio-controlled car racing", "Roller derby", "Rugby league football", "Sculling", "Shooting sport", "Skateboarding", "Skiing", "Sled dog racing", "Softball", "Speed skating", "Squash", "Surfing", "Swimming", "Table tennis", "Tennis", "Tennis polo", "Tether car", "Tour skating", "Tourism", "Trapshooting", "Triathlon", "Ultimate frisbee", "Volleyball", "Water polo", "Fishkeeping", "Learning", "Meditation", "Microscopy", "Reading", "Research", "Shortwave listening", "Audiophile", "Aircraft spotting", "Amateur astronomy", "Birdwatching", "Bus spotting", "Geocaching", "Gongoozling", "Herping", "Hiking", "Meteorology", "Photography", "Satellite watching", "Trainspotting", "Whale watching"}, + "phone": {"###-###-####", "(###)###-####", "1-###-###-####", "###.###.####"}, +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/data/product.go 
b/vendor/github.com/brianvoe/gofakeit/v7/data/product.go new file mode 100644 index 0000000000..cbe5b3a5ca --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/data/product.go @@ -0,0 +1,171 @@ +package data + +var Product = map[string][]string{ + "category": { + "electronics", "clothing", "home appliances", "furniture", + "automotive parts", "beauty and personal care", "books", "sports equipment", + "toys and games", "outdoor gear", "pet supplies", "kitchenware", + "health and wellness", "tools and hardware", "office supplies", + "baby products", "jewelry", "home decor", "musical instruments", + "fitness equipment", "mobile phones", "computer accessories", "cameras and photography", + "gardening supplies", "bedding and linens", "food and groceries", "party supplies", + "craft and diy supplies", "camping gear", "watches", "luggage and travel accessories", + "board games", "art supplies", "stationery", "bath and shower products", + "sunglasses", "educational toys", "headphones and earbuds", "sneakers and athletic shoes", + "coffee and tea products", "bicycles and accessories", "cookware", "cosmetics", + "home improvement", "pet food", "laptop bags and cases", "home security systems", + "musical accessories", "skincare products", "smart home devices", + }, + + "adjective": { + "bold", "swift", "pure", "smart", "fresh", + "cool", "sharp", "zen", "bright", "quick", + "robust", "sleek", "versatile", "innovative", "compact", + "luxe", "modular", "precision", "stream", + }, + + "name": { + "phone", "laptop", "tablet", "watch", "camera", + "headphones", "speaker", "drone", "car", "bike", + "appliance", "gadget", "tool", "toy", "game", + "computer", "console", "smartwatch", "fitness tracker", "smart home device", + "robot", "router", "television", "smart speaker", "vr headset", + "earbuds", "printer", "mouse", "keyboard", "monitor", + "microwave", "blender", "vacuum", "fan", "toaster", + "clock", "lamp", "shaver", "scale", "thermometer", + "fridge", "oven", "mixer", "iron", "hair dryer", + "fan", "scale", "thermostat", "router", "lightbulb", + }, + + "feature": { + "wireless", "smart", "eco-friendly", "advanced", "compact", + "high-performance", "energy-efficient", "portable", "durable", "stylish", + "touchscreen", "water-resistant", "noise-canceling", "voice-controlled", "ultra-lightweight", + "multi-functional", "user-friendly", "fast-charging", "biometric", "gps-enabled", + }, + + "material": { + "titanium", "carbon", "alloy", "bamboo", "leather", + "glass", "ceramic", "aluminum", "stainless", "wood", + "plastic", "rubber", "silicon", "fabric", "paper", + "gold", "silver", "brass", "copper", "bronze", + "chrome", "marble", "granite", "porcelain", "plexiglass", + "quartz", "felt", "suede", + }, + + "suffix": { + "tech", "pro", "x", "plus", "elite", + "spark", "nexus", "nova", "fusion", "sync", + "edge", "boost", "max", "link", "prime", + "zoom", "pulse", "dash", "connect", "blaze", + "quantum", "spark", "vertex", "core", "flux", + "turbo", "shift", "wave", "matrix", + }, + + "benefit": { + "comfort", "efficiency", "safety", "reliability", + "versatility", "ease of use", "long battery life", + "precision", "enhanced connectivity", "portability", + "durability", "energy savings", "aesthetic appeal", + "health benefits", "convenience", "time-saving", + "high performance", "noise reduction", "user satisfaction", + "customizability", "sustainability", "cost-effectiveness", + "innovative features", "improved productivity", "enhanced experience", + "robust construction", "weather resistance", 
"minimal maintenance", + "increased functionality", "advanced technology", "ergonomic design", + }, + + "use_case": { + "home", "office", "outdoors", "fitness", "travel", "gaming", + "cooking", "music", "learning", "entertainment", "professional work", + "healthcare", "educational purposes", "commuting", "camping", "hiking", + "sports", "art and craft", "gardening", "cleaning", "personal grooming", + "relaxation", "home security", "pet care", "smart automation", "food preparation", + "baking", "social gatherings", "productivity", "collaboration", "DIY projects", + "childcare", "remote work", "photography", "videography", "wellness routines", + }, + + "target_audience": { + "children", "adults", "seniors", "students", "professionals", "athletes", + "travelers", "families", "pet owners", "homeowners", "gamers", "cooks", "DIY enthusiasts", + "musicians", "artists", + }, + + "dimension": { + "small", "medium", "large", "extra-large", "compact", "lightweight", + "heavy", "mini", "standard", "oversized", + }, + + "description": { + "This {adjectivedescriptive} {productname} is perfect for {productusecase}, offering {productfeature} and {productbenefit}. Made from {productmaterial}, it's designed for {productaudience} who value {productbenefit}.", + "Introducing the {adjectivedescriptive} {productname} {productsuffix}, featuring {productfeature} technology and made from {productmaterial}. It ensures {productbenefit} for {productaudience}, making it ideal for {productusecase}.", + "Perfect for {productusecase}, the {productname} is crafted with {adjectivedescriptive} {productmaterial} and features {productfeature} for {productaudience}. Enjoy {productbenefit} every day.", + "Designed with {productaudience} in mind, this {adjectivedescriptive} {productname} offers {productbenefit}. It's equipped with {productfeature} and made from {productmaterial} for maximum {productbenefit}.", + "The {productname} {productsuffix} combines {adjectivedescriptive} design and {productmaterial} build to deliver {productfeature}. Its {productdimension} size makes it perfect for {productusecase} and ideal for {productaudience}.", + "With a focus on {productaudience}, the {productname} is built with {adjectivedescriptive} {productmaterial} for {productbenefit} and features {productfeature} to meet the needs of {productusecase}.", + "Experience the {productbenefit} of the {productname} {productsuffix}, made from {productmaterial} with a {adjectivedescriptive} design. It's ideal for {productusecase} and loved by {productaudience}.", + "Whether you're using it at {productusecase} or on the go, this {adjectivedescriptive} {productname} offers {productfeature} and ensures {productbenefit}. Crafted from {productmaterial}, it's perfect for {productaudience}.", + "The {productname} is a {adjectivedescriptive} solution for {productusecase}, featuring {productfeature} technology and built with {productmaterial} for {productbenefit}. Suitable for {productaudience}.", + "For {productaudience} who need {productbenefit}, the {productname} {productsuffix} delivers with {productfeature}, {adjectivedescriptive} design, and durable {productmaterial} construction. 
Ideal for {productusecase}.", + "Built with {adjectivedescriptive} {productmaterial}, this item is ideal for {productusecase} and provides {productaudience} with {productbenefit} through its {productfeature}.", + "Experience {productbenefit} with this {adjectivedescriptive} product, featuring {productfeature} and made from {productmaterial}, perfect for {productusecase}.", + "Designed for {productaudience}, this {adjectivedescriptive} product ensures {productbenefit} and is equipped with {productfeature} for the best {productusecase} experience.", + "For those who need {productbenefit}, this {adjectivedescriptive} product, made of {productmaterial}, offers {productfeature} and is perfect for {productusecase}.", + "Take your {productusecase} to the next level with this {adjectivedescriptive} product. Built from {productmaterial}, it features {productfeature} for {productaudience}.", + "Crafted from {productmaterial}, this product is ideal for {productaudience} seeking {productbenefit}. Its {adjectivedescriptive} design and {productfeature} make it perfect for {productusecase}.", + "This product, made with {productmaterial}, is designed for {productaudience} who value {productbenefit}. Its {adjectivedescriptive} design includes {productfeature}, making it ideal for {productusecase}.", + "Enjoy {productbenefit} with this {adjectivedescriptive} item, featuring {productfeature} technology. Made from {productmaterial}, it's perfect for {productusecase}.", + "With {productfeature} and {adjectivedescriptive} {productmaterial}, this product offers {productbenefit} for {productaudience}, ideal for {productusecase}.", + "The perfect solution for {productusecase}, this {adjectivedescriptive} product provides {productbenefit} with its {productfeature}, crafted from {productmaterial} for {productaudience}.", + "Built for {productaudience}, this product features {productfeature} and ensures {productbenefit}. Made from {productmaterial}, it's an {adjectivedescriptive} choice for {productusecase}.", + "Achieve {productbenefit} with this {adjectivedescriptive} product. Crafted from {productmaterial}, it features {productfeature}, perfect for {productaudience} during {productusecase}.", + "For {productaudience}, this {adjectivedescriptive} item offers {productfeature} and is made of {productmaterial}, providing {productbenefit} for {productusecase}.", + "This {adjectivedescriptive} product is crafted from {productmaterial} and includes {productfeature}, making it perfect for {productusecase} and delivering {productbenefit} for {productaudience}.", + "Featuring {productfeature} and made from {productmaterial}, this {adjectivedescriptive} product is ideal for {productaudience} looking for {productbenefit} in {productusecase}.", + "For {productusecase}, this {adjectivedescriptive} product provides {productbenefit} with its {productfeature}, crafted for {productaudience} from high-quality {productmaterial}.", + "This {adjectivedescriptive} product is perfect for {productaudience} who need {productbenefit}. Built from {productmaterial} and featuring {productfeature}, it's ideal for {productusecase}.", + "Delivering {productbenefit}, this product is made from {productmaterial} and designed for {productaudience}. 
Its {adjectivedescriptive} design includes {productfeature}, perfect for {productusecase}.", + "For those interested in {productusecase}, this {adjectivedescriptive} product offers {productfeature} and is made of {productmaterial} to provide {productbenefit} for {productaudience}.", + "This product is crafted for {productaudience}, featuring {adjectivedescriptive} {productmaterial} and equipped with {productfeature} to ensure {productbenefit} during {productusecase}.", + "Transform your {productusecase} with this {adjectivedescriptive} product, featuring {productfeature} and made from high-quality {productmaterial} to provide {productbenefit}.", + "This {adjectivedescriptive} item, built for {productaudience}, uses {productfeature} technology to deliver {productbenefit} during {productusecase}.", + "Enjoy the luxury of {productbenefit} with this product, crafted from {productmaterial}. Its {adjectivedescriptive} design and {productfeature} make it ideal for {productusecase}.", + "Made from {productmaterial} and designed with {productaudience} in mind, this product offers {productbenefit} and features {productfeature} for excellent {productusecase}.", + "Achieve seamless {productusecase} with this {adjectivedescriptive} product. Built using {productmaterial}, it delivers {productbenefit} with the help of {productfeature}.", + "This product, made for {productaudience}, offers {productbenefit} with its {adjectivedescriptive} {productmaterial} build and advanced {productfeature}, perfect for {productusecase}.", + "Built with {productmaterial}, this {adjectivedescriptive} product is designed to provide {productbenefit} for {productaudience} through its {productfeature}, ideal for {productusecase}.", + "Elevate your {productusecase} experience with this {adjectivedescriptive} product, made from {productmaterial} and offering {productfeature} to ensure {productbenefit}.", + "Perfect for {productaudience} who value {productbenefit}, this product features {productfeature} and is crafted from {adjectivedescriptive} {productmaterial}, ideal for {productusecase}.", + "With a focus on {productusecase}, this {adjectivedescriptive} product, made from {productmaterial}, ensures {productbenefit} with its {productfeature} for {productaudience}.", + "Whether for {productusecase} or everyday use, this product delivers {productbenefit} with its {adjectivedescriptive} {productmaterial} construction and {productfeature}, crafted for {productaudience}.", + "This {adjectivedescriptive} product is perfect for {productusecase}, made with {productmaterial} and offering {productfeature} to ensure {productbenefit} for {productaudience}.", + "Featuring state-of-the-art {productfeature}, this product is designed from {productmaterial} to deliver {productbenefit} for {productaudience}, ideal for {productusecase}.", + "For {productusecase}, this {adjectivedescriptive} product is crafted from {productmaterial} to provide {productfeature}, ensuring {productbenefit} for {productaudience}.", + "Built for {productaudience}, this item features {adjectivedescriptive} {productmaterial} and advanced {productfeature} to deliver {productbenefit} during {productusecase}.", + "With {productfeature} and a {adjectivedescriptive} design, this product is made from {productmaterial} to provide {productbenefit} for {productaudience}, ideal for {productusecase}.", + "This {adjectivedescriptive} item, crafted from {productmaterial}, offers {productfeature} for {productusecase}, ensuring {productbenefit} for {productaudience}.", + "For those 
who value {productbenefit}, this product, made from {productmaterial}, includes {productfeature} and is perfect for {productusecase}, designed for {productaudience}.", + "Achieve superior {productusecase} with this product, featuring {productfeature} and made from {adjectivedescriptive} {productmaterial}, offering {productbenefit} to {productaudience}.", + "Delivering {productbenefit}, this product is crafted from {productmaterial} and equipped with {productfeature}, making it ideal for {productaudience} during {productusecase}.", + "Revolutionize your {productusecase} with this {adjectivedescriptive} item, featuring {productfeature} and crafted from {productmaterial} for {productaudience} seeking {productbenefit}.", + "This {adjectivedescriptive} item, designed for {productaudience}, is built from {productmaterial} and includes {productfeature} to ensure {productbenefit} for {productusecase}.", + "Enjoy enhanced {productusecase} with this product, featuring {productfeature} and made with {adjectivedescriptive} {productmaterial}, delivering {productbenefit}.", + "Perfect for {productaudience}, this {adjectivedescriptive} product includes {productfeature} and is crafted from {productmaterial} to provide {productbenefit} during {productusecase}.", + "Take your {productusecase} to new heights with this product, made from {productmaterial} and featuring {productfeature} for {productaudience} who value {productbenefit}.", + "Crafted from premium {productmaterial}, this item is designed for {productaudience} to provide {productbenefit} with its {adjectivedescriptive} build and {productfeature}.", + "This {adjectivedescriptive} product, made from {productmaterial}, offers {productbenefit} through its {productfeature}, ideal for {productaudience} engaging in {productusecase}.", + "Elevate your {productusecase} with this {adjectivedescriptive} product. 
It features {productfeature} and is crafted from {productmaterial} for {productaudience}.", + "Designed for {productaudience}, this product includes {productfeature} and a {adjectivedescriptive} {productmaterial} construction, ensuring {productbenefit} during {productusecase}.", + "This {adjectivedescriptive} item, featuring {productfeature}, is crafted from {productmaterial} to provide {productbenefit} for {productusecase}, perfect for {productaudience}.", + "Achieve exceptional {productusecase} with this product, featuring advanced {productfeature} and made from durable {productmaterial}, delivering {productbenefit} for {productaudience}.", + "Whether it's for {productusecase} or daily use, this {adjectivedescriptive} item is crafted from {productmaterial} and offers {productfeature} to deliver {productbenefit} for {productaudience}.", + "This product, ideal for {productaudience}, features {adjectivedescriptive} {productmaterial} and incorporates {productfeature} to ensure {productbenefit} during {productusecase}.", + "Built with {productmaterial}, this {adjectivedescriptive} item is perfect for {productusecase} and features {productfeature} to provide {productbenefit} to {productaudience}.", + "With {productfeature} and made from {adjectivedescriptive} {productmaterial}, this product ensures {productbenefit} for {productaudience}, ideal for {productusecase}.", + "This {adjectivedescriptive} item is built for {productaudience}, providing {productbenefit} with its {productmaterial} construction and advanced {productfeature}, perfect for {productusecase}.", + "For {productusecase}, this product delivers {productbenefit} to {productaudience} with its {adjectivedescriptive} design and {productfeature}, made from quality {productmaterial}.", + "Experience the benefits of {productfeature} with this {adjectivedescriptive} item, crafted from {productmaterial} and ideal for {productaudience} seeking {productbenefit} during {productusecase}.", + "This {adjectivedescriptive} product is crafted from {productmaterial} and includes {productfeature}, making it perfect for {productusecase} and providing {productbenefit} for {productaudience}.", + "For those who value {productbenefit}, this product is made from {productmaterial} and features {adjectivedescriptive} {productfeature}, ideal for {productaudience} during {productusecase}.", + }, +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/data/school.go b/vendor/github.com/brianvoe/gofakeit/v7/data/school.go new file mode 100644 index 0000000000..a9772d634f --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/data/school.go @@ -0,0 +1,56 @@ +package data + +// School type and names +var School = map[string][]string{ + "type": {"Elementary School", "Middle School", "University", "High School", "Kindergarten", "Academy", "College", "Institute"}, + "isPrivate": {"Private", "State"}, + "name": {"Maplewood", + "Pineville", + "Riverside", + "Willowbrook", + "Crestwood", + "Sunset", + "Greenfield", + "Oakwood", + "Willowbrook", + "Hawthorn", + "Brookside", + "Pleasant View", + "Crescent Valley", + "Sycamore", + "Springfield", + "Meadowbrook", + "Greenwood", + "Riverbend", + "Valley Forge", + "Ridgeview", + "Cottonwood", + "Cedarwood", + "Golden Oak", + "Stonebridge", + "Harborview", + "Windsor", + "Northbrook", + "Sunset", + "Redwood Valley", + "Liberty", + "Washington Central", + "Franklin", + "Jefferson", + "Lincoln Park", + "Madison", + "Roosevelt", + "Westwood", + "Central Lakeside", + "Fairview", + "Heritage Hills", + "Kingsbridge", + 
"Harrisonville", + "Valley View", + "Hillside", + "Northridge", + "Brooklyn Heights", + "Oakridge", + "Countryside", + }, +} \ No newline at end of file diff --git a/vendor/github.com/brianvoe/gofakeit/v7/data/sentence.go b/vendor/github.com/brianvoe/gofakeit/v7/data/sentence.go new file mode 100644 index 0000000000..e12319d824 --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/data/sentence.go @@ -0,0 +1,5 @@ +package data + +var Sentence = map[string][]string{ + "phrase": {"what's yer poison", "time will tell", "I'm good", "nice to meet you", "spring forward, fall back", "what's your job", "once or twice", "you could have fooled me", "what's your name", "why not Zoidberg", "time you got a watch", "I'm Hindu", "fair play", "what's your phone number", "after the jump", "cease fire", "as ever", "I'm hot", "best of", "get well soon", "what's your poison", "when is closing time", "yes and amen", "you don't dip your pen in the company inkwell", "I'm hungry", "short of", "what's yours", "duces tecum", "after you", "yes and no", "I'm in love with you", "the pants off", "I'm Jewish", "few sandwiches short of a picnic", "shut the front door", "does a bear shit in the woods", "the party is over", "tomayto tomahto", "I'm looking for a grocery store", "does anyone here speak English", "heads I win, tails you lose", "I'm looking for a job", "stick a fork in it", "the penny drops", "I'm lost", "shut up and take my money", "mind you", "I'm married", "isn't it so", "wham-bam-thank-you-ma'am", "does not compute", "hold your fire", "pardon me", "mind your own beeswax", "I'm mute", "does someone look like", "I'm not being funny", "leave me alone", "going once, going twice, sold", "you get that", "I'm not interested", "talk about", "here be dragons", "always a bridesmaid, never a bride", "the plot thickens", "close, but no cigar", "I'm not religious", "ultra vires", "bound to", "always the bridesmaid, never the bride", "the plural of anecdote is not data", "I'm pregnant", "comedy equals tragedy plus time", "get you", "heads will roll", "all to the better", "I'm rubber, you're glue", "going to", "when push comes to shove", "you had to be there", "I'm scared", "you have beautiful eyes", "enjoy your meal", "I'm sick", "doesn't have both oars in the water", "you have the advantage of me", "here lies", "check is in the mail", "I'm single", "stick 'em up", "when the chips are down", "you just had to", "that'll be the day", "I'm sorry", "very good", "lather, rinse, repeat", "you kiss your mother with that mouth", "that'll do", "the rabbit died", "I'm straight", "in order for", "when the going gets weird, the weird turn pro", "I'm thirsty", "the rest is history", "it depends", "I'm tired", "in order to", "monkeys might fly out of my butt", "oh my life", "do want", "would it hurt", "you know what", "here you are", "all wool and a yard wide", "hit it", "pound for pound", "bottom falls out", "OK yah", "would it kill someone", "you know what I mean", "here you go", "alone in a crowd", "me neither", "chin up", "to be continued", "I'm twenty years old", "such is life", "off with someone's head", "Lord knows", "case closed", "you know what they say", "you've got to laugh", "ten points to Gryffindor", "that's a relief", "I'm worried", "kill the rabbit", "live and learn", "would not throw someone out of bed", "catch you later", "that's a wrap", "the rubber meets the road", "to be honest", "I'm your huckleberry", "off with their head", "you learn something new every day", "catch you on the flip side", "all your base 
are belong to us", "that's all", "horses for courses", "to be named later", "good night", "would you mind putting on your seat belt", "easy does it", "that's all she wrote", "me too", "oh noes", "that's for me to know and you to find out", "to be truthful", "still got one's communion money", "do you accept American dollars", "winner, winner, chicken dinner", "workers of the world, unite", "speak of the devil", "you must be fun at parties", "that's it", "hit me", "how about that", "ding, ding, ding, we have a winner", "do you accept credit cards", "word has it", "woulda, coulda, shoulda", "you must be new here", "how are you", "do you believe in God", "woulda, shoulda, coulda", "that's life", "safety in numbers", "how are you doing", "do you come here often", "worm has turned", "you never know", "that's my", "how are you getting along", "leave well enough alone", "do you have a boyfriend", "that's saying something", "the shoe is on the other foot", "this is someone", "do you have a girlfriend", "Lord only knows", "that's that", "check yourself before you wreck yourself", "this is the life", "how can you sleep at night", "wake up and die right", "do you have a menu in English", "that's the bunny", "the show must go on", "this is where we came in", "nod's as good as a wink to a blind bat", "wake up and smell the ashes", "on the huh", "do you have any brothers or sisters", "dogs bark", "worm turns", "that's the spirit", "this just in", "how did he die", "more like", "do you have any pets", "alright me babber", "Elvis has left the building", "this means war", "how do", "she could be his mother", "do you have children", "alright me lover", "that's the ticket", "how do I get to", "shoulda, coulda, woulda", "nome sane", "guess what", "whenever one turns around", "do you have Wi-Fi", "alright my babber", "the story goes", "how do I get to the airport", "shoulda, woulda, coulda", "do you kiss your mother with that mouth", "Lord willing and the creek don't rise", "you said it", "alright my lover", "how do I get to the bus station", "ask me one on sport", "need I say more", "sounds like a plan", "put that in your pipe and smoke it", "do you know", "take a picture, it will last longer", "the streets are paved with gold", "how do I get to the train station", "ask my arse", "stop the car", "do you know who I am", "wouldn't you know", "you shouldn't have", "how do ye do", "fans are slans", "use one's coconut", "bit by a barn mouse", "stick that in your pipe and smoke it", "do you mind", "but for the grace of God", "wouldn't you know it", "head in the sand", "the terrorists will have won", "how do you do", "please excuse my dear Aunt Sally", "much of a muchness", "bless someone's cotton socks", "do you need help", "or else", "dress for the slide, not the ride", "that's wassup", "the thick plottens", "much to be said", "bless someone's heart", "a blessing and a curse", "do you speak English", "you think", "that's what I'm talking about", "how do you like that", "art imitates life", "please help me", "five will get you ten", "do you think you can walk", "or so", "that's what she said", "the thing is", "how do you like them apples", "please pass the salt", "I've been robbed", "nature calls", "a boon and a bane", "but me no buts", "or something", "you welcome", "that's what's up", "how do you pronounce this word", "fare thee well", "please repeat after me", "I've been shot", "pot, meet kettle", "a boon or a bane", "where are the snows of yesteryear", "or what", "rolling in it", "the toilet is clogged", "how do 
you say...in English", "circle gets the square", "more than someone has had hot dinners", "please say that again", "I've burned myself", "different strokes", "where are the toilets", "or words to that effect", "you win", "how do you spell this word", "to hell with", "in virtue of which", "please sit down", "where are we", "out to", "am I right", "please speak more slowly", "I've lost my keys", "where are we going", "but who's counting", "you wish", "am I right or am I right", "how goes it", "methinks the lady doth protest too much", "please turn left", "could be written on the back of a postage stamp", "I've never heard it called that before", "where are you", "you wish, jellyfish", "am I under arrest", "methinks thou dost protest too much", "please turn right", "bang to rights", "gimme a break", "where are you from", "revenge is sweet", "'tis the season", "pull the other one", "where are your parents", "out with it", "have a good one", "how long is a piece of string", "ay up me duck", "before you can say Jack Robinson", "pull the other one, it's got bells on", "where away", "only time will tell", "could fit on the back of a postage stamp", "before you can say knife", "pull the other one, it's got brass bells on", "where can I find a hotel", "the wheels came off", "angel passes", "how many languages do you speak", "could go all day", "sleep tight", "nature vs nurture", "practice, practice, practice", "where do I sign up", "help is on the way", "many thanks", "the wheels came off the bus", "mercy bucket", "how many siblings do you have", "pleased to meet you", "could have fooled me", "where do you live", "the wheels came off the wagon", "mercy buckets", "where do you live at", "you'd better believe it", "than a bygod", "the wheels fell off", "could have, would have, should have", "where does it hurt", "hell if I know", "you'd complain if you were hung with a new rope", "the wheels fell off the bus", "every good boy deserves fudge", "could I see the menu, please", "where does this bus go", "help wanted", "the wheels fell off the wagon", "how much do I owe you", "where does this train go", "how much do you charge", "steady as she goes", "put the same shoe on every foot", "where have you been", "temper temper", "how much does it cost", "coulda, shoulda, woulda", "give credit where credit is due", "boom goes the dynamite", "where is the toilet", "how much is it", "in your dreams", "coulda, woulda, shoulda", "what a lovely day", "to save one's life", "exsqueeze me", "like a martin to his gourd", "what a pity", "you'll be late for your own funeral", "every man for himself", "size matters"}, +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/data/song.go b/vendor/github.com/brianvoe/gofakeit/v7/data/song.go new file mode 100644 index 0000000000..0e275c8ee3 --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/data/song.go @@ -0,0 +1,246 @@ +package data + +// Songs: Year-end Charts Hot 100 Songs from Billboard +// Source: https://www.billboard.com/charts/year-end/hot-100-songs/ + +// Artists: Greatest of All Time Artists based on Billboard rate +// Source: https://www.billboard.com/charts/greatest-of-all-time-artists/ + +var Songs = map[string][]string{ + "name": { + "A Bar Song (Tipsy)", + "A Holly Jolly Christmas", + "Act II: Date @ 8", + "Agora Hills", + "Ain't No Love In Oklahoma", + "All I Want For Christmas Is You", + "Austin", + "Beautiful Things", + "Birds Of A Feather", + "Bulletproof", + "Burn It Down", + "Carnival", + "Cowgirls", + "Cruel Summer", + "Dance The Night", + "Die 
With A Smile", + "Down Bad", + "End Of Beginning", + "Espresso", + "Euphoria", + "Everybody", + "Exes", + "FE!N", + "FTCU", + "Fast Car", + "Feather", + "First Person Shooter", + "Flowers", + "Fortnight", + "Fukumean", + "Gata Only", + "Get It Sexyy", + "Good Good", + "Good Luck, Babe!", + "Greedy", + "High Road", + "Hot To Go!", + "Houdini", + "Houdini", + "I Am Not Okay", + "I Can Do It With A Broken Heart", + "I Had Some Help", + "I Like The Way You Kiss Me", + "I Remember Everything", + "IDGAF", + "Is It Over Now?", + "Jingle Bell Rock", + "La Diabla", + "Last Christmas", + "Last Night", + "Lies Lies Lies", + "Like That", + "Lil Boo Thang", + "Lose Control", + "Lovin On Me", + "Lunch", + "Made For Me", + "Miles On It", + "Million Dollar Baby", + "Monaco", + "Need A Favor", + "Never Lose Me", + "Not Like Us", + "On My Mama", + "Paint The Town Red", + "Pink Skies", + "Please Please Please", + "Pour Me A Drink", + "Pretty Little Poison", + "Redrum", + "Rich Baby Daddy", + "Rockin' Around The Christmas Tree", + "Saturn", + "Save Me", + "Slow It Down", + "Snooze", + "Stargazing", + "Stick Season", + "Taste", + "Texas Hold 'Em", + "The Painter", + "Thinkin' Bout Me", + "Too Sweet", + "Truck Bed", + "Type Shit", + "Vampire", + "Wanna Be", + "Water", + "We Can't Be Friends (Wait For Your Love)", + "What Was I Made For?", + "Whatever She Wants", + "Where It Ends", + "Where The Wild Things Are", + "White Horse", + "Wild Ones", + "Wildflower", + "Wind Up Missin' You", + "World On Fire", + "Yeah Glo!", + "Yes, And?", + }, + "artist": { + "Adele", + "Aerosmith", + "Alicia Keys", + "Aretha Franklin", + "Barbra Streisand", + "Barry Manilow", + "Bee Gees", + "Beyonce", + "Billy Joel", + "Bob Dylan", + "Bob Seger", + "Bon Jovi", + "Boyz II Men", + "Britney Spears", + "Bruce Springsteen & The E Street Band", + "Bruno Mars", + "Bryan Adams", + "Carole King", + "Carpenters", + "Celine Dion", + "Chicago", + "Chris Brown", + "Commodores", + "Creedence Clearwater Revival", + "Daryl Hall John Oates", + "Def Leppard", + "Diana Ross", + "Donna Summer", + "Drake", + "Eagles", + "Earth, Wind & Fire", + "Ed Sheeran", + "Elton John", + "Elvis Presley", + "Eminem", + "Eric Clapton", + "Fleetwood Mac", + "Foreigner", + "Garth Brooks", + "Guns N' Roses", + "Heart", + "Herb Alpert", + "Huey Lewis & The News", + "JAY-Z", + "James Taylor", + "Janet Jackson", + "John Denver", + "John Mellencamp", + "Journey", + "Justin Bieber", + "Justin Timberlake", + "Kanye West", + "Katy Perry", + "Kelly Clarkson", + "Kenny Rogers", + "Lady Gaga", + "Led Zeppelin", + "Linda Ronstadt", + "Linkin Park", + "Lionel Richie", + "Madonna", + "Mariah Carey", + "Maroon 5", + "Marvin Gaye", + "Mary J. Blige", + "Michael Bolton", + "Michael Jackson", + "Miley Cyrus", + "Neil Diamond", + "Nelly", + "Nickelback", + "Olivia Newton-John", + "P!nk", + "Paul McCartney", + "Paula Abdul", + "Phil Collins", + "Pink Floyd", + "Prince", + "Queen", + "R. 
Kelly", + "Rihanna", + "Rod Stewart", + "Santana", + "Simon & Garfunkel", + "Stevie Wonder", + "Taylor Swift", + "The Beach Boys", + "The Beatles", + "The Black Eyed Peas", + "The Jacksons", + "The Monkees", + "The Rolling Stones", + "The Supremes", + "The Temptations", + "Three Dog Night", + "Tim McGraw", + "U2", + "Usher", + "Van Halen", + "Whitney Houston", + }, + "genre": { + "Acoustic Pop", + "Alternative Hip-Hop", + "Alternative Pop", + "Chillwave", + "Contemporary R&B", + "Country", + "Dancehall", + "Electro-pop", + "Electronic Dance Music (EDM)", + "Emo Rap", + "Funk", + "Gospel-inspired Pop", + "Hip-Hop", + "Indie Pop", + "Latin Pop", + "Lo-fi Hip-Hop", + "Melodic Rap", + "Pop", + "Pop Punk", + "Pop Rock", + "R&B", + "Rap", + "Reggaeton", + "Rock", + "Singer-Songwriter", + "Soul", + "Synthwave", + "Trap", + "Trap Soul", + "Urban Contemporary", + }, +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/data/word.go b/vendor/github.com/brianvoe/gofakeit/v7/data/word.go new file mode 100644 index 0000000000..87b48dc887 --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/data/word.go @@ -0,0 +1,83 @@ +package data + +import ( + "sort" +) + +var WordKeys []string + +func init() { + // Loop through Word and put togther a list of keys + for key := range Word { + WordKeys = append(WordKeys, key) + } + + // Sort the keys + sort.Strings(WordKeys) +} + +// Word consists of common english words +var Word = map[string][]string{ + // Nouns + "noun_common": {"time", "person", "year", "way", "day", "thing", "man", "world", "life", "hand", "part", "child", "eye", "woman", "place", "work", "week", "case", "point", "government", "company", "number", "group", "problem", "fact"}, + "noun_concrete": {"apple", "air", "conditioner", "airport", "ambulance", "aircraft", "apartment", "arrow", "antlers", "apro", "alligator", "architect", "ankle", "armchair", "aunt", "ball", "bermudas", "beans", "balloon", "bear", "blouse", "bed", "bow", "bread", "black", "board", "bones", "bill", "bitterness", "boxers", "belt", "brain", "buffalo", "bird", "baby", "book", "back", "butter", "bulb", "buckles", "bat", "bank", "bag", "bra", "boots", "blazer", "bikini", "bookcase", "bookstore", "bus stop", "brass", "brother", "boy", "blender", "bucket", "bakery", "bow", "bridge", "boat", "car", "cow", "cap", "cooker", "cheeks", "cheese", "credenza", "carpet", "crow", "crest", "chest", "chair", "candy", "cabinet", "cat", "coffee", "children", "cookware", "chaise longue", "chicken", "casino", "cabin", "castle", "church", "cafe", "cinema", "choker", "cravat", "cane", "costume", "cardigan", "chocolate", "crib", "couch", "cello", "cashier", "composer", "cave", "country", "computer", "canoe", "clock", "dog", "deer", "donkey", "desk", "desktop", "dress", "dolphin", "doctor", "dentist", "drum", "dresser", "designer", "detective", "daughter", "egg", "elephant", "earrings", "ears", "eyes", "estate", "finger", "fox", "frock", "frog", "fan", "freezer", "fish", "film", "foot", "flag", "factory", "father", "farm", "forest", "flower", "fruit", "fork", "grapes", "goat", "gown", "garlic", "ginger", "giraffe", "gauva", "grains", "gas station", "garage", "gloves", "glasses", "gift", "galaxy", "guitar", "grandmother", "grandfather", "governor", "girl", "guest", "hamburger", "hand", "head", "hair", "heart", "house", "horse", "hen", "horn", "hat", "hammer", "hostel", "hospital", "hotel", "heels", "herbs", "host", "jacket", "jersey", "jewelry", "jaw", "jumper", "judge", "juicer", "keyboard", "kid", "kangaroo", "koala", "knife", "lemon", "lion", 
"leggings", "leg", "laptop", "library", "lamb", "london", "lips", "lung", "lighter", "luggage", "lamp", "lawyer", "mouse", "monkey", "mouth", "mango", "mobile", "milk", "music", "mirror", "musician", "mother", "man", "model", "mall", "museum", "market", "moonlight", "medicine", "microscope", "newspaper", "nose", "notebook", "neck", "noodles", "nurse", "necklace", "noise", "ocean", "ostrich", "oil", "orange", "onion", "oven", "owl", "paper", "panda", "pants", "palm", "pasta", "pumpkin", "pharmacist", "potato", "parfume", "panther", "pad", "pencil", "pipe", "police", "pen", "pharmacy", "police station", "parrot", "plane", "pigeon", "phone", "peacock", "pencil", "pig", "pouch", "pagoda", "pyramid", "purse", "pancake", "popcorn", "piano", "physician", "photographer", "professor", "painter", "park", "plant", "parfume", "radio", "razor", "ribs", "rainbow", "ring", "rabbit", "rice", "refrigerator", "remote", "restaurant", "road", "surgeon", "scale", "shampoo", "sink", "salt", "shark", "sandals", "shoulder", "spoon", "soap", "sand", "sheep", "sari", "stomach", "stairs", "soup", "shoes", "scissors", "sparrow", "shirt", "suitcase", "stove", "stairs", "snowman", "shower", "swan", "suit", "sweater", "smoke", "skirt", "sofa", "socks", "stadium", "skyscraper", "school", "sunglasses", "sandals", "slippers", "shorts", "sandwich", "strawberry", "spaghetti", "shrimp", "saxophone", "sister", "son", "singer", "senator", "street", "supermarket", "swimming pool", "star", "sky", "sun", "spoon", "ship", "smile", "table", "turkey", "tie", "toes", "truck", "train", "taxi", "tiger", "trousers", "tongue", "television", "teacher", "turtle", "tablet", "train station", "toothpaste", "tail", "theater", "trench coat", "tea", "tomato", "teen", "tunnel", "temple", "town", "toothbrush", "tree", "toy", "tissue", "telephone", "underwear", "uncle", "umbrella", "vest", "voice", "veterinarian", "villa", "violin", "village", "vehicle", "vase", "wallet", "wolf", "waist", "wrist", "water melon", "whale", "water", "wings", "whisker", "watch", "woman", "washing machine", "wheelchair", "waiter", "wound", "xylophone", "zebra", "zoo"}, + "noun_abstract": {"fiction", "horror", "dream", "luck", "movement", "right", "clarity", "joy", "care", "trend", "belief", "sorrow", "joy", "failure", "slavery", "riches", "fashion", "envy", "success", "fear", "union", "luxury", "freedom", "generosity", "wit", "peace", "hatred", "thrill", "brilliance", "care", "wealth", "religion", "divorce", "goal", "stupidity", "friendship", "goodness", "rhythm", "timing", "infancy", "disregard", "riches", "appetite", "loneliness", "pleasure", "love", "beauty", "annoyance", "kindness", "nap", "gain", "talent", "religion", "lie", "truth", "solitude", "justice", "bravery", "calm", "childhood", "confusion", "ability", "loss", "thought", "growth", "cleverness", "anger", "horror", "marriage", "delay", "philosophy", "generation", "wisdom", "dishonesty", "happiness", "coldness", "poverty", "brilliance", "luxuty", "sleep", "awareness", "idea", "disregard", "slavery", "growth", "company", "irritation", "advantage", "mercy", "speed", "pain", "gossip", "crime", "comfort", "frailty", "life", "patience", "omen", "deceit", "elegance"}, + "noun_collective_people": {"band", "troupe", "dynasty", "group", "bevy", "staff", "crowd", "party", "board", "regiment", "crew", "tribe", "body", "patrol", "congregation", "pack", "bunch", "company", "team", "mob", "caravan", "line", "troop", "choir", "host", "posse", "class", "gang", "horde"}, + "noun_collective_animal": {"cackle", "mustering", 
"mob", "wisp", "pod", "bale", "murder", "muster", "brace", "exaltation", "party", "flock", "cast", "sedge", "stand", "scold", "team", "covey", "trip", "army", "school", "nest", "leap", "host", "troop"}, + "noun_collective_thing": {"wad", "pair", "album", "string", "anthology", "reel", "outfit", "fleet", "comb", "archipelago", "quiver", "bale", "packet", "hedge", "basket", "orchard", "batch", "library", "battery", "set", "harvest", "block", "forest", "book", "group", "bouquet", "collection", "bowl", "stack", "bunch", "hand", "bundle", "catalog", "shower", "ream", "chest", "heap", "range", "cluster", "pack", "hail", "cloud", "galaxy", "sheaf", "clump"}, + "noun_countable": {"camp", "hospital", "shirt", "sock", "plant", "cup", "fork", "spoon", "plate", "straw", "town", "box", "bird", "father", "answer", "egg", "purse", "mirror", "mistake", "toilet", "toothbrush", "shower", "towel", "pool", "corner", "card", "lawn", "city", "egg", "yard", "burger", "kilometer", "mile", "father", "film", "actor", "issue", "machine", "liter", "room", "station", "journey", "castle", "hour", "finger", "boy", "book", "year", "second", "son", "month", "group", "hall", "cat", "week", "picture", "day", "village", "effect", "baby", "weekend", "class", "meal", "river", "grade", "bush", "desk", "stream", "method", "brother", "sister", "factory", "aunt", "bush", "program", "uncle", "ball", "cousin", "wall", "grandmother", "cup", "grandfather", "week", "school", "shirt", "child", "king", "road", "judge", "bridge", "car", "line", "book", "eye", "teacher", "foot", "party", "face", "day", "chest", "handle", "week", "hotel", "eye", "animal", "doctor", "adult", "village", "key", "bird", "bank", "program", "idea", "gun", "card", "brother", "dress", "room", "door", "mouth", "club", "game", "ring", "project", "sister", "road", "coat", "account", "group", "cigarette", "farm", "river", "college", "computer", "walk", "corner", "cat", "head", "street", "election", "country", "chair", "crowd", "cup", "plant", "farm", "handle", "model", "book", "message", "battle", "pen", "pencil", "elephant", "carrot", "onion", "garden", "country", "engine", "bill", "apple", "noun", "club", "crowd", "window", "field", "friend", "verb", "class", "flower", "seed", "lake", "plant", "animal", "ocean", "whale", "fish", "stream", "cloud", "couch", "steak", "problem", "light", "door", "room", "painting", "shop", "apartment", "candle", "adult", "building", "plan", "page", "ball", "game", "animal", "apartment", "box", "thought", "walk", "lady", "bottle", "article", "game", "kettle", "car", "house", "hoses", "orange", "phone", "app", "window", "door", "dollar", "foot", "cent", "library", "cat", "bed", "pound", "gate", "tomatoes", "gun", "holiday", "woman", "job", "shock", "salary", "tax", "coat", "scooter", "dog", "problem", "field", "answer", "ear", "camp", "case", "road", "woman", "product", "bridge", "man", "dream", "idea", "scheme", "invention", "cigarette", "mother", "friend", "chapter", "computer", "dream", "father", "child", "motor", "deskpath", "factory", "park", "newspaper", "hat", "dream", "table", "kitchen", "student", "captain", "doctor", "bus", "neck", "class", "list", "member", "chest", "valley", "product", "horse", "captain", "star", "hour", "page", "bus", "girl", "month", "child", "house", "boy", "bill", "kitchen", "chapter", "boat", "hand", "dress", "table", "wall", "chair", "train", "minute", "magazine", "bus", "party", "bird", "lake", "job", "nation", "bike", "election", "hand", "box", "beach", "address", "project", "task", "park", "face", 
"college", "bell", "plane", "store", "hall", "accident", "daughter", "ship", "candy", "smile", "city", "island", "case", "spot", "film", "husband", "artist", "tour", "bag", "boat", "driver", "office", "chair", "path", "dog", "bag", "finger", "apartment", "garden", "heart", "year", "engine", "girl", "day", "castle", "plane", "ring", "brother", "edge", "picture", "meeting", "tent", "dog", "hat", "head", "bottle", "hill"}, + "noun_uncountable": {"accommodation", "advertising", "air", "aid", "advice", "anger", "art", "assistance", "bread", "business", "butter", "calm", "cash", "chaos", "cheese", "childhood", "clothing", "coffee", "content", "corruption", "courage", "currency", "damage", "danger", "darkness", "data", "determination", "economics", "education", "electricity", "employment", "energy", "entertainment", "enthusiasm", "equipment", "evidence", "failure", "fame", "fire", "flour", "food", "freedom", "friendship", "fuel", "furniture", "fun", "genetics", "gold", "grammar", "guilt", "hair", "happiness", "harm", "health", "heat", "help", "homework", "honesty", "hospitality", "housework", "humour", "imagination", "importance", "information", "innocence", "intelligence", "jealousy", "juice", "justice", "kindness", "knowledge", "labour", "lack", "laughter", "leisure", "literature", "litter", "logic", "love", "luck", "magic", "management", "metal", "milk", "money", "motherhood", "motivation", "music", "nature", "news", "nutrition", "obesity", "oil", "old age", "oxygen", "paper", "patience", "permission", "pollution", "poverty", "power", "pride", "production", "progress", "pronunciation", "publicity", "punctuation", "quality", "quantity", "racism", "rain", "relaxation", "research", "respect", "rice", "room (space)", "rubbish", "safety", "salt", "sand", "seafood", "shopping", "silence", "smoke", "snow", "software", "soup", "speed", "spelling", "stress", "sugar", "sunshine", "tea", "tennis", "time", "tolerance", "trade", "traffic", "transportation", "travel", "trust", "understanding", "unemployment", "usage", "violence", "vision", "warmth", "water", "wealth", "weather", "weight", "welfare", "wheat", "width", "wildlife", "wisdom", "wood", "work", "yoga", "youth"}, + "noun_determiner": {"the", "a", "an", "this", "that", "these", "those", "my", "your", "his", "her", "its", "our", "their", "some", "any", "each", "every", "certain"}, + //"noun_proper": {}, // This refers to an actual person(John Doe), place(Chipotle, Tennessee) + + // Verbs + "verb_action": {"ride", "sit", "stand", "fight", "laugh", "read", "play", "listen", "cry", "think", "sing", "watch", "dance", "turn", "win", "fly", "cut", "throw", "sleep", "close", "open", "write", "give", "jump", "eat", "drink", "cook", "wash", "wait", "climb", "talk", "crawl", "dream", "dig", "clap", "knit", "sew", "smell", "kiss", "hug", "snore", "bathe", "bow", "paint", "dive", "ski", "stack", "buy", "shake"}, + "verb_transitive": {"accept", "acknowledge", "admit", "aggravate", "answer", "ask", "avoid", "beat", "bend", "bless", "bother", "break", "brush", "build", "cancel", "capture", "carry", "catch", "change", "chase", "chastise", "clean", "collect", "comfort", "contradict", "convert", "crack", "dazzle", "deceive", "define", "describe", "destroy", "discover", "distinguish", "drag", "dress", "dunk", "edify", "embarrass", "embrace", "enable", "encourage", "enlist", "entertain", "execute", "fascinate", "finish", "flick", "follow", "forget", "forgive", "freeze", "frighten", "furnish", "gather", "grab", "grasp", "grease", "grip", "handle", "hang", "head", "help", 
"highlight", "honour", "hurry", "hurt", "imitate", "impress", "indulge", "insert", "inspect", "interest", "interrupt", "intimidate", "involve", "irritate", "join", "judge", "keep", "key", "kill", "kiss", "knock", "lag", "lay", "lead", "lean", "leave", "lighten", "limit", "link", "load", "love", "lower", "maintain", "marry", "massage", "melt", "mock", "munch", "murder", "notice", "number", "offend", "order", "page", "paralyze", "persuade", "petrify", "pierce", "place", "please", "poison", "possess", "prepare", "promise", "protect", "punch", "purchase", "puzzle", "question", "quit", "raise", "reassure", "recognise", "refill", "remind", "remove", "repel", "research", "retard", "ring", "run", "satisfy", "scold", "select", "slap", "smell", "soften", "specify", "spell", "spit", "spread", "strike", "surprise", "swallow", "switch", "taste", "teach", "tickle", "tighten", "toast", "toss", "transform", "try", "turn", "tweak", "twist", "understand", "understimate", "unload", "unlock", "untie", "upgrade", "use", "vacate", "videotape", "vilify", "viplate", "wake", "want", "warm", "warn", "wash", "watch", "wear", "weep", "widen", "win", "wipe", "wrack", "wrap", "wreck"}, + "verb_intransitive": {"agree", "appear", "arrive", "become", "belong", "collapse", "consist", "cost", "cough", "cry", "depend", "die", "disappear", "emerge", "exist", "explode", "fade", "fall", "fast", "float", "fly", "gallop", "go", "grow", "happen", "have", "hiccup", "inquire", "jump", "kneel", "knock", "last", "laugh", "lead", "lean", "leap", "learn", "left", "lie", "limp", "listen", "live", "look", "march", "mourn", "move", "occur", "panic", "party", "pause", "peep", "pose", "pounce", "pout", "pray", "preen", "read", "recline", "relax", "relent", "remain", "respond", "result", "revolt", "rise", "roll", "run", "rush", "sail", "scream", "shake", "shout", "sigh", "sit", "skip", "sleep", "slide", "smell", "smile", "snarl", "sneeze", "soak", "spin", "spit", "sprint", "squeak", "stagger", "stand", "stay", "swim", "swing", "twist", "vanish", "vomit", "wade", "wait", "wake", "walk", "wander", "wave", "whirl", "wiggle", "work", "yell"}, + "verb_linking": {"am", "is", "was", "are", "were", "being", "been", "be", "have", "has", "had", "do", "does", "did", "shall", "will", "should", "would", "may", "might", "must", "can", "could"}, + "verb_helping": {"is", "can", "be", "do", "may", "had", "should", "was", "has", "could", "are", "will", "been", "did", "might", "were", "does", "must", "have", "would", "am", "shall", "being"}, + + // Adverbs + "adverb_manner": {"accidentally", "angrily", "anxiously", "awkwardly", "badly", "beautifully", "blindly", "boldly", "bravely", "brightly", "busily", "calmly", "carefully", "carelessly", "cautiously", "cheerfully", "clearly", "closely", "correctly", "courageously", "cruelly", "daringly", "deliberately", "doubtfully", "eagerly", "easily", "elegantly", "enormously", "enthusiastically", "equally", "eventually", "exactly", "faithfully", "fast", "fatally", "fiercely", "fondly", "foolishly", "fortunately", "frankly", "frantically", "generously", "gently", "gladly", "gracefully", "greedily", "happily", "hard", "hastily", "healthily", "honestly", "hungrily", "hurriedly", "inadequately", "ingeniously", "innocently", "inquisitively", "irritably", "joyously", "justly", "kindly", "lazily", "loosely", "loudly", "madly", "mortally", "mysteriously", "neatly", "nervously", "noisily", "obediently", "openly", "painfully", "patiently", "perfectly", "politely", "poorly", "powerfully", "promptly", "punctually", "quickly", 
"quietly", "rapidly", "rarely", "really", "recklessly", "regularly", "reluctantly", "repeatedly", "rightfully", "roughly", "rudely", "sadly", "safely", "selfishly", "sensibly", "seriously", "sharply", "shyly", "silently", "sleepily", "slowly", "smoothly", "so", "softly", "solemnly", "speedily", "stealthily", "sternly", "straight", "stupidly", "successfully", "suddenly", "suspiciously", "swiftly", "tenderly", "tensely", "thoughtfully", "tightly", "truthfully", "unexpectedly", "victoriously", "violently", "vivaciously", "warmly", "weakly", "wearily", "well", "wildly", "wisely"}, + "adverb_degree": {"almost", "absolutely", "awfully", "badly", "barely", "completely", "decidedly", "deeply", "enough", "enormously", "entirely", "extremely", "fairly", "far", "fully", "greatly", "hardly", "highly", "how", "incredibly", "indeed", "intensely", "just", "least", "less", "little", "lots", "most", "much", "nearly", "perfectly", "positively", "practically", "pretty", "purely", "quite", "rather", "really", "scarcely", "simply", "so", "somewhat", "strongly", "terribly", "thoroughly", "too", "totally", "utterly", "very", "virtually", "well"}, + "adverb_place": {"about", "above", "abroad", "anywhere", "away", "back", "backwards", "behind", "below", "down", "downstairs", "east", "elsewhere", "far", "here", "in", "indoors", "inside", "near", "nearby", "off", "on", "out", "outside", "over", "there", "towards", "under", "up", "upstairs", "where"}, + "adverb_time_definite": {"now", "then", "today", "tomorrow", "tonight", "yesterday"}, + "adverb_time_indefinite": {"already", "before", "early", "earlier", "eventually", "finally", "first", "formerly", "just", "last", "late", "later", "lately", "next", "previously", "recently", "since", "soon", "still", "yet"}, + "adverb_frequency_definite": {"annually", "daily", "fortnightly", "hourly", "monthly", "nightly", "quarterly", "weekly", "yearly"}, + "adverb_frequency_indefinite": {"always", "constantly", "ever", "frequently", "generally", "infrequently", "never", "normally", "occasionally", "often", "rarely", "regularly", "seldom", "sometimes", "regularly", "usually"}, + + // Prepositions + "preposition_simple": {"at", "by", "as", "but", "from", "for", "into", "in", "than", "of", "off", "on", "out", "over", "till", "to", "up", "upon", "with", "under", "down"}, + "preposition_double": {"outside of", "out of", "upon", "within", "inside", "without", "onto", "from behind", "because of", "out of", "throughout", "up to", "before", "due to", "according to", "from beneath", "next to", "from above"}, + "preposition_compound": {"according to", "as to", "onto", "across", "after", "beyond", "without", "opposite to", "away from", "aside from", "in favor of", "in front of", "because of", "as for", "near to", "behind", "along", "outside", "on account of", "on behalf of", "but for", "ahead of", "close to", "despite", "depending on", "due to", "in addition to", "next to", "in between", "in case of", "owing to", "along with", "around", "between", "apart from", "in return for", "out of", "instead of", "outside of", "other than", "together with", "up to", "above", "about"}, + + // Adjectives + "adjective_descriptive": {"adorable", "adventurous", "agreeable", "alive", "aloof", "amused", "angry", "annoying", "anxious", "arrogant", "ashamed", "attractive", "auspicious", "awful", "bad", "beautiful", "black", "blue", "blushing", "bored", "brave", "bright", "brown", "busy", "calm", "careful", "cautious", "charming", "cheerful", "clean", "clear", "clever", "clumsy", "colorful", "comfortable", 
"concerning", "condemned", "confusing", "cooperative", "courageous", "creepy", "crowded", "cruel", "curios", "cute", "dangerous", "dark", "defiant", "delightful", "difficult", "disgusting", "distinct", "disturbed", "dizzying", "drab", "dull", "eager", "easy", "elated", "elegant", "embarrassed", "enchanted", "encouraging", "energetic", "enthusiastic", "envious", "evil", "exciting", "expensive", "exuberant", "faithful", "famous", "fancy", "fantastic", "fierce", "filthy", "fine", "foolish", "fragile", "frail", "frantic", "friendly", "frightening", "funny", "gentle", "gifted", "glamorous", "gleaming", "glorious", "good", "gorgeous", "graceful", "green", "grieving", "grumpy", "handsome", "happy", "healthy", "helpful", "helpless", "hilarious", "homeless", "horrible", "hungry", "hurt", "ill", "important", "impossible", "impromptu", "improvised", "inexpensive", "innocent", "inquiring", "itchy", "jealous", "jittery", "joyous", "kind", "knightly", "lazy", "lemony", "light", "lingering", "lively", "lonely", "long", "lovely", "lucky", "magnificent", "modern", "motionless", "muddy", "mushy", "mysterious", "naughty", "niche", "nervous", "nice", "nutty", "obedient", "obnoxious", "odd", "open", "orange", "outrageous", "outstanding", "panicked", "perfect", "pink", "plain", "pleasant", "poised", "poor", "powerless", "precious", "prickling", "proud", "purple", "puzzled", "quaint", "queer", "quizzical", "realistic", "red", "relieved", "repelling", "repulsive", "rich", "scary", "scenic", "selfish", "shiny", "shy", "silly", "sleepy", "smiling", "smoggy", "sore", "sparkly", "splendid", "spotted", "stormy", "strange", "stupid", "successful", "super", "talented", "tame", "tasty", "tender", "tense", "terse", "terrible", "thankful", "thoughtful", "tired", "tough", "troubling", "ugly", "uninterested", "unusual", "upset", "uptight", "varied", "vast", "victorious", "wandering", "weary", "white", "wicked", "wide", "wild", "witty", "worrisome", "wrong", "yellow", "young", "zealous"}, + "adjective_quantitative": {"a little", "a little bit", "a lot", "abundant", "all", "any", "couple", "double", "each", "either", "empty", "enough", "enough of", "every", "few", "full", "great", "half", "heavily", "heavy", "huge", "hundred", "hundreds", "insufficient", "light", "little", "lots of", "many", "most", "much", "neither", "no", "numerous", "plenty of", "several", "significant", "single", "so few", "some", "sparse", "substantial", "sufficient", "too", "whole"}, + "adjective_proper": {"Afghan", "African", "Alaskan", "Alpine", "Amazonian", "American", "Antarctic", "Aristotelian", "Asian", "Atlantean", "Atlantic", "Bahamian", "Bahrainean", "Balinese", "Bangladeshi", "Barbadian", "Barcelonian", "Beethovenian", "Belgian", "Beninese", "Bismarckian", "Brazilian", "British", "Buddhist", "Burkinese", "Burmese", "Caesarian", "Californian", "Cambodian", "Canadian", "Chinese", "Christian", "Colombian", "Confucian", "Congolese", "Cormoran", "Costa Rican", "Cypriot", "Danish", "Darwinian", "Diabolical", "Dutch", "Ecuadorian", "Egyptian", "Einsteinian", "Elizabethan", "English", "Finnish", "French", "Freudian", "Gabonese", "Gaussian", "German", "Greek", "Guyanese", "Himalayan", "Hindu", "Hitlerian", "Honduran", "Icelandic", "Indian", "Indonesian", "Intelligent", "Iranian", "Iraqi", "Italian", "Japanese", "Jungian", "Kazakh", "Korean", "kuban", "Kyrgyz", "Laotian", "Lebanese", "Lilliputian", "Lincolnian", "Machiavellian", "Madagascan", "Malagasy", "Marxist", "Mayan", "Mexican", "Middle Eastern", "Monacan", "Mozartian", "Muscovite", "Nepalese", 
"Newtonian", "Norwegian", "Orwellian", "Pacific", "Parisian", "Peruvian", "Philippine", "Plutonian", "Polish", "Polynesian", "Portuguese", "Putinist", "Roman", "Romanian", "Rooseveltian", "Russian", "Salvadorean", "Sammarinese", "Senegalese", "Shakespearean", "Slovak", "Somali", "South American", "Spanish", "Spanish", "Sri-Lankan", "Sudanese", "Swazi", "Swiss", "Taiwanese", "Thai", "Thatcherite", "Tibetan", "Torontonian", "Turkish", "Turkishish", "Turkmen", "Uzbek", "Victorian", "Viennese", "Vietnamese", "Welsh"}, + "adjective_demonstrative": {"this", "that", "these", "those", "it", "here", "there", "over there"}, + "adjective_possessive": {"my", "your", "his", "her", "its", "our", "their"}, + "adjective_interrogative": {"what", "whose", "where", "why", "how", "which"}, + "adjective_indefinite": {"all", "any", "anything", "everyone", "few", "nobody", "one", "some", "someone", "everybody", "anyone", "each", "everything", "many", "none", "several", "somebody"}, + + // Pronouns + "pronoun_personal": {"I", "we", "you", "he", "she", "it", "they"}, + "pronoun_object": {"me", "us", "you", "her", "him", "it", "them"}, + "pronoun_possessive": {"mine", "ours", "yours", "hers", "his", "theirs"}, + "pronoun_reflective": {"myself", "yourself", "herself", "himself", "itself", "ourselves", "yourselves", "themselves"}, + "pronoun_indefinite": {"all", "another", "any", "anybody", "anyone", "anything", "both", "each", "either", "everybody", "everyone", "everything", "few", "many", "most", "neither", "nobody", "none", "no one", "nothing", "one", "other", "others", "several", "some", "somebody", "someone", "something", "such"}, + "pronoun_demonstrative": {"this", "that", "these", "those"}, + "pronoun_interrogative": {"who", "whom", "which", "what", "whose", "where", "when", "why", "how"}, + "pronoun_relative": {"as", "that", "what", "whatever", "which", "whichever", "who", "whoever", "whom", "whomever", "whose"}, + + // Connectives + "connective_time": {"after a while", "afterwards", "at once", "at this moment", "at this point", "before that", "finally", "first", "here", "in the end", "lastly", "later on", "meanwhile", "next", "next time", "now", "on another occasion", "previously", "since", "soon", "straightaway", "then", "until then", "when", "whenever", "while"}, + "connective_comparative": {"additionally", "also", "as well", "even", "furthermore", "in addition", "indeed", "let alone", "moreover", "not only", "alternatively", "anyway", "but", "by contrast", "differs from", "elsewhere", "even so", "however", "in contrast", "in fact", "in other respects", "in spite of this", "in that respect", "instead", "nevertheless", "on the contrary", "on the other hand", "rather", "though", "whereas", "yet", "after all", "anyway", "besides", "moreover"}, + "connective_complaint": {"besides", "e.g.", "for example", "for instance", "i.e.", "in other words", "in that", "that is to say"}, + "connective_listing": {"firstly", "secondly", "first of all", "finally", "lastly", "for one thing", "for another", "in the first place", "to begin with", "next", "in summation", "to conclude"}, + "connective_casual": {"accordingly", "all the same", "an effect of", "an outcome of", "an upshot of", "as a consequence of", "as a result of", "because", "caused by", "consequently", "despite this", "even though", "hence", "however", "in that case", "moreover", "nevertheless", "otherwise", "so", "so as", "stemmed from", "still", "then", "therefore", "though", "under the circumstances", "yet"}, + "connective_examplify": {"accordingly", "as a 
result", "as exemplified by", "consequently", "for example", "for instance", "for one thing", "including", "provided that", "since", "so", "such as", "then", "therefore", "these include", "through", "unless", "without"}, + + // Misc + "interjection": {"wow", "hey", "oops", "ouch", "yay", "aha", "eek", "huh", "hmm", "whoa", "yikes", "phew", "gee", "alas", "bravo"}, +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/datetime.go b/vendor/github.com/brianvoe/gofakeit/v7/datetime.go new file mode 100644 index 0000000000..6723f8affa --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/datetime.go @@ -0,0 +1,699 @@ +package gofakeit + +import ( + "strconv" + "strings" + "time" +) + +var currentYear = time.Now().Year() + +// Date will generate a random time.Time struct +func Date() time.Time { return date(GlobalFaker) } + +// Date will generate a random time.Time struct +func (f *Faker) Date() time.Time { return date(f) } + +func date(f *Faker) time.Time { + return time.Date(year(f), time.Month(month(f)), day(f), hour(f), minute(f), second(f), nanoSecond(f), time.UTC) +} + +// PastDate will generate a random past time.Time struct +func PastDate() time.Time { return pastDate(GlobalFaker) } + +// PastDate will generate a random past time.Time struct +func (f *Faker) PastDate() time.Time { return pastDate(f) } + +func pastDate(f *Faker) time.Time { + return time.Now().Add(time.Hour * -time.Duration(number(f, 1, 12))) +} + +// FutureDate will generate a random future time.Time struct +func FutureDate() time.Time { return futureDate(GlobalFaker) } + +// FutureDate will generate a random future time.Time struct +func (f *Faker) FutureDate() time.Time { return futureDate(f) } + +func futureDate(f *Faker) time.Time { + return time.Now().Add(time.Hour * time.Duration(number(f, 1, 12))) +} + +// DateRange will generate a random time.Time struct between a start and end date +func DateRange(start, end time.Time) time.Time { return dateRange(GlobalFaker, start, end) } + +// DateRange will generate a random time.Time struct between a start and end date +func (f *Faker) DateRange(start, end time.Time) time.Time { return dateRange(f, start, end) } + +func dateRange(f *Faker, start time.Time, end time.Time) time.Time { + return time.Unix(0, int64(number(f, int(start.UnixNano()), int(end.UnixNano())))).UTC() +} + +// NanoSecond will generate a random nano second +func NanoSecond() int { return nanoSecond(GlobalFaker) } + +// NanoSecond will generate a random nano second +func (f *Faker) NanoSecond() int { return nanoSecond(f) } + +func nanoSecond(f *Faker) int { return number(f, 0, 999999999) } + +// Second will generate a random second +func Second() int { return second(GlobalFaker) } + +// Second will generate a random second +func (f *Faker) Second() int { return second(f) } + +func second(f *Faker) int { return number(f, 0, 59) } + +// Minute will generate a random minute +func Minute() int { return minute(GlobalFaker) } + +// Minute will generate a random minute +func (f *Faker) Minute() int { return minute(f) } + +func minute(f *Faker) int { return number(f, 0, 59) } + +// Hour will generate a random hour - in military time +func Hour() int { return hour(GlobalFaker) } + +// Hour will generate a random hour - in military time +func (f *Faker) Hour() int { return hour(f) } + +func hour(f *Faker) int { return number(f, 0, 23) } + +// Day will generate a random day between 1 - 31 +func Day() int { return day(GlobalFaker) } + +// Day will generate a random day between 1 - 31 +func (f *Faker) Day() int { 
return day(f) } + +func day(f *Faker) int { return number(f, 1, 31) } + +// WeekDay will generate a random weekday string (Monday-Sunday) +func WeekDay() string { return weekDay(GlobalFaker) } + +// WeekDay will generate a random weekday string (Monday-Sunday) +func (f *Faker) WeekDay() string { return weekDay(f) } + +func weekDay(f *Faker) string { return time.Weekday(number(f, 0, 6)).String() } + +// Month will generate a random month int +func Month() int { return month(GlobalFaker) } + +// Month will generate a random month int +func (f *Faker) Month() int { return month(f) } + +func month(f *Faker) int { return number(f, 1, 12) } + +// MonthString will generate a random month string +func MonthString() string { return monthString(GlobalFaker) } + +// MonthString will generate a random month string +func (f *Faker) MonthString() string { return monthString(f) } + +func monthString(f *Faker) string { return time.Month(number(f, 1, 12)).String() } + +// Year will generate a random year between 1900 - current year +func Year() int { return year(GlobalFaker) } + +// Year will generate a random year between 1900 - current year +func (f *Faker) Year() int { return year(f) } + +func year(f *Faker) int { return number(f, 1900, currentYear) } + +// TimeZone will select a random timezone string +func TimeZone() string { return timeZone(GlobalFaker) } + +// TimeZone will select a random timezone string +func (f *Faker) TimeZone() string { return timeZone(f) } + +func timeZone(f *Faker) string { return getRandValue(f, []string{"timezone", "text"}) } + +// TimeZoneFull will select a random full timezone string +func TimeZoneFull() string { return timeZoneFull(GlobalFaker) } + +// TimeZoneFull will select a random full timezone string +func (f *Faker) TimeZoneFull() string { return timeZoneFull(f) } + +func timeZoneFull(f *Faker) string { return getRandValue(f, []string{"timezone", "full"}) } + +// TimeZoneRegion will select a random region style timezone string, e.g. "America/Chicago" +func TimeZoneRegion() string { return timeZoneRegion(GlobalFaker) } + +// TimeZoneRegion will select a random region style timezone string, e.g. 
"America/Chicago" +func (f *Faker) TimeZoneRegion() string { return timeZoneRegion(f) } + +func timeZoneRegion(f *Faker) string { return getRandValue(f, []string{"timezone", "region"}) } + +// TimeZoneAbv will select a random timezone abbreviation string +func TimeZoneAbv() string { return timeZoneAbv(GlobalFaker) } + +// TimeZoneAbv will select a random timezone abbreviation string +func (f *Faker) TimeZoneAbv() string { return timeZoneAbv(f) } + +func timeZoneAbv(f *Faker) string { return getRandValue(f, []string{"timezone", "abr"}) } + +// TimeZoneOffset will select a random timezone offset +func TimeZoneOffset() float32 { return timeZoneOffset(GlobalFaker) } + +// TimeZoneOffset will select a random timezone offset +func (f *Faker) TimeZoneOffset() float32 { return timeZoneOffset(f) } + +func timeZoneOffset(f *Faker) float32 { + value, _ := strconv.ParseFloat(getRandValue(f, []string{"timezone", "offset"}), 32) + return float32(value) +} + +// javaDateTimeFormatToGolangFormat converts java date/time format into go date/time format +func javaDateTimeFormatToGolangFormat(format string) string { + format = strings.Replace(format, "ddd", "_2", -1) + format = strings.Replace(format, "dd", "02", -1) + format = strings.Replace(format, "d", "2", -1) + + format = strings.Replace(format, "HH", "15", -1) + format = strings.Replace(format, "H", "15", -1) + + format = strings.Replace(format, "hh", "03", -1) + format = strings.Replace(format, "h", "3", -1) + + format = strings.Replace(format, "mm", "04", -1) + format = strings.Replace(format, "m", "4", -1) + + format = strings.Replace(format, "ss", "05", -1) + format = strings.Replace(format, "s", "5", -1) + + format = strings.Replace(format, "yyyy", "2006", -1) + format = strings.Replace(format, "yy", "06", -1) + format = strings.Replace(format, "y", "06", -1) + + format = strings.Replace(format, "SSS", "000", -1) + + format = strings.Replace(format, "a", "pm", -1) + format = strings.Replace(format, "aa", "PM", -1) + + format = strings.Replace(format, "MMMM", "January", -1) + format = strings.Replace(format, "MMM", "Jan", -1) + format = strings.Replace(format, "MM", "01", -1) + format = strings.Replace(format, "M", "1", -1) + + format = strings.Replace(format, "ZZ", "-0700", -1) + + if !strings.Contains(format, "Z07") { + format = strings.Replace(format, "Z", "-07", -1) + } + + format = strings.Replace(format, "zz:zz", "Z07:00", -1) + format = strings.Replace(format, "zzzz", "Z0700", -1) + format = strings.Replace(format, "z", "MST", -1) + + format = strings.Replace(format, "EEEE", "Monday", -1) + format = strings.Replace(format, "E", "Mon", -1) + + return format +} + +func addDateTimeLookup() { + AddFuncLookup("date", Info{ + Display: "Date", + Category: "datetime", + Description: "Representation of a specific day, month, and year, often used for chronological reference", + Example: "2006-01-02T15:04:05Z07:00", + Output: "string", + Aliases: []string{ + "date string", "calendar date", "datetime", "timestamp", "chronological reference", + }, + Keywords: []string{ + "date", "time", "day", "month", "year", "format", "rfc3339", "iso8601", "utc", + }, + Params: []Param{ + { + Field: "format", + Display: "Format", + Type: "string", + Default: "RFC3339", + Options: []string{"ANSIC", "UnixDate", "RubyDate", "RFC822", "RFC822Z", "RFC850", "RFC1123", "RFC1123Z", "RFC3339", "RFC3339Nano"}, + Description: "Date time string format output. 
You may also use golang time format or java time format", + }, + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + format, err := info.GetString(m, "format") + if err != nil { + return nil, err + } + + switch format { + case "ANSIC": + return f.Date().Format(time.ANSIC), nil + case "UnixDate": + return f.Date().Format(time.UnixDate), nil + case "RubyDate": + return f.Date().Format(time.RubyDate), nil + case "RFC822": + return f.Date().Format(time.RFC822), nil + case "RFC822Z": + return f.Date().Format(time.RFC822Z), nil + case "RFC850": + return f.Date().Format(time.RFC850), nil + case "RFC1123": + return f.Date().Format(time.RFC1123), nil + case "RFC1123Z": + return f.Date().Format(time.RFC1123Z), nil + case "RFC3339": + return f.Date().Format(time.RFC3339), nil + case "RFC3339Nano": + return f.Date().Format(time.RFC3339Nano), nil + default: + if format == "" { + return f.Date().Format(time.RFC3339), nil + } + + return f.Date().Format(javaDateTimeFormatToGolangFormat(format)), nil + } + }, + }) + + AddFuncLookup("daterange", Info{ + Display: "Date Range", + Category: "datetime", + Description: "Random date between two ranges", + Example: "1995-06-15T14:30:00Z", + Output: "string", + Aliases: []string{ + "date interval", "date span", "date window", "between dates", "bounded period", + }, + Keywords: []string{ + "daterange", "range", "between", "date", "time", "random", "bounds", "limits", "window", + }, + Params: []Param{ + { + Field: "startdate", + Display: "Start Date", + Type: "string", + Default: "1970-01-01", + Description: "Start date time string", + }, + { + Field: "enddate", + Display: "End Date", + Type: "string", + Default: time.Now().Format("2006-01-02"), + Description: "End date time string", + }, + { + Field: "format", + Display: "Format", + Type: "string", + Default: "yyyy-MM-dd", + Description: "Date time string format", + }, + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + format, err := info.GetString(m, "format") + if err != nil { + return nil, err + } + format = javaDateTimeFormatToGolangFormat(format) + + startdate, err := info.GetString(m, "startdate") + if err != nil { + return nil, err + } + startDateTime, err := time.Parse(format, startdate) + if err != nil { + return nil, err + } + + enddate, err := info.GetString(m, "enddate") + if err != nil { + return nil, err + } + endDateTime, err := time.Parse(format, enddate) + if err != nil { + return nil, err + } + + return DateRange(startDateTime, endDateTime).Format(format), nil + }, + }) + + AddFuncLookup("pastdate", Info{ + Display: "PastDate", + Category: "datetime", + Description: "Date that has occurred before the current moment in time", + Example: "2007-01-24 13:00:35.820738079 +0000 UTC", + Output: "time", + Aliases: []string{ + "past date", "historical date", "previous date", "earlier date", "prior time", + }, + Keywords: []string{ + "date", "time", "occurred", "elapsed", "gone", "expired", "finished", "completed", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { return pastDate(f), nil }, + }) + + AddFuncLookup("futuredate", Info{ + Display: "FutureDate", + Category: "datetime", + Description: "Date that has occurred after the current moment in time", + Example: "2107-01-24 13:00:35.820738079 +0000 UTC", + Output: "time", + Aliases: []string{ + "future date", "upcoming date", "next date", "scheduled date", "later time", + }, + Keywords: []string{ + "future", "date", "time", "forthcoming", "prospective", "anticipated", "scheduled", + }, + 
Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { return futureDate(f), nil }, + }) + + AddFuncLookup("nanosecond", Info{ + Display: "Nanosecond", + Category: "datetime", + Description: "Unit of time equal to one billionth (10^-9) of a second", + Example: "196446360", + Output: "int", + Aliases: []string{ + "nano", "ns value", "tiny time", "ultra precision", "fractional second", + }, + Keywords: []string{ + "nanosecond", "time", "unit", "second", "billionth", "ultra", "high", "resolution", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { return nanoSecond(f), nil }, + }) + + AddFuncLookup("second", Info{ + Display: "Second", + Category: "datetime", + Description: "Unit of time equal to 1/60th of a minute", + Example: "43", + Output: "int", + Aliases: []string{ + "second value", "sec unit", "time second", "sixtieth minute", "time slice", + }, + Keywords: []string{ + "second", "time", "unit", "minute", "sixtieth", "duration", "interval", "sixty", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { return second(f), nil }, + }) + + AddFuncLookup("minute", Info{ + Display: "Minute", + Category: "datetime", + Description: "Unit of time equal to 60 seconds", + Example: "34", + Output: "int", + Aliases: []string{ + "minute value", "time minute", "sixty seconds", "short period", "clock minute", + }, + Keywords: []string{ + "minute", "time", "unit", "60", "seconds", "duration", "interval", "sixtieth", "hour", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { return minute(f), nil }, + }) + + AddFuncLookup("hour", Info{ + Display: "Hour", + Category: "datetime", + Description: "Unit of time equal to 60 minutes", + Example: "8", + Output: "int", + Aliases: []string{ + "hour value", "time hour", "sixty minutes", "clock hour", "time period", + }, + Keywords: []string{ + "hour", "time", "unit", "60", "minutes", "duration", "interval", "day", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { return hour(f), nil }, + }) + + AddFuncLookup("day", Info{ + Display: "Day", + Category: "datetime", + Description: "24-hour period equivalent to one rotation of Earth on its axis", + Example: "12", + Output: "int", + Aliases: []string{ + "calendar day", "day value", "earth rotation", "daily unit", "full day", + }, + Keywords: []string{ + "day", "time", "unit", "axis", "24-hour", "calendar", "sunrise", "sunset", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { return day(f), nil }, + }) + + AddFuncLookup("weekday", Info{ + Display: "Weekday", + Category: "datetime", + Description: "Day of the week excluding the weekend", + Example: "Friday", + Output: "string", + Aliases: []string{ + "weekday name", "business day", "work day", "monday to friday", "weekday label", + }, + Keywords: []string{ + "weekday", "day", "week", "workday", "business", "calendar", "monday", "tuesday", "wednesday", "thursday", "friday", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { return weekDay(f), nil }, + }) + + AddFuncLookup("month", Info{ + Display: "Month", + Category: "datetime", + Description: "Division of the year, typically 30 or 31 days long", + Example: "1", + Output: "string", + Aliases: []string{ + "calendar month", "month value", "monthly unit", "date month", "time month", + }, + Keywords: []string{ + "month", "year", "time", "30", "31", "days", "calendar", "period", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { return month(f), nil }, + }) + + 
AddFuncLookup("monthstring", Info{ + Display: "Month String", + Category: "datetime", + Description: "String representation of a month name", + Example: "September", + Output: "string", + Aliases: []string{ + "month name", "calendar month name", "full month", "month label", "month string", + }, + Keywords: []string{ + "month", "string", "time", "representation", "january", "september", "december", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { return monthString(f), nil }, + }) + + AddFuncLookup("year", Info{ + Display: "Year", + Category: "datetime", + Description: "Period of 365 days, the time Earth takes to orbit the Sun", + Example: "1900", + Output: "int", + Aliases: []string{ + "calendar year", "annual period", "orbit year", "year value", "fiscal year", + }, + Keywords: []string{ + "year", "time", "365", "days", "leap", "calendar", "decade", "century", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { return year(f), nil }, + }) + + AddFuncLookup("timezone", Info{ + Display: "Timezone", + Category: "datetime", + Description: "Region where the same standard time is used, based on longitudinal divisions of the Earth", + Example: "Kaliningrad Standard Time", + Output: "string", + Aliases: []string{ + "time zone", "tz name", "standard time zone", "geographic zone", "regional time", + }, + Keywords: []string{ + "timezone", "time", "earth", "utc", "gmt", "pst", "est", "cst", "mst", "dst", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { return timeZone(f), nil }, + }) + + AddFuncLookup("timezoneabv", Info{ + Display: "Timezone Abbreviation", + Category: "datetime", + Description: "Abbreviated 3-letter word of a timezone", + Example: "KST", + Output: "string", + Aliases: []string{ + "timezone abbr", "tz short code", "abbreviated zone", "short tz name", "zone abbreviation", + }, + Keywords: []string{ + "timezone", "time", "3-letter", "kst", "pst", "est", "gmt", "utc", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { return timeZoneAbv(f), nil }, + }) + + AddFuncLookup("timezonefull", Info{ + Display: "Timezone Full", + Category: "datetime", + Description: "Full name of a timezone", + Example: "(UTC+03:00) Kaliningrad, Minsk", + Output: "string", + Aliases: []string{ + "timezone full", "full tz name", "complete zone name", "long tz name", "detailed zone", + }, + Keywords: []string{ + "timezone", "full", "time", "standard", "format", "display", "utc", "gmt", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { return timeZoneFull(f), nil }, + }) + + AddFuncLookup("timezoneoffset", Info{ + Display: "Timezone Offset", + Category: "datetime", + Description: "The difference in hours from Coordinated Universal Time (UTC) for a specific region", + Example: "-5", + Output: "float32", + Aliases: []string{ + "utc offset", "gmt offset", "tz shift", "time difference", "offset value", + }, + Keywords: []string{ + "timezone", "offset", "utc", "gmt", "plus", "minus", "east", "west", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { return timeZoneOffset(f), nil }, + }) + + AddFuncLookup("timezoneregion", Info{ + Display: "Timezone Region", + Category: "datetime", + Description: "Geographic area sharing the same standard time", + Example: "America/Alaska", + Output: "string", + Aliases: []string{ + "region zone", "geo time region", "tz area", "regional timezone", "country zone", + }, + Keywords: []string{ + "timezone", "time", "america", "europe", "asia", "africa", 
"australia", "continent", "city", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { return timeZoneRegion(f), nil }, + }) + + AddFuncLookup("time", Info{ + Display: "Time", + Category: "datetime", + Description: "Random time string in the specified format", + Example: "14:30:25", + Output: "string", + Aliases: []string{ + "time string", "clock time", "time format", "time value", "hour minute second", + }, + Keywords: []string{ + "time", "clock", "hour", "minute", "second", "format", "24-hour", "12-hour", "am", "pm", + }, + Params: []Param{ + { + Field: "format", + Display: "Format", + Type: "string", + Default: "HH:mm:ss", + Options: []string{"HH:mm:ss", "HH:mm", "hh:mm:ss a", "hh:mm a", "H:mm", "h:mm a"}, + Description: "Time format string. Supports Java time format patterns or Go time format patterns", + }, + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + format, err := info.GetString(m, "format") + if err != nil { + return nil, err + } + + // Convert java format to golang format + golangFormat := javaDateTimeFormatToGolangFormat(format) + + // Create a time with today's date but random time + t := time.Date(2000, 1, 1, hour(f), minute(f), second(f), nanoSecond(f), time.UTC) + + return t.Format(golangFormat), nil + }, + }) + + AddFuncLookup("timerange", Info{ + Display: "Time Range", + Category: "datetime", + Description: "Random time string between start and end times", + Example: "10:15:30", + Output: "string", + Aliases: []string{ + "time interval", "time span", "time window", "between times", "bounded time", + }, + Keywords: []string{ + "timerange", "range", "between", "time", "start", "end", "bounds", "limits", "window", + }, + Params: []Param{ + { + Field: "starttime", + Display: "Start Time", + Type: "string", + Default: "00:00:00", + Description: "Start time string in the specified format", + }, + { + Field: "endtime", + Display: "End Time", + Type: "string", + Default: "23:59:59", + Description: "End time string in the specified format", + }, + { + Field: "format", + Display: "Format", + Type: "string", + Default: "HH:mm:ss", + Options: []string{"HH:mm:ss", "HH:mm", "hh:mm:ss a", "hh:mm a", "H:mm", "h:mm a"}, + Description: "Time format string. 
Supports Java time format patterns or Go time format patterns", + }, + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + format, err := info.GetString(m, "format") + if err != nil { + return nil, err + } + + startTime, err := info.GetString(m, "starttime") + if err != nil { + return nil, err + } + + endTime, err := info.GetString(m, "endtime") + if err != nil { + return nil, err + } + + // Convert java format to golang format + golangFormat := javaDateTimeFormatToGolangFormat(format) + + // Parse start and end times + start, err := time.Parse(golangFormat, startTime) + if err != nil { + // If parsing fails, use a default start time + start = time.Date(2000, 1, 1, 0, 0, 0, 0, time.UTC) + } + + end, err := time.Parse(golangFormat, endTime) + if err != nil { + // If parsing fails, use a default end time + end = time.Date(2000, 1, 1, 23, 59, 59, 999999999, time.UTC) + } + + // Generate random time between start and end + startNano := start.UnixNano() + endNano := end.UnixNano() + + if startNano > endNano { + startNano, endNano = endNano, startNano + } + + randomNano := int64(number(f, int(startNano), int(endNano))) + randomTime := time.Unix(0, randomNano).UTC() + + return randomTime.Format(golangFormat), nil + }, + }) + +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/doc.go b/vendor/github.com/brianvoe/gofakeit/v7/doc.go new file mode 100644 index 0000000000..dc06a1bfe7 --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/doc.go @@ -0,0 +1,4 @@ +/* +Package gofakeit provides a set of functions that generate random data +*/ +package gofakeit diff --git a/vendor/github.com/brianvoe/gofakeit/v7/emoji.go b/vendor/github.com/brianvoe/gofakeit/v7/emoji.go new file mode 100644 index 0000000000..198f46a62a --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/emoji.go @@ -0,0 +1,149 @@ +package gofakeit + +// Emoji will return a random fun emoji +func Emoji() string { return emoji(GlobalFaker) } + +// Emoji will return a random fun emoji +func (f *Faker) Emoji() string { return emoji(f) } + +func emoji(f *Faker) string { return getRandValue(f, []string{"emoji", "emoji"}) } + +// EmojiDescription will return a random fun emoji description +func EmojiDescription() string { return emojiDescription(GlobalFaker) } + +// EmojiDescription will return a random fun emoji description +func (f *Faker) EmojiDescription() string { return emojiDescription(f) } + +func emojiDescription(f *Faker) string { return getRandValue(f, []string{"emoji", "description"}) } + +// EmojiCategory will return a random fun emoji category +func EmojiCategory() string { return emojiCategory(GlobalFaker) } + +// EmojiCategory will return a random fun emoji category +func (f *Faker) EmojiCategory() string { return emojiCategory(f) } + +func emojiCategory(f *Faker) string { return getRandValue(f, []string{"emoji", "category"}) } + +// EmojiAlias will return a random fun emoji alias +func EmojiAlias() string { return emojiAlias(GlobalFaker) } + +// EmojiAlias will return a random fun emoji alias +func (f *Faker) EmojiAlias() string { return emojiAlias(f) } + +func emojiAlias(f *Faker) string { return getRandValue(f, []string{"emoji", "alias"}) } + +// EmojiTag will return a random fun emoji tag +func EmojiTag() string { return emojiTag(GlobalFaker) } + +// EmojiTag will return a random fun emoji tag +func (f *Faker) EmojiTag() string { return emojiTag(f) } + +func emojiTag(f *Faker) string { return getRandValue(f, []string{"emoji", "tag"}) } + +func addEmojiLookup() { + AddFuncLookup("emoji", Info{ 
+ Display: "Emoji", + Category: "emoji", + Description: "Digital symbol expressing feelings or ideas in text messages and online chats", + Example: "🤣", + Output: "string", + Aliases: []string{ + "emoticon symbol", + "chat icon", + "unicode pictograph", + "emotional glyph", + "digital expression", + }, + Keywords: []string{ + "emoji", "symbol", "text", "message", "online", "chats", "ideas", "feelings", "digital", "reaction", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return emoji(f), nil + }, + }) + + AddFuncLookup("emojidescription", Info{ + Display: "Emoji Description", + Category: "emoji", + Description: "Brief explanation of the meaning or emotion conveyed by an emoji", + Example: "face vomiting", + Output: "string", + Aliases: []string{ + "emoji meaning", + "emoji definition", + "emoji explanation", + "emoji summary", + "emoji interpretation", + }, + Keywords: []string{ + "emoji", "brief", "explanation", "meaning", "emotion", "conveyed", "context", "description", "usage", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return emojiDescription(f), nil + }, + }) + + AddFuncLookup("emojicategory", Info{ + Display: "Emoji Category", + Category: "emoji", + Description: "Group or classification of emojis based on their common theme or use, like 'smileys' or 'animals'", + Example: "Smileys & Emotion", + Output: "string", + Aliases: []string{ + "emoji group", + "emoji theme", + "emoji section", + "emoji classification", + "emoji grouping", + }, + Keywords: []string{ + "emoji", "smileys", "emotion", "animals", "theme", "classification", "set", "category", "collection", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return emojiCategory(f), nil + }, + }) + + AddFuncLookup("emojialias", Info{ + Display: "Emoji Alias", + Category: "emoji", + Description: "Alternative name or keyword used to represent a specific emoji in text or code", + Example: "smile", + Output: "string", + Aliases: []string{ + "emoji nickname", + "emoji shorthand", + "emoji label", + "emoji alt text", + "emoji identifier", + }, + Keywords: []string{ + "emoji", "alias", "smile", "code", "specific", "represent", "alternative", "keyword", "mapping", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return emojiAlias(f), nil + }, + }) + + AddFuncLookup("emojitag", Info{ + Display: "Emoji Tag", + Category: "emoji", + Description: "Label or keyword associated with an emoji to categorize or search for it easily", + Example: "happy", + Output: "string", + Aliases: []string{ + "emoji keyword", + "emoji marker", + "emoji label", + "emoji hashtag", + "emoji reference", + }, + Keywords: []string{ + "emoji", "tag", "happy", "associated", "categorize", "search", "label", "index", "metadata", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return emojiTag(f), nil + }, + }) + +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/error.go b/vendor/github.com/brianvoe/gofakeit/v7/error.go new file mode 100644 index 0000000000..ef0072232b --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/error.go @@ -0,0 +1,297 @@ +package gofakeit + +import ( + "errors" +) + +// Error will return a random generic error +func Error() error { + return err(GlobalFaker) +} + +// Error will return a random generic error +func (f *Faker) Error() error { + return err(f) +} + +func err(f *Faker) error { + genStr, _ := generate(f, getRandValue(f, []string{"error", "generic"})) + return errors.New(genStr) +} + +// 
ErrorObject will return a random error object word +func ErrorObject() error { + return errorObject(GlobalFaker) +} + +// ErrorObject will return a random error object word +func (f *Faker) ErrorObject() error { + return errorObject(f) +} + +func errorObject(f *Faker) error { + genStr, _ := generate(f, getRandValue(f, []string{"error", "object"})) + return errors.New(genStr) +} + +// ErrorDatabase will return a random database error +func ErrorDatabase() error { + return errorDatabase(GlobalFaker) +} + +// ErrorDatabase will return a random database error +func (f *Faker) ErrorDatabase() error { + return errorDatabase(f) +} + +func errorDatabase(f *Faker) error { + genStr, _ := generate(f, getRandValue(f, []string{"error", "database"})) + return errors.New(genStr) +} + +// ErrorGRPC will return a random gRPC error +func ErrorGRPC() error { + return errorGRPC(GlobalFaker) +} + +// ErrorGRPC will return a random gRPC error +func (f *Faker) ErrorGRPC() error { + return errorGRPC(f) +} + +func errorGRPC(f *Faker) error { + genStr, _ := generate(f, getRandValue(f, []string{"error", "grpc"})) + return errors.New(genStr) +} + +// ErrorHTTP will return a random HTTP error +func ErrorHTTP() error { + return errorHTTP(GlobalFaker) +} + +// ErrorHTTP will return a random HTTP error +func (f *Faker) ErrorHTTP() error { + return errorHTTP(f) +} + +func errorHTTP(f *Faker) error { + genStr, _ := generate(f, getRandValue(f, []string{"error", "http"})) + return errors.New(genStr) +} + +// ErrorHTTPClient will return a random HTTP client error response (400-418) +func ErrorHTTPClient() error { + return errorHTTPClient(GlobalFaker) +} + +// ErrorHTTPClient will return a random HTTP client error response (400-418) +func (f *Faker) ErrorHTTPClient() error { + return errorHTTPClient(f) +} + +func errorHTTPClient(f *Faker) error { + genStr, _ := generate(f, getRandValue(f, []string{"error", "http_client"})) + return errors.New(genStr) +} + +// ErrorHTTPServer will return a random HTTP server error response (500-511) +func ErrorHTTPServer() error { + return errorHTTPServer(GlobalFaker) +} + +// ErrorHTTPServer will return a random HTTP server error response (500-511) +func (f *Faker) ErrorHTTPServer() error { + return errorHTTPServer(f) +} + +func errorHTTPServer(f *Faker) error { + genStr, _ := generate(f, getRandValue(f, []string{"error", "http_server"})) + return errors.New(genStr) +} + +// ErrorRuntime will return a random runtime error +func ErrorRuntime() error { + return errorRuntime(GlobalFaker) +} + +// ErrorRuntime will return a random runtime error +func (f *Faker) ErrorRuntime() error { + return errorRuntime(f) +} + +func errorRuntime(f *Faker) error { + genStr, _ := generate(f, getRandValue(f, []string{"error", "runtime"})) + return errors.New(genStr) +} + +// ErrorValidation will return a random validation error +func ErrorValidation() error { + return errorValidation(GlobalFaker) +} + +// ErrorValidation will return a random validation error +func (f *Faker) ErrorValidation() error { + return errorValidation(f) +} + +func errorValidation(f *Faker) error { + genStr, _ := generate(f, getRandValue(f, []string{"error", "validation"})) + return errors.New(genStr) +} + +func addErrorLookup() { + AddFuncLookup("error", Info{ + Display: "Error", + Category: "error", + Description: "Message displayed by a computer or software when a problem or mistake is encountered", + Example: "syntax error", + Output: "string", + Aliases: []string{ + "fault", "problem", "issue", "bug", "failure", + }, + Keywords: 
[]string{ + "software", "computer", "crash", "exception", "warning", "alert", "diagnostic", "system", "message", "malfunction", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return err(f), nil + }, + }) + + AddFuncLookup("errorobject", Info{ + Display: "Error Object", + Category: "error", + Description: "Various categories conveying details about encountered errors", + Example: "protocol", + Output: "string", + Aliases: []string{ + "category", "classification", "entity", "type", "object detail", + }, + Keywords: []string{ + "protocol", "context", "identifier", "descriptor", "domain", "nature", "tag", "origin", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return errorObject(f), nil + }, + }) + + AddFuncLookup("errordatabase", Info{ + Display: "Database Error", + Category: "error", + Description: "A problem or issue encountered while accessing or managing a database", + Example: "sql error", + Output: "string", + Aliases: []string{ + "db error", "query issue", "storage failure", "sql fault", "data access problem", + }, + Keywords: []string{ + "connection", "query", "timeout", "transaction", "integrity", "constraint", "lock", "schema", "management", "corruption", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return errorDatabase(f), nil + }, + }) + + AddFuncLookup("errorgrpc", Info{ + Display: "gRPC Error", + Category: "error", + Description: "Communication failure in the high-performance, open-source universal RPC framework", + Example: "client protocol error", + Output: "string", + Aliases: []string{ + "grpc failure", "rpc error", "rpc failure", "communication fault", "transport issue", + }, + Keywords: []string{ + "protocol", "transport", "client", "server", "connection", "status", "unavailable", "timeout", "stream", "call", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return errorGRPC(f), nil + }, + }) + + AddFuncLookup("errorhttp", Info{ + Display: "HTTP Error", + Category: "error", + Description: "A problem with a web HTTP request", + Example: "invalid method", + Output: "string", + Aliases: []string{ + "http failure", "network error", "web problem", "request fault", "protocol issue", + }, + Keywords: []string{ + "invalid", "method", "status", "response", "request", "header", "url", "timeout", "redirect", "forbidden", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return errorHTTP(f), nil + }, + }) + + AddFuncLookup("errorhttpclient", Info{ + Display: "HTTP Client Error", + Category: "error", + Description: "Failure or issue occurring within a client software that sends requests to web servers", + Example: "request timeout", + Output: "string", + Aliases: []string{ + "client failure", "browser error", "request timeout", "frontend fault", "http client issue", + }, + Keywords: []string{ + "timeout", "request", "forbidden", "unauthorized", + "network", "connectivity", "invalid", "failure", "rejected", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return errorHTTPClient(f), nil + }, + }) + + AddFuncLookup("errorhttpserver", Info{ + Display: "HTTP Server Error", + Category: "error", + Description: "Failure or issue occurring within a server software that receives requests from clients", + Example: "internal server error", + Output: "string", + Aliases: []string{ + "server fault", "backend error", "host issue", "service failure", "internal error", + }, + Keywords: []string{ + "unavailable", "overload", "gateway", 
"crash", "timeout", "backend", "processing", "failure", "503", "unexpected", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return errorHTTPServer(f), nil + }, + }) + + AddFuncLookup("errorruntime", Info{ + Display: "Runtime Error", + Category: "error", + Description: "Malfunction occurring during program execution, often causing abrupt termination or unexpected behavior", + Example: "address out of bounds", + Output: "string", + Aliases: []string{ + "execution error", "program crash", "runtime failure", "unexpected fault", "software halt", + }, + Keywords: []string{ + "execution", "segmentation", "overflow", "invalid", "null", "panic", "crash", "termination", "exception", "bug", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return errorRuntime(f), nil + }, + }) + + AddFuncLookup("errorvalidation", Info{ + Display: "Validation Error", + Category: "error", + Description: "Occurs when input data fails to meet required criteria or format specifications", + Example: "missing required field", + Output: "string", + Aliases: []string{ + "invalid input", "format error", "data check failure", "input rejection", "criteria mismatch", + }, + Keywords: []string{ + "missing", "required", "field", "constraint", "format", "rule", "criteria", "restriction", "validation", "check", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return errorValidation(f), nil + }, + }) + +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/fakeable.go b/vendor/github.com/brianvoe/gofakeit/v7/fakeable.go new file mode 100644 index 0000000000..01932ec27c --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/fakeable.go @@ -0,0 +1,84 @@ +package gofakeit + +import ( + "errors" + "fmt" + "reflect" +) + +// Fakeable is an interface that can be implemented by a type to provide a custom fake value. +type Fakeable interface { + // Fake returns a fake value for the type. 
+ Fake(faker *Faker) (any, error) +} + +func isFakeable(t reflect.Type) bool { + fakeableTyp := reflect.TypeOf((*Fakeable)(nil)).Elem() + + return t.Implements(fakeableTyp) || reflect.PointerTo(t).Implements(fakeableTyp) +} + +func callFake(faker *Faker, v reflect.Value, possibleKinds ...reflect.Kind) (any, error) { + f, ok := v.Addr().Interface().(Fakeable) + if !ok { + return nil, errors.New("not a Fakeable type") + } + + fakedValue, err := f.Fake(faker) + if err != nil { + return nil, fmt.Errorf("error calling Fake: %w", err) + } + k := reflect.TypeOf(fakedValue).Kind() + if !containsKind(possibleKinds, k) { + return nil, fmt.Errorf("returned value kind %q is not amongst the valid ones: %v", k, possibleKinds) + } + + switch k { + case reflect.String: + return reflect.ValueOf(fakedValue).String(), nil + case reflect.Bool: + return reflect.ValueOf(fakedValue).Bool(), nil + case reflect.Int: + return int(reflect.ValueOf(fakedValue).Int()), nil + case reflect.Int8: + return int8(reflect.ValueOf(fakedValue).Int()), nil + case reflect.Int16: + return int16(reflect.ValueOf(fakedValue).Int()), nil + case reflect.Int32: + return int32(reflect.ValueOf(fakedValue).Int()), nil + case reflect.Int64: + return int64(reflect.ValueOf(fakedValue).Int()), nil + case reflect.Uint: + return uint(reflect.ValueOf(fakedValue).Uint()), nil + case reflect.Uint8: + return uint8(reflect.ValueOf(fakedValue).Uint()), nil + case reflect.Uint16: + return uint16(reflect.ValueOf(fakedValue).Uint()), nil + case reflect.Uint32: + return uint32(reflect.ValueOf(fakedValue).Uint()), nil + case reflect.Uint64: + return uint64(reflect.ValueOf(fakedValue).Uint()), nil + case reflect.Float32: + return float32(reflect.ValueOf(fakedValue).Float()), nil + case reflect.Float64: + return float64(reflect.ValueOf(fakedValue).Float()), nil + case reflect.Slice, reflect.Array: + return reflect.ValueOf(fakedValue).Interface(), nil + case reflect.Map: + return reflect.ValueOf(fakedValue).Interface(), nil + case reflect.Struct: + return reflect.ValueOf(fakedValue).Interface(), nil + + default: + return nil, fmt.Errorf("unsupported type %q", k) + } +} + +func containsKind(possibleKinds []reflect.Kind, kind reflect.Kind) bool { + for _, k := range possibleKinds { + if k == kind { + return true + } + } + return false +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/faker.go b/vendor/github.com/brianvoe/gofakeit/v7/faker.go new file mode 100644 index 0000000000..2271eac4ec --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/faker.go @@ -0,0 +1,112 @@ +package gofakeit + +import ( + "errors" + "math/rand/v2" + "reflect" + "sync" + + "github.com/brianvoe/gofakeit/v7/source" +) + +// Create global variable to deal with global function call +var GlobalFaker *Faker = New(0) + +// Faker struct is the primary struct for using localized +type Faker struct { + Rand rand.Source + + // Lock to make thread safe + Locked bool + mu sync.Mutex +} + +// New creates and returns a new Faker struct seeded with a given seed +// using the PCG algorithm in lock mode for thread safety +func New(seed uint64) *Faker { + // If seed is 0, use a random crypto seed + if seed == 0 { + faker := NewFaker(source.NewCrypto(), false) + seed = faker.Uint64() + } + + return &Faker{ + Rand: rand.NewPCG(seed, seed), + Locked: true, + } +} + +// NewFaker takes in a rand.Source and thread lock state and returns a new Faker struct +func NewFaker(src rand.Source, lock bool) *Faker { + return &Faker{ + Rand: src, + Locked: lock, + } +} + +// Seed attempts to seed the Faker 
with the given seed +func (f *Faker) Seed(args ...any) error { + // Lock if locked + if f.Locked { + f.mu.Lock() + defer f.mu.Unlock() + } + + // Ensure GlobalFaker is not nil and Rand is initialized + if GlobalFaker == nil || GlobalFaker.Rand == nil { + return errors.New("GlobalFaker or GlobalFaker.Rand is nil") + } + + // If args is empty or 0, seed with a random crypto seed + if len(args) == 0 { + faker := NewFaker(source.NewCrypto(), false) + args = append(args, faker.Uint64()) + } + + if args[0] == 0 { + faker := NewFaker(source.NewCrypto(), false) + args[0] = faker.Uint64() + } + + // Retrieve the Seed method + method := reflect.ValueOf(GlobalFaker.Rand).MethodByName("Seed") + if !method.IsValid() { + return errors.New("Seed method not found") + } + + // Adjust args if method requires exactly 2 args but only 1 was provided + if method.Type().NumIn() == 2 && len(args) == 1 { + args = append(args, args[0]) // Duplicate the first value if only one is provided + } + + // Get array of function argument types and prepare converted arguments + argTypes := make([]reflect.Type, method.Type().NumIn()) + convertedArgs := make([]reflect.Value, len(args)) + for i := 0; i < method.Type().NumIn(); i++ { + argTypes[i] = method.Type().In(i) + } + + // Convert args to the expected type by the Seed method + for i, arg := range args { + if i < len(argTypes) { // Ensure arg index is within argTypes bounds + argValue := reflect.ValueOf(arg) + // Check if conversion is necessary + if argValue.Type().ConvertibleTo(argTypes[i]) { + convertedArgs[i] = argValue.Convert(argTypes[i]) + } else { + // If not convertible, use the argument as is (reflectively) + convertedArgs[i] = argValue + } + } + } + + // Dynamically call the Seed method with converted arguments + method.Call(convertedArgs) + + return nil +} + +// Seed attempts to seed the GlobalFaker with the given seed +func Seed(args ...any) error { + return GlobalFaker.Seed(args...) 
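+	// Illustrative note: seeding with a fixed value, e.g. Seed(11), makes
+	// subsequent package-level calls deterministic for a given library version;
+	// Seed() or Seed(0) re-seeds from the crypto source as described above.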
+} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/file.go b/vendor/github.com/brianvoe/gofakeit/v7/file.go new file mode 100644 index 0000000000..6d5e6fc0ae --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/file.go @@ -0,0 +1,53 @@ +package gofakeit + +// FileExtension will generate a random file extension +func FileExtension() string { return fileExtension(GlobalFaker) } + +// FileExtension will generate a random file extension +func (f *Faker) FileExtension() string { return fileExtension(f) } + +func fileExtension(f *Faker) string { return getRandValue(f, []string{"file", "extension"}) } + +// FileMimeType will generate a random mime file type +func FileMimeType() string { return fileMimeType(GlobalFaker) } + +// FileMimeType will generate a random mime file type +func (f *Faker) FileMimeType() string { return fileMimeType(f) } + +func fileMimeType(f *Faker) string { return getRandValue(f, []string{"file", "mime_type"}) } + +func addFileLookup() { + AddFuncLookup("fileextension", Info{ + Display: "File Extension", + Category: "file", + Description: "Suffix appended to a filename indicating its format or type", + Example: "nes", + Output: "string", + Aliases: []string{ + "extension", "file suffix", "filename ending", "type indicator", "file ending", "format suffix", + }, + Keywords: []string{ + "file", "appended", "indicating", "format", "type", "filename", "suffix", "descriptor", "notation", "identifier", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return fileExtension(f), nil + }, + }) + + AddFuncLookup("filemimetype", Info{ + Display: "File Mime Type", + Category: "file", + Description: "Defines file format and nature for browsers and email clients using standardized identifiers", + Example: "application/json", + Output: "string", + Aliases: []string{ + "mime type", "content type", "internet media type", "media format", "standard identifier", "file format", + }, + Keywords: []string{ + "file", "defines", "nature", "clients", "identifiers", "application", "json", "browser", "email", "protocol", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return fileMimeType(f), nil + }, + }) +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/finance.go b/vendor/github.com/brianvoe/gofakeit/v7/finance.go new file mode 100644 index 0000000000..dca8e57c9c --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/finance.go @@ -0,0 +1,131 @@ +package gofakeit + +import ( + "strconv" + "unicode" +) + +const cusipStr = upperStr + numericStr + +// CUSIP +func Cusip() string { + return cusip(GlobalFaker) +} + +func (f *Faker) Cusip() string { + return cusip(f) +} + +func cusip(f *Faker) string { + cusipBytes := make([]byte, 8) + for i := 0; i < len(cusipBytes); i++ { + cusipBytes[i] = byte(cusipStr[f.IntN(len(cusipStr))]) + } + + baseCusip := string(cusipBytes) + + chkDigit := cusipChecksumDigit(baseCusip) + return baseCusip + chkDigit +} + +// ISIN +func Isin() string { + return isin(GlobalFaker) +} + +func (f *Faker) Isin() string { + return isin(f) +} + +func isin(f *Faker) string { + countryCode := countryAbr(f) + nsin := cusip(f) + isinChkDig := isinChecksumDigit(countryCode + nsin) + return countryCode + nsin + isinChkDig +} + +// cusipChecksumDigit returns the checksum digit for a CUSIP +func cusipChecksumDigit(cusip string) string { + sum := 0 + for i, c := range cusip { + v := 0 + if unicode.IsDigit(c) { + v = int(c - '0') + } + if unicode.IsLetter(c) { + //0-indexed ordinal position of Letter + 10 + v = int(c-'A') + 10 + 
} + if i%2 != 0 { + // Multiply odd digits by two + v = v * 2 + } + + sum = sum + int(v/10) + v%10 + } + + return strconv.Itoa((10 - (sum % 10)) % 10) +} + +// isinChecksumDigit returns the checksum digit for an ISIN +func isinChecksumDigit(isin string) string { + isinDigits := make([]int, 0) + for _, c := range isin { + if unicode.IsLetter(c) { + letterVal := int(c) - 55 + // Each digit is added as a separate value + isinDigits = append(isinDigits, letterVal/10) + isinDigits = append(isinDigits, letterVal%10) + } + if unicode.IsDigit(c) { + isinDigits = append(isinDigits, int(c-'0')) + } + } + + oddSum := 0 + evenSum := 0 + + // Take the per digit sum of the digitized ISIN, doubling even indexed digits + for i, d := range isinDigits { + if i%2 == 0 { + elem := 2 * d + if elem > 9 { + // If the element now has two digits, sum those digits + elem = (elem % 10) + (elem / 10) + } + evenSum += elem + } else { + oddSum += d + } + } + + return strconv.Itoa((10 - (oddSum+evenSum)%10) % 10) +} + +// Lookup Adds +func addFinanceLookup() { + AddFuncLookup("cusip", Info{ + Display: "CUSIP", + Category: "finance", + Description: "Unique identifier for securities, especially bonds, in the United States and Canada", + Example: "38259P508", + Output: "string", + Aliases: []string{"identifier", "bond", "security", "us", "canada", "unique"}, + Keywords: []string{"finance", "investment", "trading", "securities", "38259p508", "checksum", "validation", "market"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return cusip(f), nil + }, + }) + AddFuncLookup("isin", Info{ + Display: "ISIN", + Category: "finance", + Description: "International standard code for uniquely identifying securities worldwide", + Example: "CVLRQCZBXQ97", + Output: "string", + Aliases: []string{"international", "securities", "identifier", "stock", "bond", "security"}, + Keywords: []string{"finance", "investment", "trading", "cvlrqczbxq97", "worldwide", "standard", "code", "global"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return isin(f), nil + }, + }) +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/food.go b/vendor/github.com/brianvoe/gofakeit/v7/food.go new file mode 100644 index 0000000000..7f1a84f81a --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/food.go @@ -0,0 +1,274 @@ +package gofakeit + +import ( + "strings" +) + +// Fruit will return a random fruit name +func Fruit() string { return fruit(GlobalFaker) } + +// Fruit will return a random fruit name +func (f *Faker) Fruit() string { return fruit(f) } + +func fruit(f *Faker) string { return getRandValue(f, []string{"food", "fruit"}) } + +// Vegetable will return a random vegetable name +func Vegetable() string { return vegetable(GlobalFaker) } + +// Vegetable will return a random vegetable name +func (f *Faker) Vegetable() string { return vegetable(f) } + +func vegetable(f *Faker) string { return getRandValue(f, []string{"food", "vegetable"}) } + +// Breakfast will return a random breakfast name +func Breakfast() string { return breakfast(GlobalFaker) } + +// Breakfast will return a random breakfast name +func (f *Faker) Breakfast() string { return breakfast(f) } + +func breakfast(f *Faker) string { + v := getRandValue(f, []string{"food", "breakfast"}) + return strings.ToUpper(v[:1]) + v[1:] +} + +// Lunch will return a random lunch name +func Lunch() string { return lunch(GlobalFaker) } + +// Lunch will return a random lunch name +func (f *Faker) Lunch() string { return lunch(f) } + +func lunch(f *Faker) string 
{ + v := getRandValue(f, []string{"food", "lunch"}) + return strings.ToUpper(v[:1]) + v[1:] +} + +// Dinner will return a random dinner name +func Dinner() string { return dinner(GlobalFaker) } + +// Dinner will return a random dinner name +func (f *Faker) Dinner() string { return dinner(f) } + +func dinner(f *Faker) string { + v := getRandValue(f, []string{"food", "dinner"}) + return strings.ToUpper(v[:1]) + v[1:] +} + +// Drink will return a random drink name +func Drink() string { return drink(GlobalFaker) } + +// Drink will return a random drink name +func (f *Faker) Drink() string { return drink(f) } + +func drink(f *Faker) string { + v := getRandValue(f, []string{"food", "drink"}) + return strings.ToUpper(v[:1]) + v[1:] +} + +// Snack will return a random snack name +func Snack() string { return snack(GlobalFaker) } + +// Snack will return a random snack name +func (f *Faker) Snack() string { return snack(f) } + +func snack(f *Faker) string { + v := getRandValue(f, []string{"food", "snack"}) + return strings.ToUpper(v[:1]) + v[1:] +} + +// Dessert will return a random dessert name +func Dessert() string { return dessert(GlobalFaker) } + +// Dessert will return a random dessert name +func (f *Faker) Dessert() string { return dessert(f) } + +func dessert(f *Faker) string { + v := getRandValue(f, []string{"food", "dessert"}) + return strings.ToUpper(v[:1]) + v[1:] +} + +func addFoodLookup() { + AddFuncLookup("fruit", Info{ + Display: "Fruit", + Category: "food", + Description: "Edible plant part, typically sweet, enjoyed as a natural snack or dessert", + Example: "Peach", + Output: "string", + Aliases: []string{ + "fruit item", + "natural snack", + "sweet produce", + "edible plant food", + "dessert fruit", + }, + Keywords: []string{ + "fruit", "edible", "plant", "peach", + "snack", "dessert", "sweet", "natural", + "produce", "fresh", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return fruit(f), nil + }, + }) + + AddFuncLookup("vegetable", Info{ + Display: "Vegetable", + Category: "food", + Description: "Edible plant or part of a plant, often used in savory cooking or salads", + Example: "Amaranth Leaves", + Output: "string", + Aliases: []string{ + "veggie", + "plant food", + "green produce", + "savory food", + "leafy edible", + }, + Keywords: []string{ + "vegetable", "greens", "produce", "amaranth", + "leaves", "cooking", "salads", "plant", + "edible", "savory", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return vegetable(f), nil + }, + }) + + AddFuncLookup("breakfast", Info{ + Display: "Breakfast", + Category: "food", + Description: "First meal of the day, typically eaten in the morning", + Example: "Blueberry banana happy face pancakes", + Output: "string", + Aliases: []string{ + "morning meal", + "first meal", + "day starter", + "early food", + "sunrise meal", + }, + Keywords: []string{ + "breakfast", "morning", "meal", "start", + "pancakes", "blueberry", "banana", "food", + "first", "early", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return breakfast(f), nil + }, + }) + + AddFuncLookup("lunch", Info{ + Display: "Lunch", + Category: "food", + Description: "Midday meal, often lighter than dinner, eaten around noon", + Example: "No bake hersheys bar pie", + Output: "string", + Aliases: []string{ + "midday meal", + "noon food", + "afternoon meal", + "light meal", + "daytime meal", + }, + Keywords: []string{ + "lunch", "meal", "midday", "noon", + "lighter", "food", "pie", "bar", + "afternoon", + 
}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return lunch(f), nil + }, + }) + + AddFuncLookup("dinner", Info{ + Display: "Dinner", + Category: "food", + Description: "Evening meal, typically the day's main and most substantial meal", + Example: "Wild addicting dip", + Output: "string", + Aliases: []string{ + "evening meal", + "main meal", + "days supper", + "night food", + "hearty meal", + }, + Keywords: []string{ + "dinner", "supper", "evening", "meal", + "main", "substantial", "night", "food", + "heavy", "course", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return dinner(f), nil + }, + }) + + AddFuncLookup("drink", Info{ + Display: "Drink", + Category: "food", + Description: "Liquid consumed for hydration, pleasure, or nutritional benefits", + Example: "Soda", + Output: "string", + Aliases: []string{ + "beverage", + "refreshment", + "hydration", + "liquid food", + "consumable fluid", + }, + Keywords: []string{ + "drink", "soda", "liquid", + "pleasure", "nutrition", "fluid", "quencher", + "consumed", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return drink(f), nil + }, + }) + + AddFuncLookup("snack", Info{ + Display: "Snack", + Category: "food", + Description: "Small, quick food item eaten between meals", + Example: "Trail mix", + Output: "string", + Aliases: []string{ + "light bite", + "quick food", + "mini meal", + "finger food", + "nibble", + }, + Keywords: []string{ + "snack", "between", "meals", "quick", + "small", "food", "item", "random", + "bite", "treat", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return snack(f), nil + }, + }) + + AddFuncLookup("dessert", Info{ + Display: "Dessert", + Category: "food", + Description: "Sweet treat often enjoyed after a meal", + Example: "French napoleons", + Output: "string", + Aliases: []string{ + "after meal sweet", + "pastry treat", + "confection", + "final course", + "delicacy", + }, + Keywords: []string{ + "dessert", "sweet", "treat", "meal", + "after", "pastry", "cake", "enjoyed", + "final", "sugar", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return dessert(f), nil + }, + }) + +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/game.go b/vendor/github.com/brianvoe/gofakeit/v7/game.go new file mode 100644 index 0000000000..705b8579b4 --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/game.go @@ -0,0 +1,126 @@ +package gofakeit + +import ( + "fmt" + "strings" +) + +// Gamertag will generate a random video game username +func Gamertag() string { return gamertag(GlobalFaker) } + +// Gamertag will generate a random video game username +func (f *Faker) Gamertag() string { return gamertag(f) } + +func gamertag(f *Faker) string { + str := "" + num := number(f, 1, 4) + switch num { + case 1: + str = fmt.Sprintf("%s%ser", title(nounConcrete(f)), title(verbAction(f))) + case 2: + str = fmt.Sprintf("%s%s", title(adjectiveDescriptive(f)), title(animal(f))) + case 3: + str = fmt.Sprintf("%s%s", title(adjectiveDescriptive(f)), title(nounConcrete(f))) + case 4: + str = fmt.Sprintf("%s%s", title(fruit(f)), title(adjectiveDescriptive(f))) + } + + // Randomly determine if we should add a number + if f.IntN(3) == 1 { + str += digitN(f, uint(number(f, 1, 3))) + } + + // Remove any spaces + str = strings.Replace(str, " ", "", -1) + + return str +} + +// Dice will generate a random set of dice +func Dice(numDice uint, sides []uint) []uint { return dice(GlobalFaker, numDice, sides) } + +// Dice will 
generate a random set of dice +func (f *Faker) Dice(numDice uint, sides []uint) []uint { return dice(f, numDice, sides) } + +func dice(f *Faker, numDice uint, sides []uint) []uint { + dice := make([]uint, numDice) + + // If we dont have any sides well set the sides to 6 + if len(sides) == 0 { + sides = []uint{6} + } + + for i := range dice { + // If sides[i] doesnt exist use the first side + if len(sides)-1 < i { + dice[i] = uint(number(f, 1, int(sides[0]))) + } else { + dice[i] = uint(number(f, 1, int(sides[i]))) + } + } + + return dice +} + +func addGameLookup() { + AddFuncLookup("gamertag", Info{ + Display: "Gamertag", + Category: "game", + Description: "User-selected online username or alias used for identification in games", + Example: "footinterpret63", + Output: "string", + Aliases: []string{ + "player handle", + "gaming nickname", + "online tag", + "user alias", + "profile name", + }, + Keywords: []string{ + "gamertag", "user-selected", "username", + "alias", "identification", "online", "gaming", + "video", "games", "player", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return gamertag(f), nil + }, + }) + + AddFuncLookup("dice", Info{ + Display: "Dice", + Category: "game", + Description: "Small, cube-shaped objects used in games of chance for random outcomes", + Example: "[5, 2, 3]", + Output: "[]uint", + Aliases: []string{ + "rolling cubes", + "chance cubes", + "game dice", + "random rollers", + "luck blocks", + }, + Keywords: []string{ + "dice", "games", "cube-shaped", "chance", + "random", "outcomes", "roll", "sides", + "objects", "probability", + }, + Params: []Param{ + {Field: "numdice", Display: "Number of Dice", Type: "uint", Default: "1", Description: "Number of dice to roll"}, + {Field: "sides", Display: "Number of Sides", Type: "[]uint", Default: "[6]", Description: "Number of sides on each dice"}, + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + numDice, err := info.GetUint(m, "numdice") + if err != nil { + return nil, err + } + + sides, err := info.GetUintArray(m, "sides") + if err != nil { + return nil, err + } + + return dice(f, numDice, sides), nil + }, + }) + +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/generate.go b/vendor/github.com/brianvoe/gofakeit/v7/generate.go new file mode 100644 index 0000000000..d4ba4ad1e3 --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/generate.go @@ -0,0 +1,645 @@ +package gofakeit + +import ( + "encoding/json" + "errors" + "fmt" + "math" + "regexp/syntax" + "strings" +) + +// Generate fake information from given string. +// Replaceable values should be within {} +// +// Functions +// Ex: {firstname} - billy +// Ex: {sentence:3} - Record river mind. +// Ex: {number:1,10} - 4 +// Ex: {uuid} - 590c1440-9888-45b0-bd51-a817ee07c3f2 +// +// Letters/Numbers +// Ex: ### - 481 - random numbers +// Ex: ??? - fda - random letters +// +// For a complete list of runnable functions use FuncsLookup +func Generate(dataVal string) (string, error) { return generate(GlobalFaker, dataVal) } + +// Generate fake information from given string. +// Replaceable values should be within {} +// +// Functions +// Ex: {firstname} - billy +// Ex: {sentence:3} - Record river mind. +// Ex: {number:1,10} - 4 +// Ex: {uuid} - 590c1440-9888-45b0-bd51-a817ee07c3f2 +// +// Letters/Numbers +// Ex: ### - 481 - random numbers +// Ex: ??? 
- fda - random letters +// +// For a complete list of runnable functions use FuncsLookup +func (f *Faker) Generate(dataVal string) (string, error) { return generate(f, dataVal) } + +func generate(f *Faker, dataVal string) (string, error) { + // Replace # with numbers and ? with letters + dataVal = replaceWithNumbers(f, dataVal) + dataVal = replaceWithLetters(f, dataVal) + + // Check if string has any replaceable values + // Even if it doesnt its ok we will just return the string + if !strings.Contains(dataVal, "{") && !strings.Contains(dataVal, "}") { + return dataVal, nil + } + + // Variables to identify the index in which it exists + startCurly := -1 + startCurlyIgnore := []int{} + endCurly := -1 + endCurlyIgnore := []int{} + + // Loop through string characters + for i := 0; i < len(dataVal); i++ { + // Check for ignores if equal skip + shouldSkip := false + for _, igs := range startCurlyIgnore { + if i == igs { + shouldSkip = true + } + } + for _, ige := range endCurlyIgnore { + if i == ige { + shouldSkip = true + } + } + if shouldSkip { + continue + } + + // Identify items between brackets. Ex: {firstname} + if string(dataVal[i]) == "{" { + startCurly = i + continue + } + if startCurly != -1 && string(dataVal[i]) == "}" { + endCurly = i + } + if startCurly == -1 || endCurly == -1 { + continue + } + + // Get the value between brackets + fParts := dataVal[startCurly+1 : endCurly] + + // Check if has params separated by : + fNameSplit := strings.SplitN(fParts, ":", 2) + fName := "" + fParams := "" + if len(fNameSplit) >= 1 { + fName = fNameSplit[0] + } + if len(fNameSplit) >= 2 { + fParams = fNameSplit[1] + } + + // Check to see if its a replaceable lookup function + if info := GetFuncLookup(fName); info != nil { + // Get parameters, make sure params and the split both have values + mapParams := NewMapParams() + paramsLen := len(info.Params) + + // If just one param and its a string simply just pass it + if paramsLen == 1 && info.Params[0].Type == "string" { + mapParams.Add(info.Params[0].Field, fParams) + } else if paramsLen > 0 && fParams != "" { + var err error + splitVals, err := funcLookupSplit(fParams) + if err != nil { + return "", err + } + mapParams, err = addSplitValsToMapParams(splitVals, info, mapParams) + if err != nil { + return "", err + } + } + if mapParams.Size() == 0 { + mapParams = nil + } + + // Call function + fValue, err := info.Generate(f, mapParams, info) + if err != nil { + return "", err + } + + // Successfully found, run replace with new value + dataVal = strings.Replace(dataVal, "{"+fParts+"}", fmt.Sprintf("%v", fValue), 1) + + // Reset the curly index back to -1 and reset ignores + startCurly = -1 + startCurlyIgnore = []int{} + endCurly = -1 + endCurlyIgnore = []int{} + i = -1 // Reset back to the start of the string + continue + } + + // Couldnt find anything - mark curly brackets to skip and rerun + startCurlyIgnore = append(startCurlyIgnore, startCurly) + endCurlyIgnore = append(endCurlyIgnore, endCurly) + + // Reset the curly index back to -1 + startCurly = -1 + endCurly = -1 + i = -1 // Reset back to the start of the string + continue + } + + return dataVal, nil +} + +// FixedWidthOptions defines values needed for csv generation +type FixedWidthOptions struct { + RowCount int `json:"row_count" xml:"row_count" fake:"{number:1,10}"` + Fields []Field `json:"fields" xml:"fields" fake:"{fields}"` +} + +// FixedWidth generates an table of random data in fixed width format +// A nil FixedWidthOptions returns a randomly structured FixedWidth. 
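+//
+// Rough usage sketch (field functions reuse the same defaults shown below in
+// fixeWidthFunc; the values are illustrative only):
+//
+//	out, _ := FixedWidth(&FixedWidthOptions{
+//		RowCount: 3,
+//		Fields: []Field{
+//			{Name: "Name", Function: "{firstname} {lastname}"},
+//			{Name: "Email", Function: "email"},
+//		},
+//	})
+//
+func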
+func FixedWidth(co *FixedWidthOptions) (string, error) { return fixeWidthFunc(GlobalFaker, co) } + +// FixedWidth generates an table of random data in fixed width format +// A nil FixedWidthOptions returns a randomly structured FixedWidth. +func (f *Faker) FixedWidth(co *FixedWidthOptions) (string, error) { return fixeWidthFunc(f, co) } + +// Function to generate a fixed width document +func fixeWidthFunc(f *Faker, co *FixedWidthOptions) (string, error) { + // If we didn't get FixedWidthOptions, create a new random one + if co == nil { + co = &FixedWidthOptions{} + } + + // Make sure you set a row count + if co.RowCount <= 0 { + co.RowCount = f.IntN(10) + 1 + } + + // Check fields + if len(co.Fields) <= 0 { + // Create random fields + co.Fields = []Field{ + {Name: "Name", Function: "{firstname} {lastname}"}, + {Name: "Email", Function: "email"}, + {Name: "Password", Function: "password", Params: MapParams{"special": {"false"}, "space": {"false"}}}, + } + } + + data := [][]string{} + hasHeader := false + + // Loop through fields, generate data and add to data array + for _, field := range co.Fields { + // Start new row + row := []string{} + + // Add name to first value + if field.Name != "" { + hasHeader = true + } + row = append(row, field.Name) + + // Get function + funcInfo := GetFuncLookup(field.Function) + var value any + if funcInfo == nil { + // Try to run the function through generate + for i := 0; i < co.RowCount; i++ { + genStr, err := generate(f, field.Function) + if err != nil { + return "", err + } + + row = append(row, genStr) + } + } else { + // Generate function value + var err error + for i := 0; i < co.RowCount; i++ { + value, err = funcInfo.Generate(f, &field.Params, funcInfo) + if err != nil { + return "", err + } + + // Add value to row + row = append(row, anyToString(value)) + } + } + + // Add row to data + data = append(data, row) + } + + var result strings.Builder + + // Calculate column widths + colWidths := make([]int, len(data)) + for i, row := range data { + for _, value := range row { + width := len(value) + 5 + if width > colWidths[i] { + colWidths[i] = width + } + } + } + + // Append table rows to the string, excluding the entire row if the first value is empty + for i := 0; i < len(data[0]); i++ { + if !hasHeader && i == 0 { + continue // Skip the entire column if the first value is empty + } + + var resultRow strings.Builder + for j, row := range data { + resultRow.WriteString(fmt.Sprintf("%-*s", colWidths[j], row[i])) + } + + // Trim trailing spaces + result.WriteString(strings.TrimRight(resultRow.String(), " ")) + + // Only add new line if not the last row + if i != len(data[0])-1 { + result.WriteString("\n") + } + } + + return result.String(), nil +} + +// Regex will generate a string based upon a RE2 syntax +func Regex(regexStr string) string { return regex(GlobalFaker, regexStr) } + +// Regex will generate a string based upon a RE2 syntax +func (f *Faker) Regex(regexStr string) string { return regex(f, regexStr) } + +func regex(f *Faker, regexStr string) (gen string) { + re, err := syntax.Parse(regexStr, syntax.Perl) + if err != nil { + return "Could not parse regex string" + } + + // Panic catch + defer func() { + if r := recover(); r != nil { + gen = fmt.Sprint(f) + return + + } + }() + + return regexGenerate(f, re, len(regexStr)*100) +} + +func regexGenerate(f *Faker, re *syntax.Regexp, limit int) string { + if limit <= 0 { + panic("Length limit reached when generating output") + } + + op := re.Op + switch op { + case syntax.OpNoMatch: // matches no 
strings + // Do Nothing + case syntax.OpEmptyMatch: // matches empty string + return "" + case syntax.OpLiteral: // matches Runes sequence + var b strings.Builder + for _, ru := range re.Rune { + b.WriteRune(ru) + } + return b.String() + case syntax.OpCharClass: // matches Runes interpreted as range pair list + // number of possible chars + sum := 0 + for i := 0; i < len(re.Rune); i += 2 { + sum += int(re.Rune[i+1]-re.Rune[i]) + 1 + if re.Rune[i+1] == 0x10ffff { // rune range end + sum = -1 + break + } + } + + // pick random char in range (inverse match group) + if sum == -1 { + chars := []uint8{} + for j := 0; j < len(allStr); j++ { + c := allStr[j] + + // Check c in range + for i := 0; i < len(re.Rune); i += 2 { + if rune(c) >= re.Rune[i] && rune(c) <= re.Rune[i+1] { + chars = append(chars, c) + break + } + } + } + if len(chars) > 0 { + return string([]byte{chars[f.IntN(len(chars))]}) + } + } + + r := f.IntN(int(sum)) + var ru rune + sum = 0 + for i := 0; i < len(re.Rune); i += 2 { + gap := int(re.Rune[i+1]-re.Rune[i]) + 1 + if sum+gap > r { + ru = re.Rune[i] + rune(r-sum) + break + } + sum += gap + } + + return string(ru) + case syntax.OpAnyCharNotNL, syntax.OpAnyChar: // matches any character(and except newline) + return randCharacter(f, allStr) + case syntax.OpBeginLine: // matches empty string at beginning of line + case syntax.OpEndLine: // matches empty string at end of line + case syntax.OpBeginText: // matches empty string at beginning of text + case syntax.OpEndText: // matches empty string at end of text + case syntax.OpWordBoundary: // matches word boundary `\b` + case syntax.OpNoWordBoundary: // matches word non-boundary `\B` + case syntax.OpCapture: // capturing subexpression with index Cap, optional name Name + return regexGenerate(f, re.Sub0[0], limit) + case syntax.OpStar: // matches Sub[0] zero or more times + var b strings.Builder + for i := 0; i < number(f, 0, 10); i++ { + for _, rs := range re.Sub { + b.WriteString(regexGenerate(f, rs, limit-b.Len())) + } + } + return b.String() + case syntax.OpPlus: // matches Sub[0] one or more times + var b strings.Builder + for i := 0; i < number(f, 1, 10); i++ { + for _, rs := range re.Sub { + b.WriteString(regexGenerate(f, rs, limit-b.Len())) + } + } + return b.String() + case syntax.OpQuest: // matches Sub[0] zero or one times + var b strings.Builder + for i := 0; i < number(f, 0, 1); i++ { + for _, rs := range re.Sub { + b.WriteString(regexGenerate(f, rs, limit-b.Len())) + } + } + return b.String() + case syntax.OpRepeat: // matches Sub[0] at least Min times, at most Max (Max == -1 is no limit) + var b strings.Builder + count := 0 + re.Max = int(math.Min(float64(re.Max), float64(10))) + if re.Max > re.Min { + count = f.IntN(re.Max - re.Min + 1) + } + for i := 0; i < re.Min || i < (re.Min+count); i++ { + for _, rs := range re.Sub { + b.WriteString(regexGenerate(f, rs, limit-b.Len())) + } + } + return b.String() + case syntax.OpConcat: // matches concatenation of Subs + var b strings.Builder + for _, rs := range re.Sub { + b.WriteString(regexGenerate(f, rs, limit-b.Len())) + } + return b.String() + case syntax.OpAlternate: // matches alternation of Subs + return regexGenerate(f, re.Sub[number(f, 0, len(re.Sub)-1)], limit) + } + + return "" +} + +// Map will generate a random set of map data +func Map() map[string]any { return mapFunc(GlobalFaker) } + +// Map will generate a random set of map data +func (f *Faker) Map() map[string]any { return mapFunc(f) } + +func mapFunc(f *Faker) map[string]any { + m := map[string]any{} + + 
randWordType := func() string { + s := randomString(f, []string{"lorem", "bs", "job", "name", "address"}) + switch s { + case "bs": + return bs(f) + case "job": + return jobTitle(f) + case "name": + return name(f) + case "address": + return street(f) + ", " + city(f) + ", " + state(f) + " " + zip(f) + } + return word(f) + } + + randSlice := func() []string { + var sl []string + for ii := 0; ii < number(f, 3, 10); ii++ { + sl = append(sl, word(f)) + } + return sl + } + + for i := 0; i < number(f, 3, 10); i++ { + t := randomString(f, []string{"string", "int", "float", "slice", "map"}) + switch t { + case "string": + m[word(f)] = randWordType() + case "int": + m[word(f)] = number(f, 1, 10000000) + case "float": + m[word(f)] = float32Range(f, 1, 1000000) + case "slice": + m[word(f)] = randSlice() + case "map": + mm := map[string]any{} + tt := randomString(f, []string{"string", "int", "float", "slice"}) + switch tt { + case "string": + mm[word(f)] = randWordType() + case "int": + mm[word(f)] = number(f, 1, 10000000) + case "float": + mm[word(f)] = float32Range(f, 1, 1000000) + case "slice": + mm[word(f)] = randSlice() + } + m[word(f)] = mm + } + } + + return m +} + +func addGenerateLookup() { + AddFuncLookup("generate", Info{ + Display: "Generate", + Category: "generate", + Description: "Random string generated from string value based upon available data sets", + Example: "{firstname} {lastname} {email} - Markus Moen markusmoen@pagac.net", + Output: "string", + Aliases: []string{ + "template expander", + "placeholder interpolator", + "variable substitution", + "token formatter", + "pattern builder", + "macro resolver", + }, + Keywords: []string{ + "generate", "upon", "datasets", "random", + "string", "value", "available", "data", + "sets", "based", + }, + Params: []Param{ + {Field: "str", Display: "String", Type: "string", Description: "String value to generate from"}, + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + str, err := info.GetString(m, "str") + if err != nil { + return nil, err + } + + // Limit the length of the string passed + if len(str) > 1000 { + return nil, errors.New("string length is too large. 
limit to 1000 characters") + } + + return generate(f, str) + }, + }) + + AddFuncLookup("fixed_width", Info{ + Display: "Fixed Width", + Category: "generate", + Description: "Fixed width rows of output data based on input fields", + Example: `Name Email Password Age +Markus Moen sylvanmraz@murphy.net 6VlvH6qqXc7g 13 +Alayna Wuckert santinostanton@carroll.biz g7sLrS0gEwLO 46 +Lura Lockman zacherykuhic@feil.name S8gV7Z64KlHG 12`, + Output: "[]byte", + ContentType: "text/plain", + Aliases: []string{ + "fixed rows", "columnar data", "padded text", "aligned output", "structured fields", + }, + Keywords: []string{ + "tabular", "data", "format", "alignment", "columns", "rows", "layout", "monospace", "table", "presentation", + }, + Params: []Param{ + {Field: "rowcount", Display: "Row Count", Type: "int", Default: "10", Description: "Number of rows"}, + {Field: "fields", Display: "Fields", Type: "[]Field", Description: "Fields name, function and params"}, + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + co := FixedWidthOptions{} + + rowCount, err := info.GetInt(m, "rowcount") + if err != nil { + return nil, err + } + + co.RowCount = rowCount + + fields, _ := info.GetStringArray(m, "fields") + + // Check to make sure fields has length + if len(fields) > 0 { + co.Fields = make([]Field, len(fields)) + for i, f := range fields { + // Unmarshal fields string into fields array + err = json.Unmarshal([]byte(f), &co.Fields[i]) + if err != nil { + return nil, err + } + } + } else { + return nil, errors.New("missing fields") + } + + out, err := fixeWidthFunc(f, &co) + if err != nil { + return nil, err + } + + return out, nil + }, + }) + + AddFuncLookup("regex", Info{ + Display: "Regex", + Category: "generate", + Description: "Pattern-matching tool used in text processing to search and manipulate strings", + Example: "[abcdef]{5} - affec", + Output: "string", + Aliases: []string{ + "regular expression", + "string matcher", + "text parser", + "pattern engine", + "token analyzer", + "rule evaluator", + }, + Keywords: []string{ + "regex", "strings", "re2", "syntax", + "pattern-matching", "tool", "search", + "validation", "compile", "replace", + }, + Params: []Param{ + {Field: "str", Display: "String", Type: "string", Description: "Regex RE2 syntax string"}, + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + str, err := info.GetString(m, "str") + if err != nil { + return nil, err + } + + // Limit the length of the string passed + if len(str) > 500 { + return nil, errors.New("string length is too large. 
limit to 500 characters") + } + + return regex(f, str), nil + }, + }) + + AddFuncLookup("map", Info{ + Display: "Map", + Category: "generate", + Description: "Data structure that stores key-value pairs", + Example: `{ + "software": 7518355, + "that": ["despite", "pack", "whereas", "recently", "there", "anyone", "time", "read"], + "use": 683598, + "whom": "innovate", + "yourselves": 1987784 +}`, + Output: "map[string]any", + ContentType: "application/json", + Aliases: []string{ + "associative array", + "lookup table", + "symbol table", + "keyed collection", + "map structure", + "object store", + }, + Keywords: []string{ + "map", "stores", "key", "value", + "dictionary", "hash", "collection", + "pairs", "keys", "values", "structure", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return mapFunc(f), nil + }, + }) +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/hacker.go b/vendor/github.com/brianvoe/gofakeit/v7/hacker.go new file mode 100644 index 0000000000..1914332593 --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/hacker.go @@ -0,0 +1,179 @@ +package gofakeit + +import ( + "strings" +) + +// HackerPhrase will return a random hacker sentence +func HackerPhrase() string { return hackerPhrase(GlobalFaker) } + +// HackerPhrase will return a random hacker sentence +func (f *Faker) HackerPhrase() string { return hackerPhrase(f) } + +func hackerPhrase(f *Faker) string { + genStr, _ := generate(f, getRandValue(f, []string{"hacker", "phrase"})) + + words := strings.Split(genStr, " ") + words[0] = strings.ToUpper(words[0][0:1]) + words[0][1:] + return strings.Join(words, " ") +} + +// HackerAbbreviation will return a random hacker abbreviation +func HackerAbbreviation() string { return hackerAbbreviation(GlobalFaker) } + +// HackerAbbreviation will return a random hacker abbreviation +func (f *Faker) HackerAbbreviation() string { return hackerAbbreviation(f) } + +func hackerAbbreviation(f *Faker) string { + return getRandValue(f, []string{"hacker", "abbreviation"}) +} + +// HackerAdjective will return a random hacker adjective +func HackerAdjective() string { return hackerAdjective(GlobalFaker) } + +// HackerAdjective will return a random hacker adjective +func (f *Faker) HackerAdjective() string { return hackerAdjective(f) } + +func hackerAdjective(f *Faker) string { + return getRandValue(f, []string{"hacker", "adjective"}) +} + +// HackerNoun will return a random hacker noun +func HackerNoun() string { return hackerNoun(GlobalFaker) } + +// HackerNoun will return a random hacker noun +func (f *Faker) HackerNoun() string { return hackerNoun(f) } + +func hackerNoun(f *Faker) string { + return getRandValue(f, []string{"hacker", "noun"}) +} + +// HackerVerb will return a random hacker verb +func HackerVerb() string { return hackerVerb(GlobalFaker) } + +// HackerVerb will return a random hacker verb +func (f *Faker) HackerVerb() string { return hackerVerb(f) } + +func hackerVerb(f *Faker) string { + return getRandValue(f, []string{"hacker", "verb"}) +} + +// HackeringVerb will return a random hacker ingverb +func HackeringVerb() string { return hackeringVerb(GlobalFaker) } + +// HackeringVerb will return a random hacker ingverb +func (f *Faker) HackeringVerb() string { return hackeringVerb(f) } + +func hackeringVerb(f *Faker) string { + return getRandValue(f, []string{"hacker", "ingverb"}) +} + +func addHackerLookup() { + AddFuncLookup("hackerphrase", Info{ + Display: "Hacker Phrase", + Category: "hacker", + Description: "Informal jargon and slang used in 
the hacking and cybersecurity community", + Example: "If we calculate the program, we can get to the AI pixel through the redundant XSS matrix!", + Output: "string", + Aliases: []string{ + "hacker jargon", "cyber phrase", "security slang", "tech quip", "infosec phrase", + }, + Keywords: []string{ + "phrase", "jargon", "slang", "informal", "community", + "calculate", "program", "ai", "pixel", "redundant", "xss", "matrix", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return hackerPhrase(f), nil + }, + }) + + AddFuncLookup("hackerabbreviation", Info{ + Display: "Hacker Abbreviation", + Category: "hacker", + Description: "Abbreviations and acronyms commonly used in the hacking and cybersecurity community", + Example: "ADP", + Output: "string", + Aliases: []string{ + "infosec acronym", "tech abbreviation", "security acronym", "cyber acronym", "hacker shorthand", + }, + Keywords: []string{ + "abbreviation", "acronym", "short", "code", "initialism", + "common", "used", "security", "community", "terminology", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return hackerAbbreviation(f), nil + }, + }) + + AddFuncLookup("hackeradjective", Info{ + Display: "Hacker Adjective", + Category: "hacker", + Description: "Adjectives describing terms often associated with hackers and cybersecurity experts", + Example: "wireless", + Output: "string", + Aliases: []string{ + "hacker descriptor", "cyber adjective", "infosec modifier", "security adjective", "tech describing word", + }, + Keywords: []string{ + "adjective", "descriptive", "term", "modifier", "attribute", + "wireless", "connected", "digital", "virtual", "networked", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return hackerAdjective(f), nil + }, + }) + + AddFuncLookup("hackernoun", Info{ + Display: "Hacker Noun", + Category: "hacker", + Description: "Noun representing an element, tool, or concept within the realm of hacking and cybersecurity", + Example: "driver", + Output: "string", + Aliases: []string{ + "hacking tool", "cyber noun", "security concept", "tech object", "infosec element", + }, + Keywords: []string{ + "noun", "element", "tool", "concept", "object", + "driver", "exploit", "payload", "virus", "device", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return hackerNoun(f), nil + }, + }) + + AddFuncLookup("hackerverb", Info{ + Display: "Hacker Verb", + Category: "hacker", + Description: "Verbs associated with actions and activities in the field of hacking and cybersecurity", + Example: "synthesize", + Output: "string", + Aliases: []string{ + "hacking verb", "cyber action", "infosec verb", "tech activity", "security verb", + }, + Keywords: []string{ + "verb", "action", "activity", "task", "operation", + "synthesize", "exploit", "inject", "bypass", "scan", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return hackerVerb(f), nil + }, + }) + + AddFuncLookup("hackeringverb", Info{ + Display: "Hackering Verb", + Category: "hacker", + Description: "Verb describing actions and activities related to hacking, often involving computer systems and security", + Example: "connecting", + Output: "string", + Aliases: []string{ + "hacking action", "present participle", "cyber verb", "infosec activity", "progressive verb", + }, + Keywords: []string{ + "verb", "ing", "connecting", "probing", "listening", + "systems", "process", "computer", "security", "operation", + }, + Generate: func(f *Faker, m *MapParams, info *Info) 
(any, error) { + return hackeringVerb(f), nil + }, + }) +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/helpers.go b/vendor/github.com/brianvoe/gofakeit/v7/helpers.go new file mode 100644 index 0000000000..6773e39f26 --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/helpers.go @@ -0,0 +1,374 @@ +package gofakeit + +import ( + "encoding/json" + "fmt" + "math" + "reflect" + "strings" + "unicode" + + "github.com/brianvoe/gofakeit/v7/data" +) + +const lowerStr = "abcdefghijklmnopqrstuvwxyz" +const upperStr = "ABCDEFGHIJKLMNOPQRSTUVWXYZ" +const numericStr = "0123456789" +const specialStr = "@#$%&?|!(){}<>=*+-_:;,." +const specialSafeStr = "!@.-_*" // https://github.com/1Password/spg/pull/22 +const spaceStr = " " +const allStr = lowerStr + upperStr + numericStr + specialStr + spaceStr +const vowels = "aeiou" +const hashtag = '#' +const questionmark = '?' +const dash = '-' +const base58 = "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz" +const minUint = 0 +const maxUint = ^uint(0) +const minInt = -maxInt - 1 +const maxInt = int(^uint(0) >> 1) +const is32bit = ^uint(0)>>32 == 0 + +// Check if in lib +func dataCheck(dataVal []string) bool { + var checkOk bool + + if len(dataVal) == 2 { + _, checkOk = data.Data[dataVal[0]] + if checkOk { + _, checkOk = data.Data[dataVal[0]][dataVal[1]] + } + } + + return checkOk +} + +// Get Random Value +func getRandValue(f *Faker, dataVal []string) string { + if !dataCheck(dataVal) { + return "" + } + return data.Data[dataVal[0]][dataVal[1]][f.IntN(len(data.Data[dataVal[0]][dataVal[1]]))] +} + +// Replace # with numbers +func replaceWithNumbers(f *Faker, str string) string { + if str == "" { + return str + } + bytestr := []byte(str) + for i := 0; i < len(bytestr); i++ { + if bytestr[i] == hashtag { + bytestr[i] = byte(randDigit(f)) + } + } + if bytestr[0] == '0' { + bytestr[0] = byte(f.IntN(8)+1) + '0' + } + + return string(bytestr) +} + +// Replace ? with ASCII lowercase letters +func replaceWithLetters(f *Faker, str string) string { + if str == "" { + return str + } + bytestr := []byte(str) + for i := 0; i < len(bytestr); i++ { + if bytestr[i] == questionmark { + bytestr[i] = byte(randLetter(f)) + } + } + + return string(bytestr) +} + +// Replace ? with ASCII lowercase letters between a and f +func replaceWithHexLetters(f *Faker, str string) string { + if str == "" { + return str + } + bytestr := []byte(str) + for i := 0; i < len(bytestr); i++ { + if bytestr[i] == questionmark { + bytestr[i] = byte(randHexLetter(f)) + } + } + + return string(bytestr) +} + +// Generate random lowercase ASCII letter +func randLetter(f *Faker) rune { + allLetters := upperStr + lowerStr + return rune(allLetters[f.IntN(len(allLetters))]) +} + +func randCharacter(f *Faker, s string) string { + return string(s[f.Int64()%int64(len(s))]) +} + +// Generate random lowercase ASCII letter between a and f +func randHexLetter(f *Faker) rune { + return rune(byte(f.IntN(6)) + 'a') +} + +// Generate random ASCII digit +func randDigit(f *Faker) rune { + return rune(byte(f.IntN(10)) + '0') +} + +// Generate random integer between min and max +func randIntRange(f *Faker, min, max int) int { + if min == max { + return min + } + + if min > max { + min, max = max, min // Swap if min is greater than max + } + + // Use f.IntN to generate a random number in [0, rangeSize) and shift it into [min, max]. 
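+	// For example, min=3 and max=7 gives IntN(5) in 0..4, so the result is 3..7 inclusive.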
+ return f.IntN(max-min+1) + min +} + +// Generate random uint between min and max +func randUintRange(f *Faker, min, max uint) uint { + if min == max { + return min // Immediate return if range is zero + } + + if min > max { + min, max = max, min // Swap if min is greater than max + } + + // Use f.UintN to generate a random number in [0, rangeSize) and shift it into [min, max]. + return f.UintN(max-min+1) + min +} + +func toFixed(num float64, precision int) float64 { + output := math.Pow(10, float64(precision)) + return float64(math.Floor(num*output)) / output +} + +func equalSliceString(a, b []string) bool { + sizeA, sizeB := len(a), len(b) + if sizeA != sizeB { + return false + } + + for i, va := range a { + vb := b[i] + + if va != vb { + return false + } + } + return true +} + +func equalSliceInt(a, b []int) bool { + sizeA, sizeB := len(a), len(b) + if sizeA != sizeB { + return false + } + + for i, va := range a { + vb := b[i] + + if va != vb { + return false + } + } + return true +} + +func equalSliceInterface(a, b []any) bool { + sizeA, sizeB := len(a), len(b) + if sizeA != sizeB { + return false + } + + for i, va := range a { + if !reflect.DeepEqual(va, b[i]) { + return false + } + } + return true +} + +func stringInSlice(a string, list []string) bool { + for _, b := range list { + if b == a { + return true + } + } + return false +} + +func anyToString(a any) string { + if a == nil { + return "" + } + + // If it's a slice of bytes or struct, unmarshal it into an interface + if bytes, ok := a.([]byte); ok { + return string(bytes) + } + + // If it's a struct, map, or slice, convert to JSON + switch reflect.TypeOf(a).Kind() { + case reflect.Struct, reflect.Map, reflect.Slice: + b, err := json.Marshal(a) + if err == nil { + return string(b) + } + } + + return fmt.Sprintf("%v", a) +} + +// Title returns a copy of the string s with all Unicode letters that begin words +// mapped to their Unicode title case +func title(s string) string { + // isSeparator reports whether the rune could mark a word boundary + isSeparator := func(r rune) bool { + // ASCII alphanumerics and underscore are not separators + if r <= 0x7F { + switch { + case '0' <= r && r <= '9': + return false + case 'a' <= r && r <= 'z': + return false + case 'A' <= r && r <= 'Z': + return false + case r == '_': + return false + } + return true + } + + // Letters and digits are not separators + if unicode.IsLetter(r) || unicode.IsDigit(r) { + return false + } + + // Otherwise, all we can do for now is treat spaces as separators. 
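+		// Illustration: title("hello world") yields "Hello World", while
+		// title("foo_bar") stays "Foo_bar" because '_' is not a separator.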
+ return unicode.IsSpace(r) + } + + prev := ' ' + return strings.Map( + func(r rune) rune { + if isSeparator(prev) { + prev = r + return unicode.ToTitle(r) + } + prev = r + return r + }, + s) +} + +func funcLookupSplit(str string) ([]string, error) { + out := []string{} + for str != "" { + if strings.HasPrefix(str, "[") { + startIndex := strings.Index(str, "[") + endIndex := strings.Index(str, "]") + if endIndex == -1 { + return nil, fmt.Errorf("invalid lookup split missing ending ] bracket") + } + + val := str[(startIndex) : endIndex+1] + out = append(out, strings.TrimSpace(val)) + str = strings.Replace(str, val, "", 1) + + // Trim off comma if it has it + if strings.HasPrefix(str, ",") { + str = strings.Replace(str, ",", "", 1) + } + } else { + strSplit := strings.SplitN(str, ",", 2) + strSplitLen := len(strSplit) + if strSplitLen >= 1 { + out = append(out, strings.TrimSpace(strSplit[0])) + } + if strSplitLen >= 2 { + str = strSplit[1] + } else { + str = "" + } + } + } + + return out, nil +} + +// Used for parsing the tag in a struct +func parseNameAndParamsFromTag(tag string) (string, string) { + // Trim the curly on the beginning and end + tag = strings.TrimLeft(tag, "{") + tag = strings.TrimRight(tag, "}") + // Check if has params separated by : + fNameSplit := strings.SplitN(tag, ":", 2) + fName := "" + fParams := "" + if len(fNameSplit) >= 1 { + fName = fNameSplit[0] + } + if len(fNameSplit) >= 2 { + fParams = fNameSplit[1] + } + return fName, fParams +} + +// Used for parsing map params +func parseMapParams(info *Info, fParams string) (*MapParams, error) { + // Get parameters, make sure params and the split both have values + mapParams := NewMapParams() + paramsLen := len(info.Params) + + // If just one param and its a string simply just pass it + if paramsLen == 1 && info.Params[0].Type == "string" { + mapParams.Add(info.Params[0].Field, fParams) + } else if paramsLen > 0 && fParams != "" { + splitVals, err := funcLookupSplit(fParams) + if err != nil { + return nil, err + } + mapParams, err = addSplitValsToMapParams(splitVals, info, mapParams) + if err != nil { + return nil, err + } + } + + // If mapParams doesnt have a size then return nil + if mapParams.Size() == 0 { + return nil, nil + } + + return mapParams, nil +} + +// Used for splitting the values +func addSplitValsToMapParams(splitVals []string, info *Info, mapParams *MapParams) (*MapParams, error) { + for ii := 0; ii < len(splitVals); ii++ { + if len(info.Params)-1 >= ii { + if strings.HasPrefix(splitVals[ii], "[") { + lookupSplits, err := funcLookupSplit(strings.TrimRight(strings.TrimLeft(splitVals[ii], "["), "]")) + if err != nil { + return nil, err + } + + for _, v := range lookupSplits { + mapParams.Add(info.Params[ii].Field, v) + } + } else { + mapParams.Add(info.Params[ii].Field, splitVals[ii]) + } + } + } + return mapParams, nil +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/hipster.go b/vendor/github.com/brianvoe/gofakeit/v7/hipster.go new file mode 100644 index 0000000000..9cac3f6c44 --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/hipster.go @@ -0,0 +1,136 @@ +package gofakeit + +import ( + "errors" +) + +// HipsterWord will return a single hipster word +func HipsterWord() string { return hipsterWord(GlobalFaker) } + +// HipsterWord will return a single hipster word +func (f *Faker) HipsterWord() string { return hipsterWord(f) } + +func hipsterWord(f *Faker) string { return getRandValue(f, []string{"hipster", "word"}) } + +// HipsterSentence will generate a random sentence +func 
HipsterSentence(wordCount int) string { return hipsterSentence(GlobalFaker, wordCount) } + +// HipsterSentence will generate a random sentence +func (f *Faker) HipsterSentence(wordCount int) string { return hipsterSentence(f, wordCount) } + +func hipsterSentence(f *Faker, wordCount int) string { + return sentenceGen(f, wordCount, hipsterWord) +} + +// HipsterParagraph will generate a random paragraphGenerator +// Set Paragraph Count +// Set Sentence Count +// Set Word Count +// Set Paragraph Separator +func HipsterParagraph(paragraphCount int, sentenceCount int, wordCount int, separator string) string { + return hipsterParagraph(GlobalFaker, paragraphCount, sentenceCount, wordCount, separator) +} + +// HipsterParagraph will generate a random paragraphGenerator +// Set Paragraph Count +// Set Sentence Count +// Set Word Count +// Set Paragraph Separator +func (f *Faker) HipsterParagraph(paragraphCount int, sentenceCount int, wordCount int, separator string) string { + return hipsterParagraph(f, paragraphCount, sentenceCount, wordCount, separator) +} + +func hipsterParagraph(f *Faker, paragraphCount int, sentenceCount int, wordCount int, separator string) string { + return paragraphGen(f, paragrapOptions{paragraphCount, sentenceCount, wordCount, separator}, hipsterSentence) +} + +func addHipsterLookup() { + AddFuncLookup("hipsterword", Info{ + Display: "Hipster Word", + Category: "hipster", + Description: "Trendy and unconventional vocabulary used by hipsters to express unique cultural preferences", + Example: "microdosing", + Output: "string", + Aliases: []string{"word", "trendy", "unconventional", "vocabulary", "culture", "modern"}, + Keywords: []string{"hipster", "preferences", "microdosing", "artisanal", "craft", "organic", "sustainable", "authentic"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return hipsterWord(f), nil + }, + }) + + AddFuncLookup("hipstersentence", Info{ + Display: "Hipster Sentence", + Category: "hipster", + Description: "Sentence showcasing the use of trendy and unconventional vocabulary associated with hipster culture", + Example: "Microdosing roof chia echo pickled.", + Output: "string", + Aliases: []string{"sentence", "trendy", "unconventional", "vocabulary", "culture", "modern"}, + Keywords: []string{"hipster", "showcasing", "microdosing", "roof", "chia", "echo", "pickled", "artisanal"}, + Params: []Param{ + {Field: "wordcount", Display: "Word Count", Type: "int", Default: "5", Description: "Number of words in a sentence"}, + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + wordCount, err := info.GetInt(m, "wordcount") + if err != nil { + return nil, err + } + if wordCount <= 0 || wordCount > 50 { + return nil, errors.New("invalid word count, must be greater than 0, less than 50") + } + + return hipsterSentence(f, wordCount), nil + }, + }) + + AddFuncLookup("hipsterparagraph", Info{ + Display: "Hipster Paragraph", + Category: "hipster", + Description: "Paragraph showcasing the use of trendy and unconventional vocabulary associated with hipster culture", + Example: `Microdosing roof chia echo pickled meditation cold-pressed raw denim fingerstache normcore sriracha pork belly. Wolf try-hard pop-up blog tilde hashtag health butcher waistcoat paleo portland vinegar. Microdosing sartorial blue bottle slow-carb freegan five dollar toast you probably haven't heard of them asymmetrical chia farm-to-table narwhal banjo. Gluten-free blog authentic literally synth vinyl meh ethical health fixie banh mi Yuccie. 
Try-hard drinking squid seitan cray VHS echo chillwave hammock kombucha food truck sustainable. + +Pug bushwick hella tote bag cliche direct trade waistcoat yr waistcoat knausgaard pour-over master. Pitchfork jean shorts franzen flexitarian distillery hella meggings austin knausgaard crucifix wolf heirloom. Crucifix food truck you probably haven't heard of them trust fund fixie gentrify pitchfork stumptown mlkshk umami chambray blue bottle. 3 wolf moon swag +1 biodiesel knausgaard semiotics taxidermy meh artisan hoodie +1 blue bottle. Fashion axe forage mixtape Thundercats pork belly whatever 90's beard selfies chambray cred mlkshk. + +Shabby chic typewriter VHS readymade lo-fi bitters PBR&B gentrify lomo raw denim freegan put a bird on it. Raw denim cliche dreamcatcher pug fixie park trust fund migas fingerstache sriracha +1 mustache. Tilde shoreditch kickstarter franzen dreamcatcher green juice mustache neutra polaroid stumptown organic schlitz. Flexitarian ramps chicharrones kogi lo-fi mustache tilde forage street church-key williamsburg taxidermy. Chia mustache plaid mumblecore squid slow-carb disrupt Thundercats goth shoreditch master direct trade.`, + Output: "string", + Aliases: []string{"paragraph", "trendy", "unconventional", "vocabulary", "culture", "modern"}, + Keywords: []string{"hipster", "showcasing", "meditation", "cold-pressed", "raw", "denim", "fingerstache", "normcore", "sriracha"}, + Params: []Param{ + {Field: "paragraphcount", Display: "Paragraph Count", Type: "int", Default: "2", Description: "Number of paragraphs"}, + {Field: "sentencecount", Display: "Sentence Count", Type: "int", Default: "2", Description: "Number of sentences in a paragraph"}, + {Field: "wordcount", Display: "Word Count", Type: "int", Default: "5", Description: "Number of words in a sentence"}, + {Field: "paragraphseparator", Display: "Paragraph Separator", Type: "string", Default: "
", Description: "String value to add between paragraphs"}, + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + paragraphCount, err := info.GetInt(m, "paragraphcount") + if err != nil { + return nil, err + } + if paragraphCount <= 0 || paragraphCount > 20 { + return nil, errors.New("invalid paragraph count, must be greater than 0, less than 20") + } + + sentenceCount, err := info.GetInt(m, "sentencecount") + if err != nil { + return nil, err + } + if sentenceCount <= 0 || sentenceCount > 20 { + return nil, errors.New("invalid sentence count, must be greater than 0, less than 20") + } + + wordCount, err := info.GetInt(m, "wordcount") + if err != nil { + return nil, err + } + if wordCount <= 0 || wordCount > 50 { + return nil, errors.New("invalid word count, must be greater than 0, less than 50") + } + + paragraphSeparator, err := info.GetString(m, "paragraphseparator") + if err != nil { + return nil, err + } + + return hipsterParagraph(f, paragraphCount, sentenceCount, wordCount, paragraphSeparator), nil + }, + }) +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/html.go b/vendor/github.com/brianvoe/gofakeit/v7/html.go new file mode 100644 index 0000000000..82f5f24f53 --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/html.go @@ -0,0 +1,187 @@ +package gofakeit + +import ( + "errors" + "strconv" + "strings" + + "github.com/brianvoe/gofakeit/v7/data" +) + +// InputName will return a random input field name +func InputName() string { + return inputName(GlobalFaker) +} + +// InputName will return a random input field name +func (f *Faker) InputName() string { + return inputName(f) +} + +func inputName(f *Faker) string { + return getRandValue(f, []string{"html", "input_name"}) +} + +type SVGOptions struct { + Height int + Width int + Type string + Colors []string +} + +// Generate a random svg generator +func Svg(options *SVGOptions) string { return svg(GlobalFaker, options) } + +// Generate a random svg generator +func (f *Faker) Svg(options *SVGOptions) string { return svg(f, options) } + +func svg(f *Faker, options *SVGOptions) string { + // If options is nil, set it to empty struct + if options == nil { + options = &SVGOptions{} + } + + // If options height and weight is not set, set it to random number between 100 and 500 + if options.Width == 0 { + options.Width = number(f, 100, 500) + } + widthStr := strconv.Itoa(options.Width) + if options.Height == 0 { + options.Height = number(f, 100, 500) + } + heightStr := strconv.Itoa(options.Height) + + // Check if type is set, if not set to random type + if options.Type == "" { + options.Type = randomString(f, data.GetSubData("html", "svg")) + } + + // If the colors are not set, set it to a set of nice colors + if len(options.Colors) == 0 { + options.Colors = niceColors(f) + } + + // Start svg string + svgStr := `` + + // Add a rect for the background + svgStr += `` + + // Add a random number of shapes + for i := 0; i < number(f, 10, 20); i++ { + // Add a random shape + switch options.Type { + case "rect": + svgStr += `` + case "circle": + svgStr += `` + case "ellipse": + svgStr += `` + case "line": + svgStr += `` + case "polyline": + svgStr += `` + case "polygon": + svgStr += `` + } + } + + // End svg string + svgStr += `` + + return svgStr +} + +func addHtmlLookup() { + AddFuncLookup("inputname", Info{ + Display: "Input Name", + Category: "html", + Description: "Attribute used to define the name of an input element in web forms", + Example: "first_name", + Output: "string", + Aliases: []string{ + "form field", 
"field name", "html input", "input identifier", "web attribute", + }, + Keywords: []string{ + "define", "attribute", "element", "parameter", "submission", "mapping", "key", "entry", "binding", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return inputName(f), nil + }, + }) + + AddFuncLookup("svg", Info{ + Display: "Image SVG", + Category: "html", + Description: "Scalable Vector Graphics used to display vector images in web content", + Example: ` + + +`, + Output: "string", + ContentType: "image/svg+xml", + Aliases: []string{ + "vector graphic", "xml image", "scalable format", "web graphic", "svg file", + }, + Keywords: []string{ + "scalable", "vector", "graphics", "image", "drawing", "markup", "shape", "color", "path", "render", + }, + + Params: []Param{ + {Field: "width", Display: "Width", Type: "int", Default: "500", Description: "Width in px"}, + {Field: "height", Display: "Height", Type: "int", Default: "500", Description: "Height in px"}, + {Field: "type", Display: "Type", Type: "string", Optional: true, Options: data.GetSubData("html", "svg"), Description: "Sub child element type"}, + {Field: "colors", Display: "Colors", Type: "[]string", Optional: true, Description: "Hex or RGB array of colors to use"}, + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + // Setup new options + options := SVGOptions{} + var err error + + options.Width, err = info.GetInt(m, "width") + if err != nil { + return nil, err + } + if options.Width < 10 || options.Width >= 1000 { + return nil, errors.New("invalid image width, must be greater than 10, less than 1000") + } + + options.Height, err = info.GetInt(m, "height") + if err != nil { + return nil, err + } + if options.Height < 10 || options.Height >= 1000 { + return nil, errors.New("invalid image height, must be greater than 10, less than 1000") + } + + options.Type, err = info.GetString(m, "type") + svgData := data.GetSubData("html", "svg") + if err != nil { + return nil, err + } + + // If type is empty, set with random type + if options.Type == "" { + options.Type = randomString(f, svgData) + } + + // If not in date html svg type array, return error + if !stringInSlice(options.Type, svgData) { + return nil, errors.New("invalid svg type, must be one of " + strings.Join(svgData, ",")) + } + + // Get colors + options.Colors, err = info.GetStringArray(m, "colors") + if err != nil { + return nil, err + } + + // If colors is empty, set with random colors + if len(options.Colors) == 0 { + options.Colors = niceColors(f) + } + + return svg(f, &options), nil + }, + }) +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/image.go b/vendor/github.com/brianvoe/gofakeit/v7/image.go new file mode 100644 index 0000000000..d20f227d1b --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/image.go @@ -0,0 +1,126 @@ +package gofakeit + +import ( + "bytes" + "errors" + img "image" + imgCol "image/color" + "image/jpeg" + "image/png" +) + +// Image generates a random rgba image +func Image(width int, height int) *img.RGBA { return image(GlobalFaker, width, height) } + +// Image generates a random rgba image +func (f *Faker) Image(width int, height int) *img.RGBA { return image(f, width, height) } + +func image(f *Faker, width int, height int) *img.RGBA { + upLeft := img.Point{0, 0} + lowRight := img.Point{width, height} + + img := img.NewRGBA(img.Rectangle{upLeft, lowRight}) + + // Set color for each pixel + for x := 0; x < width; x++ { + for y := 0; y < height; y++ { + img.Set(x, y, imgCol.RGBA{uint8(number(f, 0, 
255)), uint8(number(f, 0, 255)), uint8(number(f, 0, 255)), 0xff}) + } + } + + return img +} + +// ImageJpeg generates a random rgba jpeg image +func ImageJpeg(width int, height int) []byte { return imageJpeg(GlobalFaker, width, height) } + +// ImageJpeg generates a random rgba jpeg image +func (f *Faker) ImageJpeg(width int, height int) []byte { return imageJpeg(f, width, height) } + +func imageJpeg(f *Faker, width int, height int) []byte { + buf := new(bytes.Buffer) + jpeg.Encode(buf, image(f, width, height), nil) + return buf.Bytes() +} + +// ImagePng generates a random rgba png image +func ImagePng(width int, height int) []byte { return imagePng(GlobalFaker, width, height) } + +// ImagePng generates a random rgba png image +func (f *Faker) ImagePng(width int, height int) []byte { return imagePng(f, width, height) } + +func imagePng(f *Faker, width int, height int) []byte { + buf := new(bytes.Buffer) + png.Encode(buf, image(f, width, height)) + return buf.Bytes() +} + +func addImageLookup() { + AddFuncLookup("imagejpeg", Info{ + Display: "Image JPEG", + Category: "image", + Description: "Image file format known for its efficient compression and compatibility", + Example: "file.jpeg - bytes", + Output: "[]byte", + ContentType: "image/jpeg", + Aliases: []string{"jpeg", "jpg", "format", "compression", "compatibility", "photo"}, + Keywords: []string{"image", "efficient", "file", "bytes", "known", "rgba", "pixel", "width", "height"}, + Params: []Param{ + {Field: "width", Display: "Width", Type: "int", Default: "500", Description: "Image width in px"}, + {Field: "height", Display: "Height", Type: "int", Default: "500", Description: "Image height in px"}, + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + width, err := info.GetInt(m, "width") + if err != nil { + return nil, err + } + if width < 10 || width >= 1000 { + return nil, errors.New("invalid image width, must be greater than 10, less than 1000") + } + + height, err := info.GetInt(m, "height") + if err != nil { + return nil, err + } + if height < 10 || height >= 1000 { + return nil, errors.New("invalid image height, must be greater than 10, less than 1000") + } + + return imageJpeg(f, width, height), nil + }, + }) + + AddFuncLookup("imagepng", Info{ + Display: "Image PNG", + Category: "image", + Description: "Image file format known for its lossless compression and support for transparency", + Example: "file.png - bytes", + Output: "[]byte", + ContentType: "image/png", + Aliases: []string{"png", "format", "lossless", "compression", "transparency", "graphic"}, + Keywords: []string{"image", "support", "file", "bytes", "known", "rgba", "pixel", "width", "height"}, + Params: []Param{ + {Field: "width", Display: "Width", Type: "int", Default: "500", Description: "Image width in px"}, + {Field: "height", Display: "Height", Type: "int", Default: "500", Description: "Image height in px"}, + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + width, err := info.GetInt(m, "width") + if err != nil { + return nil, err + } + if width < 10 || width >= 1000 { + return nil, errors.New("invalid image width, must be greater than 10, less than 1000") + } + + height, err := info.GetInt(m, "height") + if err != nil { + return nil, err + } + if height < 10 || height >= 1000 { + return nil, errors.New("invalid image height, must be greater than 10, less than 1000") + } + + return imagePng(f, width, height), nil + }, + }) +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/internet.go 
b/vendor/github.com/brianvoe/gofakeit/v7/internet.go new file mode 100644 index 0000000000..2f121f1df4 --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/internet.go @@ -0,0 +1,473 @@ +package gofakeit + +import ( + "fmt" + "strconv" + "strings" + + "github.com/brianvoe/gofakeit/v7/data" +) + +// DomainName will generate a random url domain name +func DomainName() string { return domainName(GlobalFaker) } + +// DomainName will generate a random url domain name +func (f *Faker) DomainName() string { return domainName(f) } + +func domainName(f *Faker) string { + name := strings.Replace(strings.ToLower(jobDescriptor(f)+bs(f)), " ", "", -1) + + return fmt.Sprintf("%s.%s", name, domainSuffix(f)) +} + +// DomainSuffix will generate a random domain suffix +func DomainSuffix() string { return domainSuffix(GlobalFaker) } + +// DomainSuffix will generate a random domain suffix +func (f *Faker) DomainSuffix() string { return domainSuffix(f) } + +func domainSuffix(f *Faker) string { + return getRandValue(f, []string{"internet", "domain_suffix"}) +} + +// URL will generate a random url string +func URL() string { return url(GlobalFaker) } + +// URL will generate a random url string +func (f *Faker) URL() string { return url(f) } + +func url(f *Faker) string { + // Slugs + num := number(f, 1, 4) + slug := make([]string, num) + for i := 0; i < num; i++ { + slug[i] = bs(f) + } + + scheme := randomString(f, []string{"https", "http"}) + path := strings.ToLower(strings.Join(slug, "/")) + + url := fmt.Sprintf("%s://www.%s/%s", scheme, domainName(f), path) + url = strings.Replace(url, " ", "", -1) + + return url +} + +// HTTPMethod will generate a random http method +func HTTPMethod() string { return httpMethod(GlobalFaker) } + +// HTTPMethod will generate a random http method +func (f *Faker) HTTPMethod() string { return httpMethod(f) } + +func httpMethod(f *Faker) string { + return getRandValue(f, []string{"internet", "http_method"}) +} + +// IPv4Address will generate a random version 4 ip address +func IPv4Address() string { return ipv4Address(GlobalFaker) } + +// IPv4Address will generate a random version 4 ip address +func (f *Faker) IPv4Address() string { return ipv4Address(f) } + +func ipv4Address(f *Faker) string { + num := func() int { return f.IntN(256) } + + return fmt.Sprintf("%d.%d.%d.%d", num(), num(), num(), num()) +} + +// IPv6Address will generate a random version 6 ip address +func IPv6Address() string { return ipv6Address(GlobalFaker) } + +// IPv6Address will generate a random version 6 ip address +func (f *Faker) IPv6Address() string { return ipv6Address(f) } + +func ipv6Address(f *Faker) string { + num := func() int { return f.IntN(65536) } + + return fmt.Sprintf("%x:%x:%x:%x:%x:%x:%x:%x", num(), num(), num(), num(), num(), num(), num(), num()) +} + +// MacAddress will generate a random mac address +func MacAddress() string { return macAddress(GlobalFaker) } + +// MacAddress will generate a random mac address +func (f *Faker) MacAddress() string { return macAddress(f) } + +func macAddress(f *Faker) string { + num := 255 + + return fmt.Sprintf("%02x:%02x:%02x:%02x:%02x:%02x", f.IntN(num), f.IntN(num), f.IntN(num), f.IntN(num), f.IntN(num), f.IntN(num)) +} + +// HTTPStatusCode will generate a random status code +func HTTPStatusCode() int { return httpStatusCode(GlobalFaker) } + +// HTTPStatusCode will generate a random status code +func (f *Faker) HTTPStatusCode() int { return httpStatusCode(f) } + +func httpStatusCode(f *Faker) int { + randInt, _ := strconv.Atoi(getRandValue(f, 
[]string{"internet", "http_status_general"})) + return randInt +} + +// HTTPStatusCodeSimple will generate a random simple status code +func HTTPStatusCodeSimple() int { return httpStatusCodeSimple(GlobalFaker) } + +// HTTPStatusCodeSimple will generate a random simple status code +func (f *Faker) HTTPStatusCodeSimple() int { return httpStatusCodeSimple(f) } + +func httpStatusCodeSimple(f *Faker) int { + randInt, _ := strconv.Atoi(getRandValue(f, []string{"internet", "http_status_simple"})) + return randInt +} + +// LogLevel will generate a random log level +// See data/LogLevels for list of available levels +func LogLevel(logType string) string { return logLevel(GlobalFaker, logType) } + +// LogLevel will generate a random log level +// See data/LogLevels for list of available levels +func (f *Faker) LogLevel(logType string) string { return logLevel(f, logType) } + +func logLevel(f *Faker, logType string) string { + if _, ok := data.LogLevels[logType]; ok { + return getRandValue(f, []string{"log_level", logType}) + } + + return getRandValue(f, []string{"log_level", "general"}) +} + +// UserAgent will generate a random broswer user agent +func UserAgent() string { return userAgent(GlobalFaker) } + +// UserAgent will generate a random broswer user agent +func (f *Faker) UserAgent() string { return userAgent(f) } + +func userAgent(f *Faker) string { + randNum := randIntRange(f, 0, 4) + switch randNum { + case 0: + return chromeUserAgent(f) + case 1: + return firefoxUserAgent(f) + case 2: + return safariUserAgent(f) + case 3: + return operaUserAgent(f) + default: + return chromeUserAgent(f) + } +} + +// ChromeUserAgent will generate a random chrome browser user agent string +func ChromeUserAgent() string { return chromeUserAgent(GlobalFaker) } + +// ChromeUserAgent will generate a random chrome browser user agent string +func (f *Faker) ChromeUserAgent() string { return chromeUserAgent(f) } + +func chromeUserAgent(f *Faker) string { + randNum1 := strconv.Itoa(randIntRange(f, 531, 536)) + strconv.Itoa(randIntRange(f, 0, 2)) + randNum2 := strconv.Itoa(randIntRange(f, 36, 40)) + randNum3 := strconv.Itoa(randIntRange(f, 800, 899)) + return "Mozilla/5.0 " + "(" + randomPlatform(f) + ") AppleWebKit/" + randNum1 + " (KHTML, like Gecko) Chrome/" + randNum2 + ".0." + randNum3 + ".0 Mobile Safari/" + randNum1 +} + +// FirefoxUserAgent will generate a random firefox broswer user agent string +func FirefoxUserAgent() string { return firefoxUserAgent(GlobalFaker) } + +// FirefoxUserAgent will generate a random firefox broswer user agent string +func (f *Faker) FirefoxUserAgent() string { return firefoxUserAgent(f) } + +func firefoxUserAgent(f *Faker) string { + ver := "Gecko/" + date(f).Format("2006-01-02") + " Firefox/" + strconv.Itoa(randIntRange(f, 35, 37)) + ".0" + platforms := []string{ + "(" + windowsPlatformToken(f) + "; " + "en-US" + "; rv:1.9." 
+ strconv.Itoa(randIntRange(f, 0, 3)) + ".20) " + ver, + "(" + linuxPlatformToken(f) + "; rv:" + strconv.Itoa(randIntRange(f, 5, 8)) + ".0) " + ver, + "(" + macPlatformToken(f) + " rv:" + strconv.Itoa(randIntRange(f, 2, 7)) + ".0) " + ver, + } + + return "Mozilla/5.0 " + randomString(f, platforms) +} + +// SafariUserAgent will generate a random safari browser user agent string +func SafariUserAgent() string { return safariUserAgent(GlobalFaker) } + +// SafariUserAgent will generate a random safari browser user agent string +func (f *Faker) SafariUserAgent() string { return safariUserAgent(f) } + +func safariUserAgent(f *Faker) string { + randNum := strconv.Itoa(randIntRange(f, 531, 536)) + "." + strconv.Itoa(randIntRange(f, 1, 51)) + "." + strconv.Itoa(randIntRange(f, 1, 8)) + ver := strconv.Itoa(randIntRange(f, 4, 6)) + "." + strconv.Itoa(randIntRange(f, 0, 2)) + + mobileDevices := []string{ + "iPhone; CPU iPhone OS", + "iPad; CPU OS", + } + + platforms := []string{ + "(Windows; U; " + windowsPlatformToken(f) + ") AppleWebKit/" + randNum + " (KHTML, like Gecko) Version/" + ver + " Safari/" + randNum, + "(" + macPlatformToken(f) + " rv:" + strconv.Itoa(randIntRange(f, 4, 7)) + ".0; en-US) AppleWebKit/" + randNum + " (KHTML, like Gecko) Version/" + ver + " Safari/" + randNum, + "(" + randomString(f, mobileDevices) + " " + strconv.Itoa(randIntRange(f, 7, 9)) + "_" + strconv.Itoa(randIntRange(f, 0, 3)) + "_" + strconv.Itoa(randIntRange(f, 1, 3)) + " like Mac OS X; " + "en-US" + ") AppleWebKit/" + randNum + " (KHTML, like Gecko) Version/" + strconv.Itoa(randIntRange(f, 3, 5)) + ".0.5 Mobile/8B" + strconv.Itoa(randIntRange(f, 111, 120)) + " Safari/6" + randNum, + } + + return "Mozilla/5.0 " + randomString(f, platforms) +} + +// OperaUserAgent will generate a random opera browser user agent string +func OperaUserAgent() string { return operaUserAgent(GlobalFaker) } + +// OperaUserAgent will generate a random opera browser user agent string +func (f *Faker) OperaUserAgent() string { return operaUserAgent(f) } + +func operaUserAgent(f *Faker) string { + platform := "(" + randomPlatform(f) + "; en-US) Presto/2." + strconv.Itoa(randIntRange(f, 8, 13)) + "." + strconv.Itoa(randIntRange(f, 160, 355)) + " Version/" + strconv.Itoa(randIntRange(f, 10, 13)) + ".00" + + return "Opera/" + strconv.Itoa(randIntRange(f, 8, 10)) + "." 
+ strconv.Itoa(randIntRange(f, 10, 99)) + " " + platform +} + +// linuxPlatformToken will generate a random linux platform +func linuxPlatformToken(f *Faker) string { + return "X11; Linux " + getRandValue(f, []string{"computer", "linux_processor"}) +} + +// macPlatformToken will generate a random mac platform +func macPlatformToken(f *Faker) string { + return "Macintosh; " + getRandValue(f, []string{"computer", "mac_processor"}) + " Mac OS X 10_" + strconv.Itoa(randIntRange(f, 5, 9)) + "_" + strconv.Itoa(randIntRange(f, 0, 10)) +} + +// windowsPlatformToken will generate a random windows platform +func windowsPlatformToken(f *Faker) string { + return getRandValue(f, []string{"computer", "windows_platform"}) +} + +// randomPlatform will generate a random platform +func randomPlatform(f *Faker) string { + platforms := []string{ + linuxPlatformToken(f), + macPlatformToken(f), + windowsPlatformToken(f), + } + + return randomString(f, platforms) +} + +// HTTPVersion will generate a random http version +func HTTPVersion() string { return httpVersion(GlobalFaker) } + +// HTTPVersion will generate a random http version +func (f *Faker) HTTPVersion() string { return httpVersion(f) } + +func httpVersion(f *Faker) string { + return getRandValue(f, []string{"internet", "http_version"}) +} + +func addInternetLookup() { + AddFuncLookup("url", Info{ + Display: "URL", + Category: "internet", + Description: "Web address that specifies the location of a resource on the internet", + Example: "http://www.principalproductize.biz/target", + Output: "string", + Aliases: []string{"url string", "web address", "internet link", "website url", "resource locator"}, + Keywords: []string{"url", "web", "address", "http", "https", "www", "protocol", "scheme", "path", "domain", "location", "resource"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return url(f), nil + }, + }) + + AddFuncLookup("domainname", Info{ + Display: "Domain Name", + Category: "internet", + Description: "Human-readable web address used to identify websites on the internet", + Example: "centraltarget.biz", + Output: "string", + Aliases: []string{"domain name", "website name", "internet domain", "dns name", "site domain"}, + Keywords: []string{"domain", "name", "web", "address", "dns", "hostname", "resolve", "centraltarget", "biz", "website"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return domainName(f), nil + }, + }) + + AddFuncLookup("domainsuffix", Info{ + Display: "Domain Suffix", + Category: "internet", + Description: "The part of a domain name that comes after the last dot, indicating its type or purpose", + Example: "org", + Output: "string", + Aliases: []string{"domain suffix", "domain extension", "top level domain", "domain ending"}, + Keywords: []string{"domain", "suffix", "tld", "top-level", "extension", "org", "com", "net", "gov", "edu", "mil", "int"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return domainSuffix(f), nil + }, + }) + + AddFuncLookup("ipv4address", Info{ + Display: "IPv4 Address", + Category: "internet", + Description: "Numerical label assigned to devices on a network for identification and communication", + Example: "222.83.191.222", + Output: "string", + Aliases: []string{"ip address", "network address", "internet address", "device ip", "ipv4 label"}, + Keywords: []string{"ipv4", "ip", "network", "internet", "protocol", "communication", "dotted", "decimal"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return 
ipv4Address(f), nil + }, + }) + + AddFuncLookup("ipv6address", Info{ + Display: "IPv6 Address", + Category: "internet", + Description: "Numerical label assigned to devices on a network, providing a larger address space than IPv4 for internet communication", + Example: "2001:cafe:8898:ee17:bc35:9064:5866:d019", + Output: "string", + Aliases: []string{"ip address", "network address", "internet address", "hex ip", "ipv6 label"}, + Keywords: []string{"ipv6", "ip", "network", "protocol", "hexadecimal", "identification"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return ipv6Address(f), nil + }, + }) + + AddFuncLookup("httpmethod", Info{ + Display: "HTTP Method", + Category: "internet", + Description: "Verb used in HTTP requests to specify the desired action to be performed on a resource", + Example: "HEAD", + Output: "string", + Aliases: []string{"http verb", "http action", "http request", "http command", "method name"}, + Keywords: []string{"http", "method", "verb", "get", "post", "put", "delete", "patch", "options", "head", "request", "action"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return httpMethod(f), nil + }, + }) + + AddFuncLookup("loglevel", Info{ + Display: "Log Level", + Category: "internet", + Description: "Classification used in logging to indicate the severity or priority of a log entry", + Example: "error", + Output: "string", + Aliases: []string{"log severity", "logging level", "log classification", "priority level", "event level"}, + Keywords: []string{"log", "level", "severity", "priority", "classification", "error", "warn", "info", "debug", "trace", "fatal", "critical"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return logLevel(f, ""), nil + }, + }) + + AddFuncLookup("useragent", Info{ + Display: "User Agent", + Category: "internet", + Description: "String sent by a web browser to identify itself when requesting web content", + Example: "Mozilla/5.0 (Windows NT 5.0) AppleWebKit/5362 (KHTML, like Gecko) Chrome/37.0.834.0 Mobile Safari/5362", + Output: "string", + Aliases: []string{"ua string", "browser ua", "http user agent", "client identifier", "browser identifier"}, + Keywords: []string{"useragent", "browser", "http", "request", "mozilla", "applewebkit", "chrome", "firefox", "safari", "opera", "mobile"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return userAgent(f), nil + }, + }) + + AddFuncLookup("chromeuseragent", Info{ + Display: "Chrome User Agent", + Category: "internet", + Description: "The specific identification string sent by the Google Chrome web browser when making requests on the internet", + Example: "Mozilla/5.0 (X11; Linux i686) AppleWebKit/5312 (KHTML, like Gecko) Chrome/39.0.836.0 Mobile Safari/5312", + Output: "string", + Aliases: []string{"chrome ua", "chrome browser ua", "google chrome ua", "chrome identifier", "chrome user agent"}, + Keywords: []string{"chrome", "google", "browser", "ua", "useragent", "applewebkit", "khtml", "gecko", "safari", "version"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return chromeUserAgent(f), nil + }, + }) + + AddFuncLookup("firefoxuseragent", Info{ + Display: "Firefox User Agent", + Category: "internet", + Description: "The specific identification string sent by the Firefox web browser when making requests on the internet", + Example: "Mozilla/5.0 (Macintosh; U; PPC Mac OS X 10_8_3 rv:7.0) Gecko/1900-07-01 Firefox/37.0", + Output: "string", + Aliases: []string{"firefox ua", "firefox 
browser ua", "mozilla firefox ua", "gecko ua", "firefox identifier"}, + Keywords: []string{"firefox", "mozilla", "browser", "ua", "useragent", "gecko", "macintosh", "ppc", "version"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return firefoxUserAgent(f), nil + }, + }) + + AddFuncLookup("operauseragent", Info{ + Display: "Opera User Agent", + Category: "internet", + Description: "The specific identification string sent by the Opera web browser when making requests on the internet", + Example: "Opera/8.39 (Macintosh; U; PPC Mac OS X 10_8_7; en-US) Presto/2.9.335 Version/10.00", + Output: "string", + Aliases: []string{"opera ua", "opera browser ua", "opera identifier", "opera client", "opera user agent"}, + Keywords: []string{"opera", "presto", "ua", "browser", "useragent", "macintosh", "ppc", "os", "version"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return operaUserAgent(f), nil + }, + }) + + AddFuncLookup("safariuseragent", Info{ + Display: "Safari User Agent", + Category: "internet", + Description: "The specific identification string sent by the Safari web browser when making requests on the internet", + Example: "Mozilla/5.0 (iPad; CPU OS 8_3_2 like Mac OS X; en-US) AppleWebKit/531.15.6 (KHTML, like Gecko) Version/4.0.5 Mobile/8B120 Safari/6531.15.6", + Output: "string", + Aliases: []string{"safari ua", "apple safari ua", "safari browser ua", "safari identifier", "safari user agent"}, + Keywords: []string{"safari", "apple", "ipad", "os", "applewebkit", "khtml", "gecko", "browser", "ua", "mobile"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return safariUserAgent(f), nil + }, + }) + + AddFuncLookup("httpstatuscode", Info{ + Display: "HTTP Status Code", + Category: "internet", + Description: "Random HTTP status code", + Example: "200", + Output: "int", + Aliases: []string{"http status", "response code", "http response", "server status", "status identifier"}, + Keywords: []string{"http", "status", "code", "server", "response", "200", "404", "500", "301", "302", "401", "403"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return httpStatusCode(f), nil + }, + }) + + AddFuncLookup("httpstatuscodesimple", Info{ + Display: "HTTP Status Code Simple", + Category: "internet", + Description: "Three-digit number returned by a web server to indicate the outcome of an HTTP request", + Example: "404", + Output: "int", + Aliases: []string{"http status simple", "simple response code", "http response simple", "status code", "server code"}, + Keywords: []string{"http", "status", "code", "server", "response", "200", "404", "500", "301", "302", "401", "403"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return httpStatusCodeSimple(f), nil + }, + }) + + AddFuncLookup("httpversion", Info{ + Display: "HTTP Version", + Category: "internet", + Description: "Number indicating the version of the HTTP protocol used for communication between a client and a server", + Example: "HTTP/1.1", + Output: "string", + Aliases: []string{"http version", "protocol version", "http protocol", "http identifier", "http version string"}, + Keywords: []string{"http", "version", "protocol", "communication", "client", "server"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return httpVersion(f), nil + }, + }) + + AddFuncLookup("macaddress", Info{ + Display: "MAC Address", + Category: "internet", + Description: "Unique identifier assigned to network interfaces, often used in Ethernet 
networks", + Example: "cb:ce:06:94:22:e9", + Output: "string", + Aliases: []string{"mac address", "hardware address", "ethernet address", "network identifier", "link-layer address"}, + Keywords: []string{"mac", "address", "hardware", "ethernet", "network", "identifier", "oui", "vendor", "colon", "hexadecimal"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return macAddress(f), nil + }, + }) + +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/json.go b/vendor/github.com/brianvoe/gofakeit/v7/json.go new file mode 100644 index 0000000000..39203123ab --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/json.go @@ -0,0 +1,350 @@ +package gofakeit + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "reflect" + "strconv" +) + +// JSONOptions defines values needed for json generation +type JSONOptions struct { + Type string `json:"type" xml:"type" fake:"{randomstring:[array,object]}"` // array or object + RowCount int `json:"row_count" xml:"row_count" fake:"{number:1,10}"` + Indent bool `json:"indent" xml:"indent"` + Fields []Field `json:"fields" xml:"fields" fake:"{fields}"` +} + +type jsonKeyVal struct { + Key string + Value any +} + +type jsonOrderedKeyVal []*jsonKeyVal + +func (okv jsonOrderedKeyVal) MarshalJSON() ([]byte, error) { + var buf bytes.Buffer + + buf.WriteString("{") + for i, kv := range okv { + // Add comma to all except last one + if i != 0 { + buf.WriteString(",") + } + + // Marshal key and write + key, err := json.Marshal(kv.Key) + if err != nil { + return nil, err + } + buf.Write(key) + + // Write colon separator + buf.WriteString(":") + + // Marshal value and write + val, err := json.Marshal(kv.Value) + if err != nil { + return nil, err + } + buf.Write(val) + } + buf.WriteString("}") + + return buf.Bytes(), nil +} + +// JSON generates an object or an array of objects in json format. +// A nil JSONOptions returns a randomly structured JSON. +func JSON(jo *JSONOptions) ([]byte, error) { return jsonFunc(GlobalFaker, jo) } + +// JSON generates an object or an array of objects in json format. +// A nil JSONOptions returns a randomly structured JSON. 
+func (f *Faker) JSON(jo *JSONOptions) ([]byte, error) { return jsonFunc(f, jo) } + +// JSON generates an object or an array of objects in json format +func jsonFunc(f *Faker, jo *JSONOptions) ([]byte, error) { + if jo == nil { + // We didn't get a JSONOptions, so create a new random one + err := f.Struct(&jo) + if err != nil { + return nil, err + } + } + + // Check to make sure they passed in a type + if jo.Type != "array" && jo.Type != "object" { + return nil, errors.New("invalid type, must be array or object") + } + + if len(jo.Fields) <= 0 { + return nil, errors.New("must pass fields in order to build json object(s)") + } + + if jo.Type == "object" { + v := make(jsonOrderedKeyVal, len(jo.Fields)) + + // Loop through fields and add to them to map[string]any + for i, field := range jo.Fields { + if field.Function == "autoincrement" { + // Object only has one + v[i] = &jsonKeyVal{Key: field.Name, Value: 1} + continue + } + + // Get function info + funcInfo := GetFuncLookup(field.Function) + if funcInfo == nil { + return nil, errors.New("invalid function, " + field.Function + " does not exist") + } + + // Call function value + value, err := funcInfo.Generate(f, &field.Params, funcInfo) + if err != nil { + return nil, err + } + + if _, ok := value.([]byte); ok { + // If it's a slice, unmarshal it into an interface + var val any + err := json.Unmarshal(value.([]byte), &val) + if err != nil { + return nil, err + } + value = val + } + + v[i] = &jsonKeyVal{Key: field.Name, Value: value} + + } + + // Marshal into bytes + if jo.Indent { + j, _ := json.MarshalIndent(v, "", " ") + return j, nil + } + + j, _ := json.Marshal(v) + return j, nil + } + + if jo.Type == "array" { + // Make sure you set a row count + if jo.RowCount <= 0 { + return nil, errors.New("must have row count") + } + + v := make([]jsonOrderedKeyVal, jo.RowCount) + + for i := 0; i < int(jo.RowCount); i++ { + vr := make(jsonOrderedKeyVal, len(jo.Fields)) + + // Loop through fields and add to them to map[string]any + for ii, field := range jo.Fields { + if field.Function == "autoincrement" { + vr[ii] = &jsonKeyVal{Key: field.Name, Value: i + 1} // +1 because index starts with 0 + continue + } + + // Get function info + funcInfo := GetFuncLookup(field.Function) + if funcInfo == nil { + return nil, errors.New("invalid function, " + field.Function + " does not exist") + } + + // Call function value + value, err := funcInfo.Generate(f, &field.Params, funcInfo) + if err != nil { + return nil, err + } + + if _, ok := value.([]byte); ok { + // If it's a slice, unmarshal it into an interface + var val any + err := json.Unmarshal(value.([]byte), &val) + if err != nil { + return nil, err + } + value = val + } + + vr[ii] = &jsonKeyVal{Key: field.Name, Value: value} + } + + v[i] = vr + } + + // Marshal into bytes + if jo.Indent { + j, _ := json.MarshalIndent(v, "", " ") + return j, nil + } + + j, _ := json.Marshal(v) + return j, nil + } + + return nil, errors.New("invalid type, must be array or object") +} + +func addFileJSONLookup() { + AddFuncLookup("json", Info{ + Display: "JSON", + Category: "file", + Description: "Format for structured data interchange used in programming, returns an object or an array of objects", + Example: `[ + { "first_name": "Markus", "last_name": "Moen", "password": "Dc0VYXjkWABx" }, + { "first_name": "Osborne", "last_name": "Hilll", "password": "XPJ9OVNbs5lm" }, + { "first_name": "Mertie", "last_name": "Halvorson", "password": "eyl3bhwfV8wA" } + ]`, + Output: "[]byte", + ContentType: "application/json", + Aliases: 
[]string{"data", "interchange", "structured", "format", "serialization", "api"}, + Keywords: []string{"json", "object", "array", "fields", "indent", "rowcount", "type", "serialize", "deserialize", "marshal", "unmarshal"}, + Params: []Param{ + {Field: "type", Display: "Type", Type: "string", Default: "object", Options: []string{"object", "array"}, Description: "Type of JSON, object or array"}, + {Field: "rowcount", Display: "Row Count", Type: "int", Default: "100", Description: "Number of rows in JSON array"}, + {Field: "indent", Display: "Indent", Type: "bool", Default: "false", Description: "Whether or not to add indents and newlines"}, + {Field: "fields", Display: "Fields", Type: "[]Field", Description: "Fields containing key name and function to run in json format"}, + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + jo := JSONOptions{} + + typ, err := info.GetString(m, "type") + if err != nil { + return nil, err + } + jo.Type = typ + + rowcount, err := info.GetInt(m, "rowcount") + if err != nil { + return nil, err + } + jo.RowCount = rowcount + + indent, err := info.GetBool(m, "indent") + if err != nil { + return nil, err + } + jo.Indent = indent + + fieldsStr, err := info.GetStringArray(m, "fields") + if err != nil { + return nil, err + } + + // Check to make sure fields has length + if len(fieldsStr) > 0 { + jo.Fields = make([]Field, len(fieldsStr)) + + for i, f := range fieldsStr { + // Unmarshal fields string into fields array + err = json.Unmarshal([]byte(f), &jo.Fields[i]) + if err != nil { + return nil, err + } + } + } + + return jsonFunc(f, &jo) + }, + }) +} + +// encoding/json.RawMessage is a special case of []byte +// it cannot be handled as a reflect.Array/reflect.Slice +// because it needs additional structure in the output +func rJsonRawMessage(f *Faker, v reflect.Value, tag string) error { + if tag != "" { + err := rCustom(f, v, tag) + if err == nil { + jsonData := v.Bytes() + if !json.Valid(jsonData) { + fName, _ := parseNameAndParamsFromTag(tag) + return errors.New("custom function " + fName + " returned invalid json data: " + string(jsonData)) + } + } + return err + } + + b, err := f.JSON(nil) + if err != nil { + return err + } + + v.SetBytes(b) + return nil +} + +// encoding/json.Number is a special case of string +// that represents a JSON number literal. +// It cannot be handled as a string because it needs to +// represent an integer or a floating-point number. 
+func rJsonNumber(f *Faker, v reflect.Value, tag string) error { + var ret json.Number + + var numberType string + + if tag == "" { + numberType = f.RandomString([]string{"int", "float"}) + + switch numberType { + case "int": + retInt := f.Int16() + ret = json.Number(strconv.Itoa(int(retInt))) + case "float": + retFloat := f.Float64() + ret = json.Number(strconv.FormatFloat(retFloat, 'f', -1, 64)) + } + } else { + fName, fParams := parseNameAndParamsFromTag(tag) + info := GetFuncLookup(fName) + if info == nil { + return fmt.Errorf("invalid function, %s does not exist", fName) + } + + // Parse map params + mapParams, err := parseMapParams(info, fParams) + if err != nil { + return err + } + + valueIface, err := info.Generate(f, mapParams, info) + if err != nil { + return err + } + + switch value := valueIface.(type) { + case int: + ret = json.Number(strconv.FormatInt(int64(value), 10)) + case int8: + ret = json.Number(strconv.FormatInt(int64(value), 10)) + case int16: + ret = json.Number(strconv.FormatInt(int64(value), 10)) + case int32: + ret = json.Number(strconv.FormatInt(int64(value), 10)) + case int64: + ret = json.Number(strconv.FormatInt(int64(value), 10)) + case uint: + ret = json.Number(strconv.FormatUint(uint64(value), 10)) + case uint8: + ret = json.Number(strconv.FormatUint(uint64(value), 10)) + case uint16: + ret = json.Number(strconv.FormatUint(uint64(value), 10)) + case uint32: + ret = json.Number(strconv.FormatUint(uint64(value), 10)) + case uint64: + ret = json.Number(strconv.FormatUint(uint64(value), 10)) + case float32: + ret = json.Number(strconv.FormatFloat(float64(value), 'f', -1, 64)) + case float64: + ret = json.Number(strconv.FormatFloat(float64(value), 'f', -1, 64)) + default: + return fmt.Errorf("invalid type, %s is not a valid type for json.Number", reflect.TypeOf(value)) + } + } + v.Set(reflect.ValueOf(ret)) + return nil +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/languages.go b/vendor/github.com/brianvoe/gofakeit/v7/languages.go new file mode 100644 index 0000000000..e0f6ccf695 --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/languages.go @@ -0,0 +1,89 @@ +package gofakeit + +// Language will return a random language +func Language() string { return language(GlobalFaker) } + +// Language will return a random language +func (f *Faker) Language() string { return language(f) } + +func language(f *Faker) string { return getRandValue(f, []string{"language", "long"}) } + +// LanguageAbbreviation will return a random language abbreviation +func LanguageAbbreviation() string { return languageAbbreviation(GlobalFaker) } + +// LanguageAbbreviation will return a random language abbreviation +func (f *Faker) LanguageAbbreviation() string { return languageAbbreviation(f) } + +func languageAbbreviation(f *Faker) string { return getRandValue(f, []string{"language", "short"}) } + +// LanguageBCP will return a random language BCP (Best Current Practices) +func LanguageBCP() string { return languageBCP(GlobalFaker) } + +// LanguageBCP will return a random language BCP (Best Current Practices) +func (f *Faker) LanguageBCP() string { return languageBCP(f) } + +func languageBCP(f *Faker) string { return getRandValue(f, []string{"language", "bcp"}) } + +// ProgrammingLanguage will return a random programming language +func ProgrammingLanguage() string { return programmingLanguage(GlobalFaker) } + +// ProgrammingLanguage will return a random programming language +func (f *Faker) ProgrammingLanguage() string { return programmingLanguage(f) } + +func 
programmingLanguage(f *Faker) string { + return getRandValue(f, []string{"language", "programming"}) +} + +func addLanguagesLookup() { + AddFuncLookup("language", Info{ + Display: "Language", + Category: "language", + Description: "System of communication using symbols, words, and grammar to convey meaning between individuals", + Example: "Kazakh", + Output: "string", + Aliases: []string{"spoken tongue", "dialect name", "native language", "speech form", "linguistic system"}, + Keywords: []string{"language", "communication", "symbols", "words", "grammar", "meaning", "system", "convey", "individuals"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return language(f), nil + }, + }) + + AddFuncLookup("languageabbreviation", Info{ + Display: "Language Abbreviation", + Category: "language", + Description: "Shortened form of a language's name", + Example: "kk", + Output: "string", + Aliases: []string{"language code", "iso code", "locale code", "short form", "abbreviated tag"}, + Keywords: []string{"abbreviation", "identifier", "shortened", "representation", "two-letter", "three-letter", "standard", "locale"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return languageAbbreviation(f), nil + }, + }) + + AddFuncLookup("languagebcp", Info{ + Display: "Language BCP", + Category: "language", + Description: "Set of guidelines and standards for identifying and representing languages in computing and internet protocols", + Example: "en-US", + Output: "string", + Aliases: []string{"bcp47 tag", "language tag", "locale identifier", "regional code", "protocol language code"}, + Keywords: []string{"guidelines", "standards", "rfc", "internet", "protocols", "representation", "locale", "region", "country"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return languageBCP(f), nil + }, + }) + + AddFuncLookup("programminglanguage", Info{ + Display: "Programming Language", + Category: "language", + Description: "Formal system of instructions used to create software and perform computational tasks", + Example: "Go", + Output: "string", + Aliases: []string{"coding language", "scripting language", "software language", "development language", "computer language"}, + Keywords: []string{"programming", "instructions", "formal", "system", "tasks", "development", "compilation", "execution"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return programmingLanguage(f), nil + }, + }) +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/logo.png b/vendor/github.com/brianvoe/gofakeit/v7/logo.png new file mode 100644 index 0000000000..1f40f061a2 Binary files /dev/null and b/vendor/github.com/brianvoe/gofakeit/v7/logo.png differ diff --git a/vendor/github.com/brianvoe/gofakeit/v7/lookup.go b/vendor/github.com/brianvoe/gofakeit/v7/lookup.go new file mode 100644 index 0000000000..9ac1987bd2 --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/lookup.go @@ -0,0 +1,515 @@ +package gofakeit + +import ( + "encoding/json" + "fmt" + "reflect" + "strconv" + "strings" + "sync" +) + +// FuncLookups is the primary map array with mapping to all available data +var FuncLookups map[string]Info +var lockFuncLookups sync.Mutex + +// MapParams is the values to pass into a lookup generate +type MapParams map[string]MapParamsValue + +type MapParamsValue []string + +// Info structures fields to better break down what each one generates +type Info struct { + Display string `json:"display"` // display name + Category string `json:"category"` // 
category + Description string `json:"description"` // description + Example string `json:"example"` // example + Output string `json:"output"` // output type + Aliases []string `json:"aliases"` // alt names users might type + Keywords []string `json:"keywords"` // free words and domain terms + ContentType string `json:"content_type"` // content type + Params []Param `json:"params"` // params + Any any `json:"any"` // any + Generate func(f *Faker, m *MapParams, info *Info) (any, error) `json:"-"` // generate function +} + +// Param is a breakdown of param requirements and type definition +type Param struct { + Field string `json:"field"` + Display string `json:"display"` + Type string `json:"type"` + Optional bool `json:"optional"` + Default string `json:"default"` + Options []string `json:"options"` + Description string `json:"description"` +} + +// Field is used for defining what name and function you to generate for file outuputs +type Field struct { + Name string `json:"name"` + Function string `json:"function"` + Params MapParams `json:"params"` +} + +func init() { initLookup() } + +// init will add all the functions to MapLookups +func initLookup() { + addAddressLookup() + addAnimalLookup() + addAppLookup() + addAuthLookup() + addBeerLookup() + addBookLookup() + addCarLookup() + addCelebrityLookup() + addColorLookup() + addCompanyLookup() + addDatabaseSQLLookup() + addDateTimeLookup() + addEmojiLookup() + addErrorLookup() + addFileCSVLookup() + addFileJSONLookup() + addFileLookup() + addFileXMLLookup() + addFinanceLookup() + addFoodLookup() + addGameLookup() + addGenerateLookup() + addHackerLookup() + addHipsterLookup() + addHtmlLookup() + addImageLookup() + addInternetLookup() + addLanguagesLookup() + addMinecraftLookup() + addMiscLookup() + addMovieLookup() + addNumberLookup() + addPaymentLookup() + addPersonLookup() + addProductLookup() + addSchoolLookup() + addSongLookup() + addStringLookup() + addTemplateLookup() + addWeightedLookup() + addWordAdjectiveLookup() + addWordAdverbLookup() + addWordConnectiveLookup() + addWordGeneralLookup() + addWordGrammerLookup() + addWordNounLookup() + addWordPrepositionLookup() + addWordPronounLookup() + addWordVerbLookup() + addWordMiscLookup() + addTextLookup() +} + +// internalFuncLookups is the internal map array with mapping to all available data +var internalFuncLookups map[string]Info = map[string]Info{ + "fields": { + Description: "Example fields for generating csv, json, xml, etc", + Output: "gofakeit.Field", + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + function, _ := GetRandomSimpleFunc(f) + return Field{ + Name: function, + Function: function, + }, nil + }, + }, +} + +// NewMapParams will create a new MapParams +func NewMapParams() *MapParams { + return &MapParams{} +} + +// Add will take in a field and value and add it to the map params type +func (m *MapParams) Add(field string, value string) { + _, ok := (*m)[field] + if !ok { + (*m)[field] = []string{value} + return + } + + (*m)[field] = append((*m)[field], value) +} + +// Get will return the array of string from the provided field +func (m *MapParams) Get(field string) []string { + return (*m)[field] +} + +// Size will return the total size of the underlying map +func (m *MapParams) Size() int { + size := 0 + for range *m { + size++ + } + return size +} + +// UnmarshalJSON will unmarshal the json into the []string +func (m *MapParamsValue) UnmarshalJSON(data []byte) error { + // check if the data is an array + // if so, marshal it into m + if data[0] == 
'[' { + var values []any + err := json.Unmarshal(data, &values) + if err != nil { + return err + } + + // convert the values to array of strings + for _, value := range values { + typeOf := reflect.TypeOf(value).Kind().String() + + if typeOf == "map" { + v, err := json.Marshal(value) + if err != nil { + return err + } + *m = append(*m, string(v)) + } else { + *m = append(*m, fmt.Sprintf("%v", value)) + } + } + return nil + } + + // if not, then convert into a string and add it to m + var s any + if err := json.Unmarshal(data, &s); err != nil { + return err + } + + *m = append(*m, fmt.Sprintf("%v", s)) + return nil +} + +func GetRandomSimpleFunc(f *Faker) (string, Info) { + // Loop through all the functions and add them to a slice + var keys []string + for k, info := range FuncLookups { + // Only grab simple functions + if info.Params == nil { + keys = append(keys, k) + } + } + + // Randomly grab a function from the slice + randomKey := randomString(f, keys) + + // Return the function name and info + return randomKey, FuncLookups[randomKey] +} + +// AddFuncLookup takes a field and adds it to map +func AddFuncLookup(functionName string, info Info) { + if FuncLookups == nil { + FuncLookups = make(map[string]Info) + } + + // Check content type + if info.ContentType == "" { + info.ContentType = "text/plain" + } + + lockFuncLookups.Lock() + FuncLookups[functionName] = info + lockFuncLookups.Unlock() +} + +// GetFuncLookup will lookup +func GetFuncLookup(functionName string) *Info { + var info Info + var ok bool + + // Check internal functions first + info, ok = internalFuncLookups[functionName] + if ok { + return &info + } + + info, ok = FuncLookups[functionName] + if ok { + return &info + } + + return nil +} + +// RemoveFuncLookup will remove a function from lookup +func RemoveFuncLookup(functionName string) { + _, ok := FuncLookups[functionName] + if !ok { + return + } + + lockFuncLookups.Lock() + delete(FuncLookups, functionName) + lockFuncLookups.Unlock() +} + +// GetAny will retrieve Any field from Info +func (i *Info) GetAny(m *MapParams, field string) (any, error) { + _, value, err := i.GetField(m, field) + if err != nil { + return nil, err + } + + // Make sure value[0] exists + if len(value) == 0 { + return nil, fmt.Errorf("could not find field: %s", field) + } + + var anyValue any + + // Try to convert to int + valueInt, err := strconv.ParseInt(value[0], 10, 64) + if err == nil { + return int(valueInt), nil + } + + // Try to convert to float + valueFloat, err := strconv.ParseFloat(value[0], 64) + if err == nil { + return valueFloat, nil + } + + // Try to convert to boolean + valueBool, err := strconv.ParseBool(value[0]) + if err == nil { + return valueBool, nil + } + + err = json.Unmarshal([]byte(value[0]), &anyValue) + if err == nil { + return valueBool, nil + } + + return value[0], nil +} + +// GetMap will retrieve map[string]any field from data +func (i *Info) GetMap(m *MapParams, field string) (map[string]any, error) { + _, value, err := i.GetField(m, field) + if err != nil { + return nil, err + } + + var mapValue map[string]any + err = json.Unmarshal([]byte(value[0]), &mapValue) + if err != nil { + return nil, fmt.Errorf("%s field could not parse to map[string]any", field) + } + + return mapValue, nil +} + +// GetField will retrieve field from data +func (i *Info) GetField(m *MapParams, field string) (*Param, []string, error) { + // Get param + var p *Param + for _, param := range i.Params { + if param.Field == field { + p = &param + break + } + } + if p == nil { + return nil, nil,
fmt.Errorf("could not find param field %s", field) + } + + // Get value from map + if m != nil { + value, ok := (*m)[field] + if !ok { + // If default isnt empty use default + if p.Default != "" { + return p, []string{p.Default}, nil + } + + return nil, nil, fmt.Errorf("could not find field: %s", field) + } + + return p, value, nil + } else if p.Default != "" { + // If p.Type is []uint, then we need to convert it to []string + if strings.HasPrefix(p.Default, "[") { + // Remove [] from type + defaultClean := p.Default[1 : len(p.Default)-1] + + // Split on comma + defaultSplit := strings.Split(defaultClean, ",") + + return p, defaultSplit, nil + } + + // If default isnt empty use default + return p, []string{p.Default}, nil + } + + return nil, nil, fmt.Errorf("could not find field: %s", field) +} + +// GetBool will retrieve boolean field from data +func (i *Info) GetBool(m *MapParams, field string) (bool, error) { + p, value, err := i.GetField(m, field) + if err != nil { + return false, err + } + + // Try to convert to boolean + valueBool, err := strconv.ParseBool(value[0]) + if err != nil { + return false, fmt.Errorf("%s field could not parse to bool value", p.Field) + } + + return valueBool, nil +} + +// GetInt will retrieve int field from data +func (i *Info) GetInt(m *MapParams, field string) (int, error) { + p, value, err := i.GetField(m, field) + if err != nil { + return 0, err + } + + // Try to convert to int + valueInt, err := strconv.ParseInt(value[0], 10, 64) + if err != nil { + return 0, fmt.Errorf("%s field could not parse to int value", p.Field) + } + + return int(valueInt), nil +} + +// GetUint will retrieve uint field from data +func (i *Info) GetUint(m *MapParams, field string) (uint, error) { + p, value, err := i.GetField(m, field) + if err != nil { + return 0, err + } + + // Try to convert to int + valueUint, err := strconv.ParseUint(value[0], 10, 64) + if err != nil { + return 0, fmt.Errorf("%s field could not parse to int value", p.Field) + } + + return uint(valueUint), nil +} + +// GetFloat32 will retrieve int field from data +func (i *Info) GetFloat32(m *MapParams, field string) (float32, error) { + p, value, err := i.GetField(m, field) + if err != nil { + return 0, err + } + + // Try to convert to float + valueFloat, err := strconv.ParseFloat(value[0], 32) + if err != nil { + return 0, fmt.Errorf("%s field could not parse to float value", p.Field) + } + + return float32(valueFloat), nil +} + +// GetFloat64 will retrieve int field from data +func (i *Info) GetFloat64(m *MapParams, field string) (float64, error) { + p, value, err := i.GetField(m, field) + if err != nil { + return 0, err + } + + // Try to convert to float + valueFloat, err := strconv.ParseFloat(value[0], 64) + if err != nil { + return 0, fmt.Errorf("%s field could not parse to float value", p.Field) + } + + return valueFloat, nil +} + +// GetString will retrieve string field from data +func (i *Info) GetString(m *MapParams, field string) (string, error) { + _, value, err := i.GetField(m, field) + if err != nil { + return "", err + } + + return value[0], nil +} + +// GetStringArray will retrieve []string field from data +func (i *Info) GetStringArray(m *MapParams, field string) ([]string, error) { + _, values, err := i.GetField(m, field) + if err != nil { + return nil, err + } + + return values, nil +} + +// GetIntArray will retrieve []int field from data +func (i *Info) GetIntArray(m *MapParams, field string) ([]int, error) { + _, value, err := i.GetField(m, field) + if err != nil { + return nil, err + } 
+ + var ints []int + for i := 0; i < len(value); i++ { + valueInt, err := strconv.ParseInt(value[i], 10, 64) + if err != nil { + return nil, fmt.Errorf("%s value could not parse to int", value[i]) + } + ints = append(ints, int(valueInt)) + } + + return ints, nil +} + +// GetUintArray will retrieve []uint field from data +func (i *Info) GetUintArray(m *MapParams, field string) ([]uint, error) { + _, value, err := i.GetField(m, field) + if err != nil { + return nil, err + } + + var uints []uint + for i := 0; i < len(value); i++ { + valueUint, err := strconv.ParseUint(value[i], 10, 64) + if err != nil { + return nil, fmt.Errorf("%s value could not parse to uint", value[i]) + } + uints = append(uints, uint(valueUint)) + } + + return uints, nil +} + +// GetFloat32Array will retrieve []float field from data +func (i *Info) GetFloat32Array(m *MapParams, field string) ([]float32, error) { + _, value, err := i.GetField(m, field) + if err != nil { + return nil, err + } + + var floats []float32 + for i := 0; i < len(value); i++ { + valueFloat, err := strconv.ParseFloat(value[i], 32) + if err != nil { + return nil, fmt.Errorf("%s value could not parse to float", value[i]) + } + floats = append(floats, float32(valueFloat)) + } + + return floats, nil +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/merch.png b/vendor/github.com/brianvoe/gofakeit/v7/merch.png new file mode 100644 index 0000000000..a7e7276872 Binary files /dev/null and b/vendor/github.com/brianvoe/gofakeit/v7/merch.png differ diff --git a/vendor/github.com/brianvoe/gofakeit/v7/minecraft.go b/vendor/github.com/brianvoe/gofakeit/v7/minecraft.go new file mode 100644 index 0000000000..f15daa87d8 --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/minecraft.go @@ -0,0 +1,401 @@ +package gofakeit + +// MinecraftOre will generate a random Minecraft ore +func MinecraftOre() string { return minecraftOre(GlobalFaker) } + +// MinecraftOre will generate a random Minecraft ore +func (f *Faker) MinecraftOre() string { return minecraftOre(f) } + +func minecraftOre(f *Faker) string { return getRandValue(f, []string{"minecraft", "ore"}) } + +// MinecraftWood will generate a random Minecraft wood +func MinecraftWood() string { return minecraftWood(GlobalFaker) } + +// MinecraftWood will generate a random Minecraft wood +func (f *Faker) MinecraftWood() string { return minecraftWood(f) } + +func minecraftWood(f *Faker) string { return getRandValue(f, []string{"minecraft", "wood"}) } + +// MinecraftArmorTier will generate a random Minecraft armor tier +func MinecraftArmorTier() string { return minecraftArmorTier(GlobalFaker) } + +// MinecraftArmorTier will generate a random Minecraft armor tier +func (f *Faker) MinecraftArmorTier() string { return minecraftArmorTier(f) } + +func minecraftArmorTier(f *Faker) string { + return getRandValue(f, []string{"minecraft", "armortier"}) +} + +// MinecraftArmorPart will generate a random Minecraft armor part +func MinecraftArmorPart() string { return minecraftArmorPart(GlobalFaker) } + +// MinecraftArmorPart will generate a random Minecraft armor part +func (f *Faker) MinecraftArmorPart() string { return minecraftArmorPart(f) } + +func minecraftArmorPart(f *Faker) string { + return getRandValue(f, []string{"minecraft", "armorpart"}) +} + +// MinecraftWeapon will generate a random Minecraft weapon +func MinecraftWeapon() string { return minecraftWeapon(GlobalFaker) } + +// MinecraftWeapon will generate a random Minecraft weapon +func (f *Faker) MinecraftWeapon() string { return minecraftWeapon(f) } + +func 
minecraftWeapon(f *Faker) string { return getRandValue(f, []string{"minecraft", "weapon"}) } + +// MinecraftTool will generate a random Minecraft tool +func MinecraftTool() string { return minecraftTool(GlobalFaker) } + +// MinecraftTool will generate a random Minecraft tool +func (f *Faker) MinecraftTool() string { return minecraftTool(f) } + +func minecraftTool(f *Faker) string { return getRandValue(f, []string{"minecraft", "tool"}) } + +// MinecraftDye will generate a random Minecraft dye +func MinecraftDye() string { return minecraftDye(GlobalFaker) } + +// MinecraftDye will generate a random Minecraft dye +func (f *Faker) MinecraftDye() string { return minecraftDye(f) } + +func minecraftDye(f *Faker) string { return getRandValue(f, []string{"minecraft", "dye"}) } + +// MinecraftFood will generate a random Minecraft food +func MinecraftFood() string { return minecraftFood(GlobalFaker) } + +// MinecraftFood will generate a random Minecraft food +func (f *Faker) MinecraftFood() string { return minecraftFood(f) } + +func minecraftFood(f *Faker) string { return getRandValue(f, []string{"minecraft", "food"}) } + +// MinecraftAnimal will generate a random Minecraft animal +func MinecraftAnimal() string { return minecraftAnimal(GlobalFaker) } + +// MinecraftAnimal will generate a random Minecraft animal +func (f *Faker) MinecraftAnimal() string { return minecraftAnimal(f) } + +func minecraftAnimal(f *Faker) string { + return getRandValue(f, []string{"minecraft", "animal"}) +} + +// MinecraftVillagerJob will generate a random Minecraft villager job +func MinecraftVillagerJob() string { return minecraftVillagerJob(GlobalFaker) } + +// MinecraftVillagerJob will generate a random Minecraft villager job +func (f *Faker) MinecraftVillagerJob() string { return minecraftVillagerJob(f) } + +func minecraftVillagerJob(f *Faker) string { + return getRandValue(f, []string{"minecraft", "villagerjob"}) +} + +// MinecraftVillagerStation will generate a random Minecraft villager station +func MinecraftVillagerStation() string { return minecraftVillagerStation(GlobalFaker) } + +// MinecraftVillagerStation will generate a random Minecraft villager station +func (f *Faker) MinecraftVillagerStation() string { return minecraftVillagerStation(f) } + +func minecraftVillagerStation(f *Faker) string { + return getRandValue(f, []string{"minecraft", "villagerstation"}) +} + +// MinecraftVillagerLevel will generate a random Minecraft villager level +func MinecraftVillagerLevel() string { return minecraftVillagerLevel(GlobalFaker) } + +// MinecraftVillagerLevel will generate a random Minecraft villager level +func (f *Faker) MinecraftVillagerLevel() string { return minecraftVillagerLevel(f) } + +func minecraftVillagerLevel(f *Faker) string { + return getRandValue(f, []string{"minecraft", "villagerlevel"}) +} + +// MinecraftMobPassive will generate a random Minecraft mob passive +func MinecraftMobPassive() string { return minecraftMobPassive(GlobalFaker) } + +// MinecraftMobPassive will generate a random Minecraft mob passive +func (f *Faker) MinecraftMobPassive() string { return minecraftMobPassive(f) } + +func minecraftMobPassive(f *Faker) string { + return getRandValue(f, []string{"minecraft", "mobpassive"}) +} + +// MinecraftMobNeutral will generate a random Minecraft mob neutral +func MinecraftMobNeutral() string { return minecraftMobNeutral(GlobalFaker) } + +// MinecraftMobNeutral will generate a random Minecraft mob neutral +func (f *Faker) MinecraftMobNeutral() string { return minecraftMobNeutral(f) } + +func 
minecraftMobNeutral(f *Faker) string { + return getRandValue(f, []string{"minecraft", "mobneutral"}) +} + +// MinecraftMobHostile will generate a random Minecraft mob hostile +func MinecraftMobHostile() string { return minecraftMobHostile(GlobalFaker) } + +// MinecraftMobHostile will generate a random Minecraft mob hostile +func (f *Faker) MinecraftMobHostile() string { return minecraftMobHostile(f) } + +func minecraftMobHostile(f *Faker) string { + return getRandValue(f, []string{"minecraft", "mobhostile"}) +} + +// MinecraftMobBoss will generate a random Minecraft mob boss +func MinecraftMobBoss() string { return minecraftMobBoss(GlobalFaker) } + +// MinecraftMobBoss will generate a random Minecraft mob boss +func (f *Faker) MinecraftMobBoss() string { return minecraftMobBoss(f) } + +func minecraftMobBoss(f *Faker) string { + return getRandValue(f, []string{"minecraft", "mobboss"}) +} + +// MinecraftBiome will generate a random Minecraft biome +func MinecraftBiome() string { return minecraftBiome(GlobalFaker) } + +// MinecraftBiome will generate a random Minecraft biome +func (f *Faker) MinecraftBiome() string { return minecraftBiome(f) } + +func minecraftBiome(f *Faker) string { return getRandValue(f, []string{"minecraft", "biome"}) } + +// MinecraftWeather will generate a random Minecraft weather +func MinecraftWeather() string { return minecraftWeather(GlobalFaker) } + +// MinecraftWeather will generate a random Minecraft weather +func (f *Faker) MinecraftWeather() string { return minecraftWeather(f) } + +func minecraftWeather(f *Faker) string { return getRandValue(f, []string{"minecraft", "weather"}) } + +func addMinecraftLookup() { + AddFuncLookup("minecraftore", Info{ + Display: "Minecraft ore", + Category: "minecraft", + Description: "Naturally occurring minerals found in the game Minecraft, used for crafting purposes", + Example: "coal", + Output: "string", + Aliases: []string{"resource block", "crafting ore", "mining material", "mineral node", "in-game ore"}, + Keywords: []string{"naturally", "occurring", "coal", "iron", "gold", "diamond", "lapis", "emerald", "redstone"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return minecraftOre(f), nil + }, + }) + + AddFuncLookup("minecraftwood", Info{ + Display: "Minecraft wood", + Category: "minecraft", + Description: "Natural resource in Minecraft, used for crafting various items and building structures", + Example: "oak", + Output: "string", + Aliases: []string{"tree log", "wood block", "timber type", "crafting wood", "building wood"}, + Keywords: []string{"natural", "resource", "oak", "birch", "jungle", "spruce", "mangrove", "planks"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return minecraftWood(f), nil + }, + }) + + AddFuncLookup("minecraftarmortier", Info{ + Display: "Minecraft armor tier", + Category: "minecraft", + Description: "Classification system for armor sets in Minecraft, indicating their effectiveness and protection level", + Example: "iron", + Output: "string", + Aliases: []string{"armor level", "armor rank", "armor category", "tier type", "defense tier"}, + Keywords: []string{"classification", "iron", "diamond", "netherite", "leather", "chainmail", "gold", "effectiveness", "defense"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return minecraftArmorTier(f), nil + }, + }) + + AddFuncLookup("minecraftarmorpart", Info{ + Display: "Minecraft armor part", + Category: "minecraft", + Description: "Component of an armor set in Minecraft, such as a 
helmet, chestplate, leggings, or boots", + Example: "helmet", + Output: "string", + Aliases: []string{"armor piece", "armor gear", "armor equipment", "armor slot", "protective item"}, + Keywords: []string{"helmet", "chestplate", "leggings", "boots", "component", "set", "gear"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return minecraftArmorPart(f), nil + }, + }) + + AddFuncLookup("minecraftweapon", Info{ + Display: "Minecraft weapon", + Category: "minecraft", + Description: "Tools and items used in Minecraft for combat and defeating hostile mobs", + Example: "bow", + Output: "string", + Aliases: []string{"combat item", "fighting tool", "attack weapon", "battle gear", "mob killer"}, + Keywords: []string{"bow", "sword", "axe", "trident", "crossbow", "used", "combat", "damage"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return minecraftWeapon(f), nil + }, + }) + + AddFuncLookup("minecrafttool", Info{ + Display: "Minecraft tool", + Category: "minecraft", + Description: "Items in Minecraft designed for specific tasks, including mining, digging, and building", + Example: "shovel", + Output: "string", + Aliases: []string{"utility tool", "crafting tool", "gathering tool", "work tool", "task tool"}, + Keywords: []string{"pickaxe", "axe", "hoe", "shovel", "fishing-rod", "tasks", "mining", "digging"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return minecraftTool(f), nil + }, + }) + + AddFuncLookup("minecraftdye", Info{ + Display: "Minecraft dye", + Category: "minecraft", + Description: "Items used to change the color of various in-game objects", + Example: "white", + Output: "string", + Aliases: []string{"color dye", "pigment item", "colorant", "dye material", "color change"}, + Keywords: []string{"red", "blue", "green", "yellow", "white", "wool", "coloring", "sheep"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return minecraftDye(f), nil + }, + }) + + AddFuncLookup("minecraftfood", Info{ + Display: "Minecraft food", + Category: "minecraft", + Description: "Consumable items in Minecraft that provide nourishment to the player character", + Example: "apple", + Output: "string", + Aliases: []string{"edible item", "consumable block", "nourishment item", "hunger food", "survival food"}, + Keywords: []string{"apple", "bread", "meat", "carrot", "potato", "steak", "restore", "health", "hunger"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return minecraftFood(f), nil + }, + }) + + AddFuncLookup("minecraftanimal", Info{ + Display: "Minecraft animal", + Category: "minecraft", + Description: "Non-hostile creatures in Minecraft, often used for resources and farming", + Example: "chicken", + Output: "string", + Aliases: []string{"farm animal", "passive mob", "resource creature", "livestock", "tameable mob"}, + Keywords: []string{"cow", "pig", "sheep", "chicken", "horse", "llama", "resources", "farming"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return minecraftAnimal(f), nil + }, + }) + + AddFuncLookup("minecraftvillagerjob", Info{ + Display: "Minecraft villager job", + Category: "minecraft", + Description: "The profession or occupation assigned to a villager character in the game", + Example: "farmer", + Output: "string", + Aliases: []string{"villager profession", "npc job", "trade role", "occupation type", "work class"}, + Keywords: []string{"farmer", "librarian", "cleric", "armorer", "fletcher", "smith", "trading"}, + Generate: func(f *Faker, m 
*MapParams, info *Info) (any, error) { + return minecraftVillagerJob(f), nil + }, + }) + + AddFuncLookup("minecraftvillagerstation", Info{ + Display: "Minecraft villager station", + Category: "minecraft", + Description: "Designated area or structure in Minecraft where villagers perform their job-related tasks and trading", + Example: "furnace", + Output: "string", + Aliases: []string{"workstation block", "villager station", "profession station", "trade station", "job block"}, + Keywords: []string{"furnace", "grindstone", "lectern", "brewing", "stand", "smithing", "table", "trading", "block"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return minecraftVillagerStation(f), nil + }, + }) + + AddFuncLookup("minecraftvillagerlevel", Info{ + Display: "Minecraft villager level", + Category: "minecraft", + Description: "Measure of a villager's experience and proficiency in their assigned job or profession", + Example: "master", + Output: "string", + Aliases: []string{"villager rank", "experience tier", "profession level", "npc level", "skill grade"}, + Keywords: []string{"novice", "apprentice", "journeyman", "expert", "master", "progression"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return minecraftVillagerLevel(f), nil + }, + }) + + AddFuncLookup("minecraftmobpassive", Info{ + Display: "Minecraft mob passive", + Category: "minecraft", + Description: "Non-aggressive creatures in the game that do not attack players", + Example: "cow", + Output: "string", + Aliases: []string{"peaceful mob", "friendly creature", "safe entity", "passive entity", "non-hostile mob"}, + Keywords: []string{"cow", "sheep", "chicken", "villager", "bat", "neutral", "farm"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return minecraftMobPassive(f), nil + }, + }) + + AddFuncLookup("minecraftmobneutral", Info{ + Display: "Minecraft mob neutral", + Category: "minecraft", + Description: "Creature in the game that only becomes hostile if provoked, typically defending itself when attacked", + Example: "bee", + Output: "string", + Aliases: []string{"conditional mob", "provokable creature", "neutral mob", "reactive entity", "self-defense mob"}, + Keywords: []string{"bee", "wolf", "enderman", "goat", "attack", "provoked", "defending"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return minecraftMobNeutral(f), nil + }, + }) + + AddFuncLookup("minecraftmobhostile", Info{ + Display: "Minecraft mob hostile", + Category: "minecraft", + Description: "Aggressive creatures in the game that actively attack players when encountered", + Example: "spider", + Output: "string", + Aliases: []string{"enemy mob", "aggressive entity", "dangerous creature", "threat mob", "monster mob"}, + Keywords: []string{"spider", "zombie", "skeleton", "creeper", "witch", "attack", "players"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return minecraftMobHostile(f), nil + }, + }) + + AddFuncLookup("minecraftmobboss", Info{ + Display: "Minecraft mob boss", + Category: "minecraft", + Description: "Powerful hostile creature in the game, often found in challenging dungeons or structures", + Example: "ender dragon", + Output: "string", + Aliases: []string{"boss mob", "elite mob", "endgame creature", "raid boss", "legendary mob"}, + Keywords: []string{"ender", "dragon", "wither", "warden", "powerful", "challenging", "structure", "hostile"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return minecraftMobBoss(f), nil 
+ }, + }) + + AddFuncLookup("minecraftbiome", Info{ + Display: "Minecraft biome", + Category: "minecraft", + Description: "Distinctive environmental regions in the game, characterized by unique terrain, vegetation, and weather", + Example: "forest", + Output: "string", + Aliases: []string{"environment zone", "terrain type", "climate region", "biome area", "ecological zone"}, + Keywords: []string{"forest", "plains", "jungle", "desert", "swamp", "tundra", "savanna"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return minecraftBiome(f), nil + }, + }) + + AddFuncLookup("minecraftweather", Info{ + Display: "Minecraft weather", + Category: "minecraft", + Description: "Atmospheric conditions in the game that include rain, thunderstorms, and clear skies, affecting gameplay and ambiance", + Example: "rain", + Output: "string", + Aliases: []string{"climate condition", "weather effect", "game atmosphere", "sky state", "environmental condition"}, + Keywords: []string{"rain", "clear", "thunderstorm", "snow", "atmospheric", "storm", "lightning"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return minecraftWeather(f), nil + }, + }) +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/misc.go b/vendor/github.com/brianvoe/gofakeit/v7/misc.go new file mode 100644 index 0000000000..37dfbc8b1c --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/misc.go @@ -0,0 +1,170 @@ +package gofakeit + +import ( + "encoding/hex" + "reflect" + + "github.com/brianvoe/gofakeit/v7/data" +) + +// Bool will generate a random boolean value +func Bool() bool { return boolFunc(GlobalFaker) } + +// Bool will generate a random boolean value +func (f *Faker) Bool() bool { return boolFunc(f) } + +func boolFunc(f *Faker) bool { return randIntRange(f, 0, 1) == 1 } + +// UUID (version 4) will generate a random unique identifier based upon random numbers +// Format: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx +func UUID() string { return uuid(GlobalFaker) } + +// UUID (version 4) will generate a random unique identifier based upon random numbers +// Format: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx 8-4-4-4-12 +func (f *Faker) UUID() string { return uuid(f) } + +func uuid(f *Faker) string { + version := byte(4) + uuid := make([]byte, 16) + + // Read 16 random bytes + for i := 0; i < 16; i++ { + uuid[i] = byte(f.IntN(256)) + } + + // Set version + uuid[6] = (uuid[6] & 0x0f) | (version << 4) + + // Set variant + uuid[8] = (uuid[8] & 0xbf) | 0x80 + + buf := make([]byte, 36) + hex.Encode(buf[0:8], uuid[0:4]) + buf[8] = dash + hex.Encode(buf[9:13], uuid[4:6]) + buf[13] = dash + hex.Encode(buf[14:18], uuid[6:8]) + buf[18] = dash + hex.Encode(buf[19:23], uuid[8:10]) + buf[23] = dash + hex.Encode(buf[24:], uuid[10:]) + + return string(buf) +} + +// ShuffleAnySlice takes in a slice and outputs it in a random order +func ShuffleAnySlice(v any) { shuffleAnySlice(GlobalFaker, v) } + +// ShuffleAnySlice takes in a slice and outputs it in a random order +func (f *Faker) ShuffleAnySlice(v any) { shuffleAnySlice(f, v) } + +func shuffleAnySlice(f *Faker, v any) { + if v == nil { + return + } + + // Check type of passed in value, if not a slice return with no action taken + typ := reflect.TypeOf(v) + if typ.Kind() != reflect.Slice { + return + } + + s := reflect.ValueOf(v) + n := s.Len() + + if n <= 1 { + return + } + + swap := func(i, j int) { + tmp := reflect.ValueOf(s.Index(i).Interface()) + s.Index(i).Set(s.Index(j)) + s.Index(j).Set(tmp) + } + + //if size is > int32 probably it will never finish, or ran 
out of entropy + i := n - 1 + for ; i > 0; i-- { + j := int(int32NFunc(f, int32(i+1))) + swap(i, j) + } +} + +// FlipACoin will return a random value of Heads or Tails +func FlipACoin() string { return flipACoin(GlobalFaker) } + +// FlipACoin will return a random value of Heads or Tails +func (f *Faker) FlipACoin() string { return flipACoin(f) } + +func flipACoin(f *Faker) string { + if boolFunc(f) { + return "Heads" + } + + return "Tails" +} + +// RandomMapKey will return a random key from a map +func RandomMapKey(mapI any) any { return randomMapKey(GlobalFaker, mapI) } + +// RandomMapKey will return a random key from a map +func (f *Faker) RandomMapKey(mapI any) any { return randomMapKey(f, mapI) } + +func randomMapKey(f *Faker, mapI any) any { + keys := reflect.ValueOf(mapI).MapKeys() + return keys[f.IntN(len(keys))].Interface() +} + +// Categories will return a map string array of available data categories and sub categories +func Categories() map[string][]string { + types := make(map[string][]string) + for category, subCategoriesMap := range data.Data { + subCategories := make([]string, 0) + for subType := range subCategoriesMap { + subCategories = append(subCategories, subType) + } + types[category] = subCategories + } + return types +} + +func addMiscLookup() { + AddFuncLookup("uuid", Info{ + Display: "UUID", + Category: "misc", + Description: "128-bit identifier used to uniquely identify objects or entities in computer systems", + Example: "590c1440-9888-45b0-bd51-a817ee07c3f2", + Output: "string", + Aliases: []string{"identifier", "unique", "guid", "id", "128-bit"}, + Keywords: []string{"uuid", "computer", "system", "objects", "entities", "identify", "version", "hexadecimal"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return uuid(f), nil + }, + }) + + AddFuncLookup("bool", Info{ + Display: "Boolean", + Category: "misc", + Description: "Data type that represents one of two possible values, typically true or false", + Example: "true", + Output: "bool", + Aliases: []string{"boolean", "true", "false", "logic", "binary"}, + Keywords: []string{"bool", "data", "type", "represents", "values", "typically", "two", "possible"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return boolFunc(f), nil + }, + }) + + AddFuncLookup("flipacoin", Info{ + Display: "Flip A Coin", + Category: "misc", + Description: "Decision-making method involving the tossing of a coin to determine outcomes", + Example: "Tails", + Output: "string", + Aliases: []string{"coin", "flip", "heads", "tails", "decision", "random"}, + Keywords: []string{"decision-making", "method", "tossing", "determine", "outcomes", "chance", "probability"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return flipACoin(f), nil + }, + }) +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/movie.go b/vendor/github.com/brianvoe/gofakeit/v7/movie.go new file mode 100644 index 0000000000..5856c137b9 --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/movie.go @@ -0,0 +1,95 @@ +package gofakeit + +func MovieName() string { return movieName(GlobalFaker) } + +func (f *Faker) MovieName() string { return movieName(f) } + +func movieName(f *Faker) string { return getRandValue(f, []string{"movie", "name"}) } + +func MovieGenre() string { return movieGenre(GlobalFaker) } + +func (f *Faker) MovieGenre() string { return movieGenre(f) } + +func movieGenre(f *Faker) string { return getRandValue(f, []string{"movie", "genre"}) } + +type MovieInfo struct { + Name string 
`json:"name" xml:"name"` + Genre string `json:"genre" xml:"genre"` +} + +func Movie() *MovieInfo { return movie(GlobalFaker) } + +func (f *Faker) Movie() *MovieInfo { return movie(f) } + +func movie(f *Faker) *MovieInfo { + return &MovieInfo{ + Name: movieName(f), + Genre: movieGenre(f), + } +} + +func addMovieLookup() { + AddFuncLookup("movie", Info{ + Display: "Movie", + Category: "movie", + Description: "A story told through moving pictures and sound", + Example: `{ + "name": "Psycho", + "genre": "Mystery" +}`, + Output: "map[string]string", + ContentType: "application/json", + Aliases: []string{"cinema", "picture", "story", "entertainment", "motion"}, + Keywords: []string{"movie", "film", "moving", "sound", "pictures", "told", "through", "psycho", "mystery"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return movie(f), nil + }, + }) + + AddFuncLookup("moviename", Info{ + Display: "Movie Name", + Category: "movie", + Description: "Title or name of a specific film used for identification and reference", + Example: "The Matrix", + Output: "string", + Aliases: []string{ + "movie title", + "film title", + "film name", + "motion picture title", + "cinema title", + }, + Keywords: []string{ + "movie", "film", "title", "name", "cinema", + "motionpicture", "blockbuster", "feature", "picture", + "hollywood", "bollywood", "screenplay", "screen", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return movieName(f), nil + }, + }) + + AddFuncLookup("moviegenre", Info{ + Display: "Genre", + Category: "movie", + Description: "Category that classifies movies based on common themes, styles, and storytelling approaches", + Example: "Action", + Output: "string", + Aliases: []string{ + "film genre", + "movie category", + "film type", + "cinema genre", + "movie classification", + }, + Keywords: []string{ + "genre", "category", "type", "classification", + "movie", "film", "cinema", "style", "theme", + "drama", "comedy", "horror", "thriller", "romance", + "documentary", "animation", "sci-fi", "fantasy", "action", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return movieGenre(f), nil + }, + }) +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/number.go b/vendor/github.com/brianvoe/gofakeit/v7/number.go new file mode 100644 index 0000000000..1e8af3b0a1 --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/number.go @@ -0,0 +1,890 @@ +package gofakeit + +import ( + "math" + "math/bits" +) + +// Number will generate a random number between given min and max +func Number(min int, max int) int { return number(GlobalFaker, min, max) } + +// Number will generate a random number between given min and max +func (f *Faker) Number(min int, max int) int { return number(f, min, max) } + +func number(f *Faker, min int, max int) int { return randIntRange(f, min, max) } + +// Uint will generate a random uint value +func Uint() uint { return uintFunc(GlobalFaker) } + +// Uint will generate a random uint value +func (f *Faker) Uint() uint { return uintFunc(f) } + +func uintFunc(f *Faker) uint { return uint(f.Uint64()) } + +// UintN will generate a random uint value between 0 and n +func UintN(n uint) uint { return uintNFunc(GlobalFaker, n) } + +// UintN will generate a random uint value between 0 and n +func (f *Faker) UintN(n uint) uint { return uintNFunc(f, n) } + +func uintNFunc(f *Faker, n uint) uint { + if n == 0 { + return 0 + } + return uint(uint64NFunc(f, uint64(n))) +} + +// Uint8 will generate a random uint8 value +func Uint8() 
uint8 { return uint8Func(GlobalFaker) } + +// Uint8 will generate a random uint8 value +func (f *Faker) Uint8() uint8 { return uint8Func(f) } + +func uint8Func(f *Faker) uint8 { return uint8(randIntRange(f, minUint, math.MaxUint8)) } + +// Uint16 will generate a random uint16 value +func Uint16() uint16 { return uint16Func(GlobalFaker) } + +// Uint16 will generate a random uint16 value +func (f *Faker) Uint16() uint16 { return uint16Func(f) } + +func uint16Func(f *Faker) uint16 { return uint16(randIntRange(f, minUint, math.MaxUint16)) } + +// Uint32 will generate a random uint32 value +func Uint32() uint32 { return uint32Func(GlobalFaker) } + +// Uint32 will generate a random uint32 value +func (f *Faker) Uint32() uint32 { return uint32Func(f) } + +func uint32Func(f *Faker) uint32 { return uint32(f.Uint64() >> 32) } + +// Uint64 will generate a random uint64 value +func Uint64() uint64 { return GlobalFaker.Uint64() } + +// Uint64 will generate a random uint64 value +// This is the primary location in which the random number is generated. +// This will be the only location in which reading from Rand.Uint64() is lockable +func (f *Faker) Uint64() uint64 { + // Check if the source is locked + if f.Locked { + // Lock the source + f.mu.Lock() + defer f.mu.Unlock() + } + + return f.Rand.Uint64() +} + +// uint64n is the no-bounds-checks version of Uint64N. +// See https://cs.opensource.google/go/go/+/refs/tags/go1.22.0:src/math/rand/v2/rand.go;l=78 +// hidden as to not clutter with additional N functions +func uint64NFunc(f *Faker, n uint64) uint64 { + if is32bit && uint64(uint32(n)) == n { + // create reusable function here + uint32NFunc := func(f *Faker, n uint32) uint32 { + if n&(n-1) == 0 { // n is power of two, can mask + return uint32(f.Uint64()) & (n - 1) + } + + x := f.Uint64() + lo1a, lo0 := bits.Mul32(uint32(x), n) + hi, lo1b := bits.Mul32(uint32(x>>32), n) + lo1, c := bits.Add32(lo1a, lo1b, 0) + hi += c + if lo1 == 0 && lo0 < uint32(n) { + n64 := uint64(n) + thresh := uint32(-n64 % n64) + for lo1 == 0 && lo0 < thresh { + x := f.Uint64() + lo1a, lo0 = bits.Mul32(uint32(x), n) + hi, lo1b = bits.Mul32(uint32(x>>32), n) + lo1, c = bits.Add32(lo1a, lo1b, 0) + hi += c + } + } + return hi + } + + return uint64(uint32NFunc(f, uint32(n))) + } + if n&(n-1) == 0 { // n is power of two, can mask + return f.Uint64() & (n - 1) + } + + hi, lo := bits.Mul64(f.Uint64(), n) + if lo < n { + thresh := -n % n + for lo < thresh { + hi, lo = bits.Mul64(f.Uint64(), n) + } + } + return hi +} + +// UintRange will generate a random uint value between min and max +func UintRange(min, max uint) uint { return uintRangeFunc(GlobalFaker, min, max) } + +// UintRange will generate a random uint value between min and max +func (f *Faker) UintRange(min, max uint) uint { return uintRangeFunc(f, min, max) } + +func uintRangeFunc(f *Faker, min, max uint) uint { return randUintRange(f, min, max) } + +// Int will generate a random int value +func Int() int { return intFunc(GlobalFaker) } + +// Int will generate a random int value +func (f *Faker) Int() int { return intFunc(f) } + +func intFunc(f *Faker) int { return int(uint(f.Uint64()) << 1 >> 1) } + +// IntN will generate a random int value between 0 and n +func IntN(n int) int { return intNFunc(GlobalFaker, n) } + +// IntN will generate a random int value between 0 and n +func (f *Faker) IntN(n int) int { return intNFunc(f, n) } + +func intNFunc(f *Faker, n int) int { + if n <= 0 { + return 0 + } + return int(uint64NFunc(f, uint64(n))) +} + +// Int8 will generate a random 
Int8 value +func Int8() int8 { return int8Func(GlobalFaker) } + +// Int8 will generate a random Int8 value +func (f *Faker) Int8() int8 { return int8Func(f) } + +func int8Func(f *Faker) int8 { return int8(randIntRange(f, math.MinInt8, math.MaxInt8)) } + +// Int16 will generate a random int16 value +func Int16() int16 { return int16Func(GlobalFaker) } + +// Int16 will generate a random int16 value +func (f *Faker) Int16() int16 { return int16Func(f) } + +func int16Func(f *Faker) int16 { return int16(randIntRange(f, math.MinInt16, math.MaxInt16)) } + +// Int32 will generate a random int32 value +func Int32() int32 { return int32Func(GlobalFaker) } + +// Int32 will generate a random int32 value +func (f *Faker) Int32() int32 { return int32Func(f) } + +func int32Func(f *Faker) int32 { return int32(f.Uint64() >> 33) } + +// int32n is an identical computation to int64n +// hidden as to not clutter with additional N functions +func int32NFunc(f *Faker, n int32) int32 { + if n <= 0 { + return 0 + } + return int32(uint64NFunc(f, uint64(n))) +} + +// Int64 will generate a random int64 value +func Int64() int64 { return int64Func(GlobalFaker) } + +// Int64 will generate a random int64 value +func (f *Faker) Int64() int64 { return int64Func(f) } + +func int64Func(f *Faker) int64 { return int64(f.Uint64() &^ (1 << 63)) } + +// IntRange will generate a random int value between min and max +func IntRange(min, max int) int { return intRangeFunc(GlobalFaker, min, max) } + +// IntRange will generate a random int value between min and max +func (f *Faker) IntRange(min, max int) int { return intRangeFunc(f, min, max) } + +func intRangeFunc(f *Faker, min, max int) int { return randIntRange(f, min, max) } + +// Float32 will generate a random float32 value +func Float32() float32 { return float32Func(GlobalFaker) } + +// Float32 will generate a random float32 value +func (f *Faker) Float32() float32 { return float32Func(f) } + +func float32Func(f *Faker) float32 { + // There are exactly 1<<24 float32s in [0,1). Use Intn(1<<24) / (1<<24). + return float32(f.Uint32()<<8>>8) / (1 << 24) +} + +// Float32Range will generate a random float32 value between min and max +func Float32Range(min, max float32) float32 { + return float32Range(GlobalFaker, min, max) +} + +// Float32Range will generate a random float32 value between min and max +func (f *Faker) Float32Range(min, max float32) float32 { + return float32Range(f, min, max) +} + +func float32Range(f *Faker, min, max float32) float32 { + if min == max { + return min + } + return f.Float32()*(max-min) + min +} + +// Float64 will generate a random float64 value +func Float64() float64 { + return float64Func(GlobalFaker) +} + +// Float64 will generate a random float64 value +func (f *Faker) Float64() float64 { + return float64Func(f) +} + +func float64Func(f *Faker) float64 { + // There are exactly 1<<53 float64s in [0,1). Use Intn(1<<53) / (1<<53). 
+ return float64(f.Uint64()<<11>>11) / (1 << 53) +} + +// Float64Range will generate a random float64 value between min and max +func Float64Range(min, max float64) float64 { + return float64Range(GlobalFaker, min, max) +} + +// Float64Range will generate a random float64 value between min and max +func (f *Faker) Float64Range(min, max float64) float64 { + return float64Range(f, min, max) +} + +func float64Range(f *Faker, min, max float64) float64 { + if min == max { + return min + } + return f.Float64()*(max-min) + min +} + +// ShuffleInts will randomize a slice of ints +func ShuffleInts(a []int) { shuffleInts(GlobalFaker, a) } + +// ShuffleInts will randomize a slice of ints +func (f *Faker) ShuffleInts(a []int) { shuffleInts(f, a) } + +func shuffleInts(f *Faker, a []int) { + for i := range a { + j := f.IntN(i + 1) + a[i], a[j] = a[j], a[i] + } +} + +// RandomInt will take in a slice of int and return a randomly selected value +func RandomInt(i []int) int { return randomInt(GlobalFaker, i) } + +// RandomInt will take in a slice of int and return a randomly selected value +func (f *Faker) RandomInt(i []int) int { return randomInt(f, i) } + +func randomInt(f *Faker, i []int) int { + size := len(i) + if size == 0 { + return 0 + } + if size == 1 { + return i[0] + } + return i[f.IntN(size)] +} + +// RandomUint will take in a slice of uint and return a randomly selected value +func RandomUint(u []uint) uint { return randomUint(GlobalFaker, u) } + +// RandomUint will take in a slice of uint and return a randomly selected value +func (f *Faker) RandomUint(u []uint) uint { return randomUint(f, u) } + +func randomUint(f *Faker, u []uint) uint { + size := len(u) + if size == 0 { + return 0 + } + if size == 1 { + return u[0] + } + return u[f.IntN(size)] +} + +// HexUint will generate a random uint hex value with "0x" prefix +func HexUint(bitSize int) string { return hexUint(GlobalFaker, bitSize) } + +// HexUint will generate a random uint hex value with "0x" prefix +func (f *Faker) HexUint(bitSize int) string { return hexUint(f, bitSize) } + +func hexUint(f *Faker, bitSize int) string { + digits := []byte("0123456789abcdef") + hexLen := (bitSize >> 2) + 2 + if hexLen <= 2 { + return "0x" + } + + s := make([]byte, hexLen) + s[0], s[1] = '0', 'x' + for i := 2; i < hexLen; i++ { + s[i] = digits[f.IntN(16)] + } + return string(s) +} + +func addNumberLookup() { + AddFuncLookup("number", Info{ + Display: "Number", + Category: "number", + Description: "Integer used for counting or measuring, with optional bounds", + Example: "14866", + Output: "int", + Aliases: []string{ + "integer value", + "whole-number output", + "bounded result", + "range-limited value", + "discrete quantity", + }, + Keywords: []string{ + "number", "integer", "int", "random", + "min", "max", "range", "bounded", "between", "inclusive", + }, + Params: []Param{ + {Field: "min", Display: "Min", Type: "int", Default: "-2147483648", Description: "Minimum integer value"}, + {Field: "max", Display: "Max", Type: "int", Default: "2147483647", Description: "Maximum integer value"}, + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + min, err := info.GetInt(m, "min") + if err != nil { + return nil, err + } + max, err := info.GetInt(m, "max") + if err != nil { + return nil, err + } + return number(f, min, max), nil + }, + }) + + AddFuncLookup("uint", Info{ + Display: "Uint", + Category: "number", + Description: "Unsigned integer (nonnegative whole number)", + Example: "14866", + Output: "uint", + Aliases: []string{ + 
"nonnegative value", + "natural-count type", + "unsigned whole", + "zero-or-greater", + "cardinal quantity", + }, + Keywords: []string{ + "uint", "unsigned", "integer", "nonnegative", + "natural", "zero", "positive", "whole", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { return uintFunc(f), nil }, + }) + + AddFuncLookup("uintn", Info{ + Display: "UintN", + Category: "number", + Description: "Unsigned integer between 0 (inclusive) and n (exclusive)", + Example: "32783", + Output: "uint", + Aliases: []string{ + "upper-bounded uint", + "cap-limited unsigned", + "zero-to-n minus one", + "exclusive-maximum uint", + "limited-range unsigned", + }, + Keywords: []string{ + "uintn", "unsigned", "range", "upper", + "limit", "bound", "cap", "max", "exclusive", + }, + Params: []Param{ + {Field: "n", Display: "N", Type: "uint", Default: "4294967295", Description: "Maximum uint value (exclusive)"}, + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + n, err := info.GetUint(m, "n") + if err != nil { + return nil, err + } + return uintNFunc(f, n), nil + }, + }) + + AddFuncLookup("uint8", Info{ + Display: "Uint8", + Category: "number", + Description: "Unsigned 8-bit integer, range 0–255", + Example: "152", + Output: "uint8", + Aliases: []string{ + "byte-sized unsigned", + "octet quantity", + "small-range unsigned", + "one-byte value", + "0-255 whole", + }, + Keywords: []string{ + "uint8", "unsigned", "8bit", "byte", "octet", "range", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { return uint8Func(f), nil }, + }) + + AddFuncLookup("uint16", Info{ + Display: "Uint16", + Category: "number", + Description: "Unsigned 16-bit integer, range 0–65,535", + Example: "34968", + Output: "uint16", + Aliases: []string{ + "two-byte unsigned", + "ushort quantity", + "medium-range unsigned", + "port-sized value", + "0-65535 whole", + }, + Keywords: []string{ + "uint16", "unsigned", "16bit", "word", "port", "range", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { return uint16Func(f), nil }, + }) + + AddFuncLookup("uint32", Info{ + Display: "Uint32", + Category: "number", + Description: "Unsigned 32-bit integer, range 0–4,294,967,295", + Example: "1075055705", + Output: "uint32", + Aliases: []string{ + "four-byte unsigned", "u32 numeric", "ipv4-scale value", + "wide-range unsigned", "32-bit whole", "medium unsigned int", "standard unsigned int", + }, + Keywords: []string{ + "uint32", "unsigned", "32bit", "range", "ipv4", "integer", "binary", "numeric", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { return uint32Func(f), nil }, + }) + + AddFuncLookup("uint64", Info{ + Display: "Uint64", + Category: "number", + Description: "Unsigned 64-bit integer, range 0–18,446,744,073,709,551,615", + Example: "843730692693298265", + Output: "uint64", + Aliases: []string{ + "eight-byte unsigned", "u64 numeric", "very-large unsigned", "wide whole count", "extended-range value", "large uint", "unsigned bigint", + }, + Keywords: []string{ + "uint64", "unsigned", "64bit", "range", "bigint", "integer", "numeric", "arithmetic", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { return f.Uint64(), nil }, + }) + + AddFuncLookup("uintrange", Info{ + Display: "Uint Range", + Category: "number", + Description: "Unsigned integer value within a given range", + Example: "1075055705", + Output: "uint", + Aliases: []string{ + "unsigned span", + "nonnegative interval", + "ranged cardinal", + "bounded unsigned 
result", + "constrained uint output", + }, + Keywords: []string{ + "uintrange", "unsigned", "range", "min", "max", + "bounds", "limits", "interval", "span", + }, + Params: []Param{ + {Field: "min", Display: "Min", Type: "uint", Default: "0", Description: "Minimum uint value"}, + {Field: "max", Display: "Max", Type: "uint", Default: "4294967295", Description: "Maximum uint value"}, + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + min, err := info.GetUint(m, "min") + if err != nil { + return nil, err + } + max, err := info.GetUint(m, "max") + if err != nil { + return nil, err + } + return uintRangeFunc(f, min, max), nil + }, + }) + + AddFuncLookup("int", Info{ + Display: "Int", + Category: "number", + Description: "Signed integer", + Example: "14866", + Output: "int", + Aliases: []string{ + "signed whole", + "two-sided count", + "negative-or-positive value", + "zero-inclusive whole", + "general int type", + }, + Keywords: []string{ + "int", "signed", "integer", + "positive", "negative", "zero", "counting", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { return intFunc(f), nil }, + }) + + AddFuncLookup("intn", Info{ + Display: "IntN", + Category: "number", + Description: "Integer between 0 (inclusive) and n (exclusive)", + Example: "32783", + Output: "int", + Aliases: []string{ + "upper-bounded int", + "exclusive-maximum int", + "zero-through-n minus one", + "limited-range int", + "cap-limited integer", + }, + Keywords: []string{ + "intn", "range", "upper", "limit", "bound", + "cap", "max", "exclusive", "integer", + }, + Params: []Param{ + {Field: "n", Display: "N", Type: "int", Default: "2147483647", Description: "Maximum int value (exclusive)"}, + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + n, err := info.GetInt(m, "n") + if err != nil { + return nil, err + } + return intNFunc(f, n), nil + }, + }) + + AddFuncLookup("int8", Info{ + Display: "Int8", + Category: "number", + Description: "Signed 8-bit integer, range −128–127", + Example: "24", + Output: "int8", + Aliases: []string{ + "byte-sized signed", "small signed range", "one-byte integer", "8-bit whole signed", "narrow signed value", "tiny int", "signed byte", + }, + Keywords: []string{ + "int8", "signed", "8bit", "range", "twoscomplement", "integer", "arithmetic", "numeric", "binary", "storage", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { return int8Func(f), nil }, + }) + + AddFuncLookup("int16", Info{ + Display: "Int16", + Category: "number", + Description: "Signed 16-bit integer, range −32,768–32,767", + Example: "2200", + Output: "int16", + Aliases: []string{ + "two-byte signed", "short integer signed", "16-bit whole signed", "narrow-mid signed", "twobyte int", "short int", "halfword signed", + }, + Keywords: []string{ + "int16", "signed", "16bit", "range", "word", "numeric", "arithmetic", "binary", "integer", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { return int16Func(f), nil }, + }) + + AddFuncLookup("int32", Info{ + Display: "Int32", + Category: "number", + Description: "Signed 32-bit integer, range −2,147,483,648–2,147,483,647", + Example: "-1072427943", + Output: "int32", + Aliases: []string{ + "four-byte signed", "standard-width signed", "32-bit whole signed", "midrange integer", "int32 value", "long int", "standard signed int", + }, + Keywords: []string{ + "int32", "signed", "32bit", "range", "ipv4", "numeric", "arithmetic", "binary", "integer", + }, + Generate: func(f *Faker, m *MapParams, 
info *Info) (any, error) { return int32Func(f), nil }, + }) + + AddFuncLookup("int64", Info{ + Display: "Int64", + Category: "number", + Description: "Signed 64-bit integer, range −9,223,372,036,854,775,808–9,223,372,036,854,775,807", + Example: "-8379641344161477543", + Output: "int64", + Aliases: []string{ + "eight-byte signed", + "long-width integer", + "64-bit whole signed", + "large signed value", + "extended signed range", + }, + Keywords: []string{ + "int64", "signed", "64bit", "bigint", "range", "timestamp", "nanosecond", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { return int64Func(f), nil }, + }) + + AddFuncLookup("intrange", Info{ + Display: "Int Range", + Category: "number", + Description: "Signed integer value within a given range", + Example: "-8379477543", + Output: "int", + Aliases: []string{ + "signed span", + "bounded integer result", + "constrained int output", + "limited signed interval", + "ranged whole value", + }, + Keywords: []string{ + "intrange", "int", "range", "min", "max", + "bounds", "limits", "interval", "span", + }, + Params: []Param{ + {Field: "min", Display: "Min", Type: "int", Description: "Minimum int value"}, + {Field: "max", Display: "Max", Type: "int", Description: "Maximum int value"}, + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + min, err := info.GetInt(m, "min") + if err != nil { + return nil, err + } + max, err := info.GetInt(m, "max") + if err != nil { + return nil, err + } + return intRangeFunc(f, min, max), nil + }, + }) + + AddFuncLookup("float32", Info{ + Display: "Float32", + Category: "number", + Description: "Floating-point number with 32-bit single precision (IEEE 754)", + Example: "3.1128167e+37", + Output: "float32", + Aliases: []string{ + "single-precision float", + "fp32 numeric", + "32-bit real", + "float single", + "reduced-precision real", + }, + Keywords: []string{ + "float32", "single-precision", "ieee754", + "fp32", "mantissa", "exponent", "decimal", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { return float32Func(f), nil }, + }) + + AddFuncLookup("float32range", Info{ + Display: "Float32 Range", + Category: "number", + Description: "Float32 value within a given range", + Example: "914774.6", + Output: "float32", + Aliases: []string{ + "single-precision span", + "bounded fp32", + "limited float32 output", + "constrained 32-bit real", + "float single interval", + }, + Keywords: []string{ + "float32range", "float32", "range", + "min", "max", "bounds", "limits", "interval", + }, + Params: []Param{ + {Field: "min", Display: "Min", Type: "float", Description: "Minimum float32 value"}, + {Field: "max", Display: "Max", Type: "float", Description: "Maximum float32 value"}, + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + min, err := info.GetFloat32(m, "min") + if err != nil { + return nil, err + } + max, err := info.GetFloat32(m, "max") + if err != nil { + return nil, err + } + return float32Range(f, min, max), nil + }, + }) + + AddFuncLookup("float64", Info{ + Display: "Float64", + Category: "number", + Description: "Floating-point number with 64-bit double precision (IEEE 754)", + Example: "1.644484108270445e+307", + Output: "float64", + Aliases: []string{ + "double-precision float", + "fp64 numeric", + "64-bit real", + "float double", + "high-precision real", + }, + Keywords: []string{ + "float64", "double-precision", "ieee754", + "fp64", "mantissa", "exponent", "decimal", + "precision", "scientific", "number", + }, + 
Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { return float64Func(f), nil }, + }) + + AddFuncLookup("float64range", Info{ + Display: "Float64 Range", + Category: "number", + Description: "Float64 value within a given range", + Example: "914774.5585333086", + Output: "float64", + Aliases: []string{ + "double-precision span", + "bounded fp64", + "limited float64 output", + "constrained 64-bit real", + "float double interval", + }, + Keywords: []string{ + "float64range", "float64", "range", + "min", "max", "bounds", "limits", "interval", + }, + Params: []Param{ + {Field: "min", Display: "Min", Type: "float", Description: "Minimum float64 value"}, + {Field: "max", Display: "Max", Type: "float", Description: "Maximum float64 value"}, + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + min, err := info.GetFloat64(m, "min") + if err != nil { + return nil, err + } + max, err := info.GetFloat64(m, "max") + if err != nil { + return nil, err + } + return float64Range(f, min, max), nil + }, + }) + + AddFuncLookup("shuffleints", Info{ + Display: "Shuffle Ints", + Category: "number", + Description: "Shuffles an array of ints", + Example: "1,2,3,4 => 3,1,4,2", + Output: "[]int", + Aliases: []string{ + "reorder integers", + "scramble int slice", + "random permutation ints", + "reshuffle numbers", + "jumbled int output", + }, + Keywords: []string{ + "shuffleints", "shuffle", "permute", "randomize", + "ints", "slice", "array", "permutation", + }, + Params: []Param{ + {Field: "ints", Display: "Integers", Type: "[]int", Description: "Delimited separated integers"}, + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + ints, err := info.GetIntArray(m, "ints") + if err != nil { + return nil, err + } + shuffleInts(f, ints) + return ints, nil + }, + }) + + AddFuncLookup("randomint", Info{ + Display: "Random Int", + Category: "number", + Description: "Randomly selected value from a slice of int", + Example: "-1,2,-3,4 => -3", + Output: "int", + Aliases: []string{ + "draw one integer", + "sample an int", + "pick from ints", + "select a number", + "choose single int", + }, + Keywords: []string{ + "randomint", "random", "pick", "choose", + "select", "ints", "slice", "list", + }, + Params: []Param{ + {Field: "ints", Display: "Integers", Type: "[]int", Description: "Delimited separated integers"}, + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + ints, err := info.GetIntArray(m, "ints") + if err != nil { + return nil, err + } + return randomInt(f, ints), nil + }, + }) + + AddFuncLookup("randomuint", Info{ + Display: "Random Uint", + Category: "number", + Description: "Randomly selected value from a slice of uint", + Example: "1,2,3,4 => 4", + Output: "uint", + Aliases: []string{ + "draw one unsigned", + "sample a uint", + "pick from uints", + "select an unsigned", + "choose single uint", + }, + Keywords: []string{ + "randomuint", "random", "pick", "choose", + "select", "uints", "slice", "list", "nonnegative", + }, + Params: []Param{ + {Field: "uints", Display: "Unsigned Integers", Type: "[]uint", Description: "Delimited separated unsigned integers"}, + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + uints, err := info.GetUintArray(m, "uints") + if err != nil { + return nil, err + } + return randomUint(f, uints), nil + }, + }) + + AddFuncLookup("hexuint", Info{ + Display: "HexUint", + Category: "number", + Description: "Hexadecimal representation of an unsigned integer", + Example: "0x87", + Output: "string", 
+ Aliases: []string{ + "hex-encoded unsigned", + "base-16 uint string", + "prefixed 0x value", + "hex view of uint", + "formatted unsigned hex", + }, + Keywords: []string{ + "hexuint", "hex", "base16", "uint", "0x", + "bits", "width", "format", + }, + Params: []Param{ + {Field: "bitSize", Display: "Bit Size", Type: "int", Default: "8", Description: "Bit size of the unsigned integer"}, + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + bitSize, err := info.GetInt(m, "bitSize") + if err != nil { + return nil, err + } + return hexUint(f, bitSize), nil + }, + }) + +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/payment.go b/vendor/github.com/brianvoe/gofakeit/v7/payment.go new file mode 100644 index 0000000000..659d3295f3 --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/payment.go @@ -0,0 +1,529 @@ +package gofakeit + +import ( + "math" + "strconv" + "strings" + "time" + + "github.com/brianvoe/gofakeit/v7/data" +) + +// CurrencyInfo is a struct of currency information +type CurrencyInfo struct { + Short string `json:"short" xml:"short"` + Long string `json:"long" xml:"long"` +} + +// Currency will generate a struct with random currency information +func Currency() *CurrencyInfo { return currency(GlobalFaker) } + +// Currency will generate a struct with random currency information +func (f *Faker) Currency() *CurrencyInfo { return currency(f) } + +func currency(f *Faker) *CurrencyInfo { + index := f.IntN(len(data.Data["currency"]["short"])) + return &CurrencyInfo{ + Short: data.Data["currency"]["short"][index], + Long: data.Data["currency"]["long"][index], + } +} + +// CurrencyShort will generate a random short currency value +func CurrencyShort() string { return currencyShort(GlobalFaker) } + +// CurrencyShort will generate a random short currency value +func (f *Faker) CurrencyShort() string { return currencyShort(f) } + +func currencyShort(f *Faker) string { return getRandValue(f, []string{"currency", "short"}) } + +// CurrencyLong will generate a random long currency name +func CurrencyLong() string { return currencyLong(GlobalFaker) } + +// CurrencyLong will generate a random long currency name +func (f *Faker) CurrencyLong() string { return currencyLong(f) } + +func currencyLong(f *Faker) string { return getRandValue(f, []string{"currency", "long"}) } + +// Price will take in a min and max value and return a formatted price +func Price(min, max float64) float64 { return price(GlobalFaker, min, max) } + +// Price will take in a min and max value and return a formatted price +func (f *Faker) Price(min, max float64) float64 { return price(f, min, max) } + +func price(f *Faker, min, max float64) float64 { + return math.Floor(float64Range(f, min, max)*100) / 100 +} + +// CreditCardInfo is a struct containing credit variables +type CreditCardInfo struct { + Type string `json:"type" xml:"type"` + Number string `json:"number" xml:"number"` + Exp string `json:"exp" xml:"exp"` + Cvv string `json:"cvv" xml:"cvv"` +} + +// CreditCard will generate a struct full of credit card information +func CreditCard() *CreditCardInfo { return creditCard(GlobalFaker) } + +// CreditCard will generate a struct full of credit card information +func (f *Faker) CreditCard() *CreditCardInfo { return creditCard(f) } + +func creditCard(f *Faker) *CreditCardInfo { + ccType := randomString(f, data.CreditCardTypes) + ccv, _ := generate(f, strings.Repeat("#", int(data.CreditCards[randomString(f, data.CreditCardTypes)].Code.Size))) + + return &CreditCardInfo{ + Type: 
data.CreditCards[randomString(f, data.CreditCardTypes)].Display, + Number: creditCardNumber(f, &CreditCardOptions{Types: []string{ccType}}), + Exp: creditCardExp(f), + Cvv: ccv, + } +} + +// CreditCardType will generate a random credit card type string +func CreditCardType() string { return creditCardType(GlobalFaker) } + +// CreditCardType will generate a random credit card type string +func (f *Faker) CreditCardType() string { return creditCardType(f) } + +func creditCardType(f *Faker) string { + return data.CreditCards[randomString(f, data.CreditCardTypes)].Display +} + +// CreditCardOptions is the options for credit card number +type CreditCardOptions struct { + Types []string `json:"types"` + Bins []string `json:"bins"` // optional parameter of prepended numbers + Gaps bool `json:"gaps"` +} + +// CreditCardNumber will generate a random luhn credit card number +func CreditCardNumber(cco *CreditCardOptions) string { return creditCardNumber(GlobalFaker, cco) } + +// CreditCardNumber will generate a random luhn credit card number +func (f *Faker) CreditCardNumber(cco *CreditCardOptions) string { return creditCardNumber(f, cco) } + +func creditCardNumber(f *Faker, cco *CreditCardOptions) string { + if cco == nil { + cco = &CreditCardOptions{} + } + if len(cco.Types) == 0 { + cco.Types = data.CreditCardTypes + } + ccType := randomString(f, cco.Types) + + // Get Card info + var cardInfo data.CreditCardInfo + if info, ok := data.CreditCards[ccType]; ok { + cardInfo = info + } else { + ccType = randomString(f, data.CreditCardTypes) + cardInfo = data.CreditCards[ccType] + } + + // Get length and pattern + length := randomUint(f, cardInfo.Lengths) + numStr := "" + if len(cco.Bins) >= 1 { + numStr = randomString(f, cco.Bins) + } else { + numStr = strconv.FormatUint(uint64(randomUint(f, cardInfo.Patterns)), 10) + } + numStr += strings.Repeat("#", int(length)-len(numStr)) + numStr = numerify(f, numStr) + ui, _ := strconv.ParseUint(numStr, 10, 64) + + // Loop through until its a valid luhn + for { + valid := isLuhn(strconv.FormatUint(ui, 10)) + if valid { + break + } + ui++ + } + numStr = strconv.FormatUint(ui, 10) + + // Add gaps to number + if cco.Gaps { + for i, spot := range cardInfo.Gaps { + numStr = numStr[:(int(spot)+i)] + " " + numStr[(int(spot)+i):] + } + } + + return numStr +} + +// CreditCardExp will generate a random credit card expiration date string +// Exp date will always be a future date +func CreditCardExp() string { return creditCardExp(GlobalFaker) } + +// CreditCardExp will generate a random credit card expiration date string +// Exp date will always be a future date +func (f *Faker) CreditCardExp() string { return creditCardExp(f) } + +func creditCardExp(f *Faker) string { + month := strconv.Itoa(randIntRange(f, 1, 12)) + if len(month) == 1 { + month = "0" + month + } + + var currentYear = time.Now().Year() - 2000 + return month + "/" + strconv.Itoa(randIntRange(f, currentYear+1, currentYear+10)) +} + +// CreditCardCvv will generate a random CVV number +// Its a string because you could have 017 as an exp date +func CreditCardCvv() string { return creditCardCvv(GlobalFaker) } + +// CreditCardCvv will generate a random CVV number +// Its a string because you could have 017 as an exp date +func (f *Faker) CreditCardCvv() string { return creditCardCvv(f) } + +func creditCardCvv(f *Faker) string { return numerify(f, "###") } + +// isLuhn check is used for checking if credit card is a valid luhn card +func isLuhn(s string) bool { + var t = [...]int{0, 2, 4, 6, 8, 1, 3, 5, 7, 9} + 
odd := len(s) & 1 + var sum int + for i, c := range s { + if c < '0' || c > '9' { + return false + } + if i&1 == odd { + sum += t[c-'0'] + } else { + sum += int(c - '0') + } + } + return sum%10 == 0 +} + +// AchRouting will generate a 9 digit routing number +func AchRouting() string { return achRouting(GlobalFaker) } + +// AchRouting will generate a 9 digit routing number +func (f *Faker) AchRouting() string { return achRouting(f) } + +func achRouting(f *Faker) string { return numerify(f, "#########") } + +// AchAccount will generate a 12 digit account number +func AchAccount() string { return achAccount(GlobalFaker) } + +// AchAccount will generate a 12 digit account number +func (f *Faker) AchAccount() string { return achAccount(f) } + +func achAccount(f *Faker) string { return numerify(f, "############") } + +// BitcoinAddress will generate a random bitcoin address consisting of numbers, upper and lower characters +func BitcoinAddress() string { return bitcoinAddress(GlobalFaker) } + +// BitcoinAddress will generate a random bitcoin address consisting of numbers, upper and lower characters +func (f *Faker) BitcoinAddress() string { return bitcoinAddress(f) } + +func bitcoinAddress(f *Faker) string { + return randomString(f, []string{"1", "3"}) + password(f, true, true, true, false, false, number(f, 25, 34)) +} + +// BitcoinPrivateKey will generate a random bitcoin private key base58 consisting of numbers, upper and lower characters +func BitcoinPrivateKey() string { return bitcoinPrivateKey(GlobalFaker) } + +// BitcoinPrivateKey will generate a random bitcoin private key base58 consisting of numbers, upper and lower characters +func (f *Faker) BitcoinPrivateKey() string { return bitcoinPrivateKey(f) } + +func bitcoinPrivateKey(f *Faker) string { + var b strings.Builder + for i := 0; i < 49; i++ { + b.WriteString(randCharacter(f, base58)) + } + return "5" + randomString(f, []string{"H", "J", "K"}) + b.String() +} + +func BankName() string { return bankName(GlobalFaker) } + +func (f *Faker) BankName() string { return bankName(f) } + +func bankName(f *Faker) string { return getRandValue(f, []string{"bank", "name"}) } + +func BankType() string { return bankType(GlobalFaker) } + +func (f *Faker) BankType() string { return bankType(f) } + +func bankType(f *Faker) string { return getRandValue(f, []string{"bank", "type"}) } + +func addPaymentLookup() { + AddFuncLookup("currency", Info{ + Display: "Currency", + Category: "payment", + Description: "Medium of exchange, often in the form of money, used for trade and transactions", + Example: `{ + "short": "IQD", + "long": "Iraq Dinar" +}`, + Output: "map[string]string", + ContentType: "application/json", + Aliases: []string{ + "currency unit", "currency code", "money type", "exchange currency", "monetary unit", "legal tender", "fiat money", + }, + Keywords: []string{ + "currency", "money", "exchange", "fiat", "unit", "code", "iso", "usd", "eur", "gbp", "jpy", "cny", "trade", "transaction", "market", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { return currency(f), nil }, + }) + + AddFuncLookup("currencyshort", Info{ + Display: "Currency Short", + Category: "payment", + Description: "Short 3-letter ISO code used to represent a specific currency", + Example: "USD", + Output: "string", + Aliases: []string{ + "iso alpha-3", "currency ticker", "alpha-3 code", "currency shorthand", "iso-4217 code", + }, + Keywords: []string{ + "currency", "short", "iso", "code", "alpha3", "usd", "eur", "gbp", "jpy", "cad", "aud", + }, + 
Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { return currencyShort(f), nil }, + }) + + AddFuncLookup("currencylong", Info{ + Display: "Currency Long", + Category: "payment", + Description: "Complete name of a specific currency used in financial transactions", + Example: "United States Dollar", + Output: "string", + Aliases: []string{ + "currency name", "full currency", "long form name", "official currency", "monetary name", + }, + Keywords: []string{ + "currency", "name", "long", "full", "official", "dollar", "euro", "pound", "yen", "franc", "peso", "rupee", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { return currencyLong(f), nil }, + }) + + AddFuncLookup("price", Info{ + Display: "Price", + Category: "payment", + Description: "The amount of money assigned to a product, service, or asset in a transaction", + Example: "92.26", + Output: "float64", + Aliases: []string{ + "amount", "cost", "value", "fee", "charge", "rate", "unit price", + }, + Keywords: []string{ + "price", "payment", "transaction", "retail", "wholesale", "market", "asset", "listing", "quote", "valuation", + }, + Params: []Param{ + {Field: "min", Display: "Min", Type: "float", Default: "0", Description: "Minimum price value"}, + {Field: "max", Display: "Max", Type: "float", Default: "1000", Description: "Maximum price value"}, + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + min, err := info.GetFloat64(m, "min") + if err != nil { + return nil, err + } + max, err := info.GetFloat64(m, "max") + if err != nil { + return nil, err + } + return price(f, min, max), nil + }, + }) + + AddFuncLookup("creditcard", Info{ + Display: "Credit Card", + Category: "payment", + Description: "Card allowing users to make purchases on credit, with payment due at a later date", + Example: `{ + "type": "Visa", + "number": "4111111111111111", + "exp": "02/27", + "cvv": "123" +}`, + Output: "map[string]any", + ContentType: "application/json", + Aliases: []string{ + "credit card", "payment card", "charge card", "credit instrument", "card account", "plastic card", + }, + Keywords: []string{ + "credit", "card", "payment", "debt", "visa", "mastercard", "amex", "discover", "unionpay", "maestro", "jcb", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { return creditCard(f), nil }, + }) + + AddFuncLookup("creditcardtype", Info{ + Display: "Credit Card Type", + Category: "payment", + Description: "Classification of credit cards based on the issuing company", + Example: "Visa", + Output: "string", + Aliases: []string{ + "credit card type", "issuer brand", "card network", "scheme name", "card family", "issuer type", + }, + Keywords: []string{ + "credit", "card", "type", "issuer", "brand", "network", "visa", "mastercard", "amex", "discover", "unionpay", "maestro", "jcb", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { return creditCardType(f), nil }, + }) + + AddFuncLookup("creditcardnumber", Info{ + Display: "Credit Card Number", + Category: "payment", + Description: "Unique number on a credit card used for electronic payments", + Example: "4111111111111111", + Output: "string", + Aliases: []string{ + "credit card", "credit card number", "card number", "cc number", "primary account number", "pan value", "payment number", + }, + Keywords: []string{ + "credit", "card", "number", "identifier", "luhn", "validation", "checksum", "bin", "tokenize", "masking", "digits", + }, + Params: []Param{ + { + Field: "types", Display: "Types", Type: 
"[]string", Default: "all", + Options: []string{"visa", "mastercard", "american-express", "diners-club", "discover", "jcb", "unionpay", "maestro", "elo", "hiper", "hipercard"}, + Description: "A select number of types you want to use when generating a credit card number", + }, + {Field: "bins", Display: "Bins", Type: "[]string", Optional: true, Description: "Optional list of prepended bin numbers to pick from"}, + {Field: "gaps", Display: "Gaps", Type: "bool", Default: "false", Optional: true, Description: "Whether or not to have gaps in number"}, + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + types, err := info.GetStringArray(m, "types") + if err != nil { + return nil, err + } + if len(types) == 1 && types[0] == "all" { + types = []string{} + } + bins, _ := info.GetStringArray(m, "bins") + gaps, _ := info.GetBool(m, "gaps") + options := CreditCardOptions{Types: types, Gaps: gaps} + if len(bins) >= 1 { + options.Bins = bins + } + return creditCardNumber(f, &options), nil + }, + }) + + AddFuncLookup("creditcardexp", Info{ + Display: "Credit Card Exp", + Category: "payment", + Description: "Expiration date of a credit card", + Example: "01/27", + Output: "string", + Aliases: []string{ + "credit card exp", "credit card expiration", "expiry date", "expiration date", "exp date", "valid thru", "card expiry", + }, + Keywords: []string{ + "credit", "card", "exp", "expiry", "expiration", "month", "year", "validity", "future", "expired", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { return creditCardExp(f), nil }, + }) + + AddFuncLookup("creditcardcvv", Info{ + Display: "Credit Card CVV", + Category: "payment", + Description: "Three or four-digit security code on a credit card", + Example: "513", + Output: "string", + Aliases: []string{ + "credit card cvv", "cvv", "cvc", "cid", "security number", "auth digits", "card check value", "security code", + }, + Keywords: []string{ + "security", "code", "verification", "authentication", "fraud", "protection", "online", "payment", "transaction", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { return creditCardCvv(f), nil }, + }) + + AddFuncLookup("achrouting", Info{ + Display: "ACH Routing Number", + Category: "payment", + Description: "Nine-digit code used in the U.S. 
for identifying a bank in ACH transactions", + Example: "513715684", + Output: "string", + Aliases: []string{ + "routing number", "aba number", "routing transit number", "rtn code", "bank routing id", + }, + Keywords: []string{ + "ach", "routing", "aba", "us", "bank", "federal", "reserve", "clearinghouse", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { return achRouting(f), nil }, + }) + + AddFuncLookup("achaccount", Info{ + Display: "ACH Account Number", + Category: "payment", + Description: "Bank account number used for Automated Clearing House transactions", + Example: "491527954328", + Output: "string", + Aliases: []string{ + "account number", "ach account", "bank account", "checking account", "savings account", "account identifier", + }, + Keywords: []string{ + "ach", "account", "banking", "checking", "savings", "finance", "electronic", "transfer", "payment", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { return achAccount(f), nil }, + }) + + AddFuncLookup("bitcoinaddress", Info{ + Display: "Bitcoin Address", + Category: "payment", + Description: "Cryptographic identifier used to send and receive Bitcoin", + Example: "1BoatSLRHtKNngkdXEeobR76b53LETtpyT", + Output: "string", + Aliases: []string{ + "btc address", "bitcoin wallet", "crypto address", "public address", "payment address", + }, + Keywords: []string{ + "bitcoin", "btc", "wallet", "blockchain", "public", "key", "hash", "base58", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { return bitcoinAddress(f), nil }, + }) + + AddFuncLookup("bitcoinprivatekey", Info{ + Display: "Bitcoin Private Key", + Category: "payment", + Description: "Secret key that allows access and control over Bitcoin holdings", + Example: "5HueCGU8rMjxEXxiPuD5BDuG6o5xjA7QkbPp", + Output: "string", + Aliases: []string{ + "btc private key", "wallet key", "secret key", "private wif", "signing key", + }, + Keywords: []string{ + "bitcoin", "btc", "private", "key", "blockchain", "wallet", "signature", "base58", "wif", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { return bitcoinPrivateKey(f), nil }, + }) + + AddFuncLookup("bankname", Info{ + Display: "Bank Name", + Category: "payment", + Description: "Name of a financial institution that offers banking services", + Example: "Wells Fargo", + Output: "string", + Aliases: []string{ + "financial institution", "banking entity", "lender name", "depository name", "institution title", + }, + Keywords: []string{ + "bank", "name", "institution", "financial", "wells", "fargo", "chase", "citibank", "pnc", "boa", "usbank", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { return bankName(f), nil }, + }) + + AddFuncLookup("banktype", Info{ + Display: "Bank Type", + Category: "payment", + Description: "Classification of a bank based on its services and operations", + Example: "Investment Bank", + Output: "string", + Aliases: []string{ + "bank classification", "bank category", "bank segment", "institution class", "service tier", + }, + Keywords: []string{ + "bank", "type", "classification", "category", "segment", "investment", "commercial", "retail", "savings", "credit", "union", "central", "federal", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { return bankType(f), nil }, + }) + +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/person.go b/vendor/github.com/brianvoe/gofakeit/v7/person.go new file mode 100644 index 0000000000..32e8a42cb5 --- /dev/null +++ 
b/vendor/github.com/brianvoe/gofakeit/v7/person.go @@ -0,0 +1,617 @@ +package gofakeit + +import ( + "math" + "strconv" + "strings" +) + +// PersonInfo is a struct of person information +type PersonInfo struct { + FirstName string `json:"first_name" xml:"first_name"` + LastName string `json:"last_name" xml:"last_name"` + Gender string `json:"gender" xml:"gender"` + SSN string `json:"ssn" xml:"ssn"` + Hobby string `json:"hobby" xml:"hobby"` + Job *JobInfo `json:"job" xml:"job"` + Address *AddressInfo `json:"address" xml:"address"` + Contact *ContactInfo `json:"contact" xml:"contact"` + CreditCard *CreditCardInfo `json:"credit_card" xml:"credit_card"` +} + +// Person will generate a struct with person information +func Person() *PersonInfo { return person(GlobalFaker) } + +// Person will generate a struct with person information +func (f *Faker) Person() *PersonInfo { return person(f) } + +func person(f *Faker) *PersonInfo { + return &PersonInfo{ + FirstName: firstName(f), + LastName: lastName(f), + Gender: gender(f), + SSN: ssn(f), + Hobby: hobby(f), + Job: job(f), + Address: address(f), + Contact: contact(f), + CreditCard: creditCard(f), + } +} + +// Name will generate a random First and Last Name +func Name() string { return name(GlobalFaker) } + +// Name will generate a random First and Last Name +func (f *Faker) Name() string { return name(f) } + +func name(f *Faker) string { + return getRandValue(f, []string{"person", "first"}) + " " + getRandValue(f, []string{"person", "last"}) +} + +// FirstName will generate a random first name +func FirstName() string { return firstName(GlobalFaker) } + +// FirstName will generate a random first name +func (f *Faker) FirstName() string { return firstName(f) } + +func firstName(f *Faker) string { return getRandValue(f, []string{"person", "first"}) } + +// MiddleName will generate a random middle name +func MiddleName() string { return middleName(GlobalFaker) } + +// MiddleName will generate a random middle name +func (f *Faker) MiddleName() string { return middleName(f) } + +func middleName(f *Faker) string { return getRandValue(f, []string{"person", "middle"}) } + +// LastName will generate a random last name +func LastName() string { return lastName(GlobalFaker) } + +// LastName will generate a random last name +func (f *Faker) LastName() string { return lastName(f) } + +func lastName(f *Faker) string { return getRandValue(f, []string{"person", "last"}) } + +// NamePrefix will generate a random name prefix +func NamePrefix() string { return namePrefix(GlobalFaker) } + +// NamePrefix will generate a random name prefix +func (f *Faker) NamePrefix() string { return namePrefix(f) } + +func namePrefix(f *Faker) string { return getRandValue(f, []string{"person", "prefix"}) } + +// NameSuffix will generate a random name suffix +func NameSuffix() string { return nameSuffix(GlobalFaker) } + +// NameSuffix will generate a random name suffix +func (f *Faker) NameSuffix() string { return nameSuffix(f) } + +func nameSuffix(f *Faker) string { return getRandValue(f, []string{"person", "suffix"}) } + +// SSN will generate a random Social Security Number +func SSN() string { return ssn(GlobalFaker) } + +// SSN will generate a random Social Security Number +func (f *Faker) SSN() string { return ssn(f) } + +func ssn(f *Faker) string { return strconv.Itoa(randIntRange(f, 100000000, 999999999)) } + +// EIN will generate a random Employer Identification Number +func EIN() string { return ein(GlobalFaker) } + +// EIN will generate a random Employer Identification Number 
+func (f *Faker) EIN() string { return ein(f) } + +func ein(f *Faker) string { + // EIN format: XX-XXXXXXX (2 digits, dash, 7 digits) + // First two digits have specific valid prefixes + prefixes := []string{"10", "11", "12", "13", "14", "15", "16", "17", "18", "19", "20", "21", "22", "23", "24", "25", "26", "27", "28", "29", "30", "31", "32", "33", "34", "35", "36", "37", "38", "39", "40", "41", "42", "43", "44", "45", "46", "47", "48", "49", "50", "51", "52", "53", "54", "55", "56", "57", "58", "59", "60", "61", "62", "63", "64", "65", "66", "67", "68", "69", "70", "71", "72", "73", "74", "75", "76", "77", "78", "79", "80", "81", "82", "83", "84", "85", "86", "87", "88", "89", "90", "91", "92", "93", "94", "95", "96", "97", "98", "99"} + prefix := prefixes[f.IntN(len(prefixes))] + + // Generate 7 random digits + sevenDigits := "" + for i := 0; i < 7; i++ { + sevenDigits += string(rune('0' + f.IntN(10))) + } + + return prefix + "-" + sevenDigits +} + +// Gender will generate a random gender string +func Gender() string { return gender(GlobalFaker) } + +// Gender will generate a random gender string +func (f *Faker) Gender() string { return gender(f) } + +func gender(f *Faker) string { + if boolFunc(f) { + return "male" + } + + return "female" +} + +// Hobby will generate a random hobby string +func Hobby() string { return hobby(GlobalFaker) } + +// Hobby will generate a random hobby string +func (f *Faker) Hobby() string { return hobby(f) } + +func hobby(f *Faker) string { return getRandValue(f, []string{"person", "hobby"}) } + +// ContactInfo struct full of contact info +type ContactInfo struct { + Phone string `json:"phone" xml:"phone"` + Email string `json:"email" xml:"email"` +} + +// Contact will generate a struct with information randomly populated contact information +func Contact() *ContactInfo { return contact(GlobalFaker) } + +// Contact will generate a struct with information randomly populated contact information +func (f *Faker) Contact() *ContactInfo { return contact(f) } + +func contact(f *Faker) *ContactInfo { + return &ContactInfo{ + Phone: phone(f), + Email: email(f), + } +} + +// Phone will generate a random phone number string +func Phone() string { return phone(GlobalFaker) } + +// Phone will generate a random phone number string +func (f *Faker) Phone() string { return phone(f) } + +func phone(f *Faker) string { return replaceWithNumbers(f, "##########") } + +// PhoneFormatted will generate a random phone number string +func PhoneFormatted() string { return phoneFormatted(GlobalFaker) } + +// PhoneFormatted will generate a random phone number string +func (f *Faker) PhoneFormatted() string { return phoneFormatted(f) } + +func phoneFormatted(f *Faker) string { + return replaceWithNumbers(f, getRandValue(f, []string{"person", "phone"})) +} + +// Email will generate a random email string +func Email() string { return email(GlobalFaker) } + +// Email will generate a random email string +func (f *Faker) Email() string { return email(f) } + +func email(f *Faker) string { + email := getRandValue(f, []string{"person", "first"}) + getRandValue(f, []string{"person", "last"}) + email += "@" + email += getRandValue(f, []string{"person", "last"}) + "." 
+ getRandValue(f, []string{"internet", "domain_suffix"}) + + return strings.ToLower(email) +} + +// Teams takes in an array of people and team names and randomly places the people into teams as evenly as possible +func Teams(peopleArray []string, teamsArray []string) map[string][]string { + return teams(GlobalFaker, peopleArray, teamsArray) +} + +// Teams takes in an array of people and team names and randomly places the people into teams as evenly as possible +func (f *Faker) Teams(peopleArray []string, teamsArray []string) map[string][]string { + return teams(f, peopleArray, teamsArray) +} + +func teams(f *Faker, people []string, teams []string) map[string][]string { + // Shuffle the people if more than 1 + if len(people) > 1 { + shuffleStrings(f, people) + } + + peopleIndex := 0 + teamsOutput := make(map[string][]string) + numPer := math.Ceil(float64(len(people)) / float64(len(teams))) + for _, team := range teams { + teamsOutput[team] = []string{} + for i := 0.00; i < numPer; i++ { + if peopleIndex < len(people) { + teamsOutput[team] = append(teamsOutput[team], people[peopleIndex]) + peopleIndex++ + } + } + } + + return teamsOutput +} + +func addPersonLookup() { + AddFuncLookup("person", Info{ + Display: "Person", + Category: "person", + Description: "Personal data, like name and contact details, used for identification and communication", + Example: `{ + "first_name": "Markus", + "last_name": "Moen", + "gender": "male", + "ssn": "275413589", + "image": "https://picsum.photos/208/500", + "hobby": "Lacrosse", + "job": { + "company": "Intermap Technologies", + "title": "Developer", + "descriptor": "Direct", + "level": "Paradigm" + }, + "address": { + "address": "369 North Cornerbury, Miami, North Dakota 24259", + "street": "369 North Cornerbury", + "city": "Miami", + "state": "North Dakota", + "zip": "24259", + "country": "Ghana", + "latitude": -6.662595, + "longitude": 23.921575 + }, + "contact": { + "phone": "3023202027", + "email": "lamarkoelpin@heaney.biz" + }, + "credit_card": { + "type": "Maestro", + "number": "39800889982276", + "exp": "01/29", + "cvv": "932" + } +}`, + Output: "map[string]any", + ContentType: "application/json", + Aliases: []string{ + "person record", + "identity profile", + "user profile", + "personal info", + "individual data", + }, + Keywords: []string{ + "person", "profile", "identity", "individual", + "user", "account", "record", "contact", + "name", "details", "attributes", "information", + "bio", "demographics", "personal", "data", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return person(f), nil + }, + }) + + // full name + AddFuncLookup("name", Info{ + Display: "Name", + Category: "person", + Description: "The given and family name of an individual", + Example: "Markus Moen", + Output: "string", + Aliases: []string{ + "full name", + "person name", + "complete name", + "name string", + "display name", + }, + Keywords: []string{ + "name", "fullname", "given", "family", + "first", "last", "forename", "surname", + "display", "legal", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { return name(f), nil }, + }) + + // name prefix (honorific) + AddFuncLookup("nameprefix", Info{ + Display: "Name Prefix", + Category: "person", + Description: "A title or honorific added before a person's name", + Example: "Mr.", + Output: "string", + Aliases: []string{ + "name prefix", + "honorific", + "title prefix", + "courtesy title", + "pre-nominal", + }, + Keywords: []string{ + "prefix", "title", "mr", "ms", "mrs", + 
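The teams helper above shuffles the list of people and then deals them out in blocks of ceil(len(people)/len(teams)), so no two teams differ in size by more than one. A small sketch of the exported API (the names are invented for the example):

```go
package main

import (
	"fmt"

	"github.com/brianvoe/gofakeit/v7"
)

func main() {
	people := []string{"Billy", "Sharon", "Jeff", "Connor", "Steve", "Justin", "Fabian", "Robert"}
	teamNames := []string{"Team 1", "Team 2", "Team 3"}

	// Each team receives at most ceil(8/3) = 3 members; the remainder lands in
	// the later teams, so sizes here come out as 3, 3 and 2.
	assignment := gofakeit.Teams(people, teamNames)
	for team, members := range assignment {
		fmt.Println(team, members)
	}
}
```

Note that the input people slice is shuffled in place, so pass a copy if the original ordering matters.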
"dr", "prof", "sir", "madam", "rev", "fr", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { return namePrefix(f), nil }, + }) + + // name suffix (generational/professional) + AddFuncLookup("namesuffix", Info{ + Display: "Name Suffix", + Category: "person", + Description: "A title or designation added after a person's name", + Example: "Jr.", + Output: "string", + Aliases: []string{ + "name suffix", + "post nominal", + "suffix designation", + "generational suffix", + "professional suffix", + }, + Keywords: []string{ + "suffix", "jr", "sr", "iii", "iv", + "esq", "phd", "md", "mba", "cpa", + "designation", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { return nameSuffix(f), nil }, + }) + + // first name + AddFuncLookup("firstname", Info{ + Display: "First Name", + Category: "person", + Description: "The name given to a person at birth", + Example: "Markus", + Output: "string", + Aliases: []string{ + "first name", + "given name", + "forename", + "personal name", + "given label", + }, + Keywords: []string{ + "first", "given", "name", + "preferred", "callname", "initial", + "personal", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { return firstName(f), nil }, + }) + + // middle name + AddFuncLookup("middlename", Info{ + Display: "Middle Name", + Category: "person", + Description: "Name between a person's first name and last name", + Example: "Belinda", + Output: "string", + Aliases: []string{ + "middle name", + "second name", + "additional name", + "secondary name", + "middle initial label", + }, + Keywords: []string{ + "middle", "second", "additional", "secondary", + "name", "initial", "intermediate", "optional", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { return middleName(f), nil }, + }) + + // last name + AddFuncLookup("lastname", Info{ + Display: "Last Name", + Category: "person", + Description: "The family name or surname of an individual", + Example: "Daniel", + Output: "string", + Aliases: []string{ + "last name", + "family name", + "surname", + "patronymic", + "family designation", + }, + Keywords: []string{ + "last", "family", "name", + "lineage", "heritage", "ancestry", "clan", + "tribe", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { return lastName(f), nil }, + }) + + // gender (keep terms neutral and search-friendly) + AddFuncLookup("gender", Info{ + Display: "Gender", + Category: "person", + Description: "Classification that identifies gender", + Example: "male", + Output: "string", + Aliases: []string{ + "gender identity", + "gender label", + "sex category", + "gender marker", + "presentation", + }, + Keywords: []string{ + "gender", "male", "female", "nonbinary", + "identity", "label", "category", "sex", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { return gender(f), nil }, + }) + + // ssn (us) + AddFuncLookup("ssn", Info{ + Display: "SSN", + Category: "person", + Description: "Unique nine-digit identifier used for government and financial purposes in the United States", + Example: "296446360", + Output: "string", + Aliases: []string{ + "social security number", + "ssn number", + "us ssn", + "tax id us", + "federal id", + }, + Keywords: []string{ + "ssn", "social", "security", "number", + "us", "tax", "irs", "employment", + "benefits", "identification", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { return ssn(f), nil }, + }) + + AddFuncLookup("ein", Info{ + Display: "EIN", + Category: "person", + 
Description: "Nine-digit Employer Identification Number used by businesses for tax purposes", + Example: "12-3456789", + Output: "string", + Aliases: []string{ + "employer id", + "tax id", + "business tax id", + "federal tax id", + "irs number", + }, + Keywords: []string{ + "ein", "employer", "identification", "tax", "business", "federal", "irs", "number", "id", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { return ein(f), nil }, + }) + + // hobby + AddFuncLookup("hobby", Info{ + Display: "Hobby", + Category: "person", + Description: "An activity pursued for leisure and pleasure", + Example: "Swimming", + Output: "string", + Aliases: []string{ + "pastime", + "leisure activity", + "recreational activity", + "interest", + "free-time pursuit", + }, + Keywords: []string{ + "hobby", "leisure", "recreation", + "activity", "sport", "craft", + "game", "collection", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { return hobby(f), nil }, + }) + + // email + AddFuncLookup("email", Info{ + Display: "Email", + Category: "person", + Description: "Electronic mail address", + Example: "markusmoen@pagac.net", + Output: "string", + Aliases: []string{ + "email address", + "mail address", + "contact email", + "user email", + "electronic mailbox", + }, + Keywords: []string{ + "email", "address", "mail", "inbox", + "account", "contact", "sender", "recipient", + "domain", "username", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { return email(f), nil }, + }) + + // phone (raw digits) + AddFuncLookup("phone", Info{ + Display: "Phone", + Category: "person", + Description: "Numerical sequence used to contact individuals via telephone or mobile devices", + Example: "6136459948", + Output: "string", + Aliases: []string{ + "phone number", + "telephone number", + "mobile number", + "contact number", + "voice number", + }, + Keywords: []string{ + "phone", "number", "telephone", "mobile", + "contact", "dial", "cell", "landline", + "e164", "voice", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { return phone(f), nil }, + }) + + // phone formatted (readable) + AddFuncLookup("phoneformatted", Info{ + Display: "Phone Formatted", + Category: "person", + Description: "Formatted phone number of a person", + Example: "136-459-9489", + Output: "string", + Aliases: []string{ + "formatted phone", + "pretty phone", + "display phone", + "readable phone", + "formatted telephone", + }, + Keywords: []string{ + "phone", "formatted", "format", "pattern", + "dashes", "parentheses", "spaces", "separators", + "telephone", "contact", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { return phoneFormatted(f), nil }, + }) + + AddFuncLookup("teams", Info{ + Display: "Teams", + Category: "person", + Description: "Randomly split people into teams", + Example: `{ + "Team 1": [ + "Justin", + "Connor", + "Jeff" + ], + "Team 2": [ + "Sharon", + "Fabian", + "Billy" + ], + "Team 3": [ + "Steve", + "Robert" + ] +}`, + Output: "map[string][]string", + ContentType: "application/json", + Aliases: []string{ + "people grouping", + "team assignment", + "random partition", + "group allocator", + "roster builder", + }, + Keywords: []string{ + "teams", "randomly", "person", "into", + "distribution", "allocation", "roster", "squad", + }, + Params: []Param{ + {Field: "people", Display: "Strings", Type: "[]string", Description: "Array of people"}, + {Field: "teams", Display: "Strings", Type: "[]string", Description: "Array of teams"}, + }, + 
Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + people, err := info.GetStringArray(m, "people") + if err != nil { + return nil, err + } + + teamsArray, err := info.GetStringArray(m, "teams") + if err != nil { + return nil, err + } + + return teams(f, people, teamsArray), nil + }, + }) + +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/product.go b/vendor/github.com/brianvoe/gofakeit/v7/product.go new file mode 100644 index 0000000000..782ad00bf1 --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/product.go @@ -0,0 +1,646 @@ +package gofakeit + +import ( + "fmt" + "strconv" + "strings" + + "github.com/brianvoe/gofakeit/v7/data" +) + +type ProductInfo struct { + Name string `json:"name" xml:"name"` + Description string `json:"description" xml:"description"` + Categories []string `json:"categories" xml:"categories"` + Price float64 `json:"price" xml:"price"` + Features []string `json:"features" xml:"features"` + Color string `json:"color" xml:"color"` + Material string `json:"material" xml:"material"` + UPC string `json:"upc" xml:"upc"` + Audience []string `json:"audience" xml:"audience"` + Dimension string `json:"dimension" xml:"dimension"` + UseCase string `json:"use_case" xml:"use_case"` + Benefit string `json:"benefit" xml:"benefit"` + Suffix string `json:"suffix" xml:"suffix"` +} + +// Product will generate a random set of product information +func Product() *ProductInfo { return product(GlobalFaker) } + +// Product will generate a random set of product information +func (f *Faker) Product() *ProductInfo { return product(f) } + +func product(f *Faker) *ProductInfo { + // Categories + categories := []string{} + weightedCategory, _ := weighted(f, []any{1, 2, 3, 4}, []float32{1, 4, 3, 4}) + + for i := 0; i < weightedCategory.(int); i++ { + categories = append(categories, productCategory(f)) + } + + // Features + features := []string{} + for i := 0; i < number(f, 1, 5); i++ { + features = append(features, productFeature(f)) + } + + product := &ProductInfo{ + Name: productName(f), + Description: productDescription(f), + Categories: categories, + Price: price(f, 3.00, 100.00), + UPC: productUPC(f), + Features: features, + Color: safeColor(f), + Material: productMaterial(f), + Audience: productAudience(f), + Dimension: productDimension(f), + UseCase: productUseCase(f), + Benefit: productBenefit(f), + Suffix: productSuffix(f), + } + + return product +} + +// ProductName will generate a random product name +func ProductName() string { return productName(GlobalFaker) } + +// ProductName will generate a random product name +func (f *Faker) ProductName() string { return productName(f) } + +func productName(f *Faker) string { + name := getRandValue(f, []string{"product", "name"}) + switch number(f, 0, 9) { + case 1: + // Name + Adjective + Feature + return title(fmt.Sprintf("%s %s %s", name, getRandValue(f, []string{"product", "adjective"}), productFeature(f))) + case 2: + // Adjective + Material + Name + return title(fmt.Sprintf("%s %s %s", getRandValue(f, []string{"product", "adjective"}), productMaterial(f), name)) + case 3: + // Color + Name + Suffix + return title(fmt.Sprintf("%s %s %s", safeColor(f), name, getRandValue(f, []string{"product", "suffix"}))) + case 4: + // Feature + Name + Adjective + return title(fmt.Sprintf("%s %s %s", productFeature(f), name, getRandValue(f, []string{"product", "adjective"}))) + case 5: + // Material + Color + Name + return title(fmt.Sprintf("%s %s %s", productMaterial(f), safeColor(f), name)) + case 6: + // Name + Suffix + 
Material + return title(fmt.Sprintf("%s %s %s", name, getRandValue(f, []string{"product", "suffix"}), productMaterial(f))) + case 7: + // Adjective + Feature + Name + return title(fmt.Sprintf("%s %s %s", getRandValue(f, []string{"product", "adjective"}), productFeature(f), name)) + case 8: + // Color + Material + Name + return title(fmt.Sprintf("%s %s %s", safeColor(f), productMaterial(f), name)) + case 9: + // Suffix + Adjective + Name + return title(fmt.Sprintf("%s %s %s", getRandValue(f, []string{"product", "suffix"}), getRandValue(f, []string{"product", "adjective"}), name)) + } + + // case: 0 - Adjective + Name + Suffix + return title(fmt.Sprintf("%s %s %s", getRandValue(f, []string{"product", "adjective"}), name, getRandValue(f, []string{"product", "suffix"}))) +} + +// ProductDescription will generate a random product description +func ProductDescription() string { return productDescription(GlobalFaker) } + +// ProductDescription will generate a random product description +func (f *Faker) ProductDescription() string { return productDescription(f) } + +func productDescription(f *Faker) string { + prodDesc := getRandValue(f, []string{"product", "description"}) + + // Replace all {productaudience} with join "and" + for strings.Contains(prodDesc, "{productaudience}") { + prodDesc = strings.Replace(prodDesc, "{productaudience}", strings.Join(productAudience(f), " and "), 1) + } + + desc, _ := generate(f, prodDesc) + return desc +} + +// ProductCategory will generate a random product category +func ProductCategory() string { return productCategory(GlobalFaker) } + +// ProductCategory will generate a random product category +func (f *Faker) ProductCategory() string { return productCategory(f) } + +func productCategory(f *Faker) string { + return getRandValue(f, []string{"product", "category"}) +} + +// ProductFeature will generate a random product feature +func ProductFeature() string { return productFeature(GlobalFaker) } + +// ProductFeature will generate a random product feature +func (f *Faker) ProductFeature() string { return productFeature(f) } + +func productFeature(f *Faker) string { + return getRandValue(f, []string{"product", "feature"}) +} + +// ProductMaterial will generate a random product material +func ProductMaterial() string { return productMaterial(GlobalFaker) } + +// ProductMaterial will generate a random product material +func (f *Faker) ProductMaterial() string { return productMaterial(f) } + +func productMaterial(f *Faker) string { + return getRandValue(f, []string{"product", "material"}) +} + +// ProductUPC will generate a random product UPC +func ProductUPC() string { return productUPC(GlobalFaker) } + +// ProductUPC will generate a random product UPC +func (f *Faker) ProductUPC() string { return productUPC(f) } + +func productUPC(f *Faker) string { + // The first digit of a UPC is a fixed digit (usually 0) + upc := "0" + + // Generate the remaining 11 digits randomly + for i := 1; i < 12; i++ { + digit := number(f, 0, 9) + upc += fmt.Sprintf("%d", digit) + } + + return upc +} + +// ProductAudience will generate a random target audience +func ProductAudience() []string { return productAudience(GlobalFaker) } + +// ProductAudience will generate a random target audience +func (f *Faker) ProductAudience() []string { return productAudience(f) } + +func productAudience(f *Faker) []string { + audiences := []string{} + for i := 0; i < number(f, 1, 2); i++ { + // Check if the target audience is already in the list + // If it is, generate a new target audience + for { + 
audience := getRandValue(f, []string{"product", "target_audience"}) + // Check if in array + if !stringInSlice(audience, audiences) { + audiences = append(audiences, audience) + break + } + } + } + return audiences +} + +// ProductDimension will generate a random product dimension +func ProductDimension() string { return productDimension(GlobalFaker) } + +// ProductDimension will generate a random product dimension +func (f *Faker) ProductDimension() string { return productDimension(f) } + +func productDimension(f *Faker) string { + return getRandValue(f, []string{"product", "dimension"}) +} + +// ProductUseCase will generate a random product use case +func ProductUseCase() string { return productUseCase(GlobalFaker) } + +// ProductUseCase will generate a random product use case +func (f *Faker) ProductUseCase() string { return productUseCase(f) } + +func productUseCase(f *Faker) string { + return getRandValue(f, []string{"product", "use_case"}) +} + +// ProductBenefit will generate a random product benefit +func ProductBenefit() string { return productBenefit(GlobalFaker) } + +// ProductBenefit will generate a random product benefit +func (f *Faker) ProductBenefit() string { return productBenefit(f) } + +func productBenefit(f *Faker) string { + return getRandValue(f, []string{"product", "benefit"}) +} + +// ProductSuffix will generate a random product suffix +func ProductSuffix() string { return productSuffix(GlobalFaker) } + +// ProductSuffix will generate a random product suffix +func (f *Faker) ProductSuffix() string { return productSuffix(f) } + +func productSuffix(f *Faker) string { + return getRandValue(f, []string{"product", "suffix"}) +} + +// ProductISBN13 will generate a random ISBN-13 string for the product +func ProductISBN(opts *ISBNOptions) string { return productISBN(GlobalFaker, opts) } + +// ProductISBN13 will generate a random ISBN-13 string for the product +func (f *Faker) ProductISBN(opts *ISBNOptions) string { return productISBN(f, opts) } + +type ISBNOptions struct { + Version string // "10" or "13" + Separator string // e.g. 
"-", "" (default: "-") +} + +func productISBN(f *Faker, opts *ISBNOptions) string { + if opts == nil { + opts = &ISBNOptions{Version: "13", Separator: "-"} + } + + sep := opts.Separator + if sep == "" { + sep = "-" + } + + // string of n random digits + randomDigits := func(f *Faker, n int) string { + digits := make([]byte, n) + for i := 0; i < n; i++ { + digits[i] = byte('0' + number(f, 0, 9)) + } + return string(digits) + } + + switch opts.Version { + case "10": + // ISBN-10 format: group(1)-registrant(4)-publication(3)-check(1) + group := randomDigits(f, 1) + registrant := randomDigits(f, 4) + publication := randomDigits(f, 3) + base := group + registrant + publication + + // checksum + sum := 0 + for i, c := range base { + digit := int(c - '0') + sum += digit * (10 - i) + } + remainder := (11 - (sum % 11)) % 11 + check := "X" + if remainder < 10 { + check = strconv.Itoa(remainder) + } + + return strings.Join([]string{group, registrant, publication, check}, sep) + + case "13": + // ISBN-13 format: prefix(3)-group(1)-registrant(4)-publication(4)-check(1) + prefix := data.ISBN13Prefix + group := randomDigits(f, 1) + registrant := randomDigits(f, 4) + publication := randomDigits(f, 4) + base := prefix + group + registrant + publication + + // checksum + sum := 0 + for i, c := range base { + digit := int(c - '0') + if i%2 == 0 { + sum += digit + } else { + sum += digit * 3 + } + } + remainder := (10 - (sum % 10)) % 10 + check := strconv.Itoa(remainder) + + return strings.Join([]string{prefix, group, registrant, publication, check}, sep) + + default: + // fallback to ISBN-13 if invalid version provided + return productISBN(f, &ISBNOptions{Version: "13", Separator: sep}) + } +} + +func addProductLookup() { + AddFuncLookup("product", Info{ + Display: "Product", + Category: "product", + Description: "An item created for sale or use", + Example: `{ + "name": "olive copper monitor", + "description": "Backwards caused quarterly without week it hungry thing someone him regularly. 
Whomever this revolt hence from his timing as quantity us these yours.", + "categories": [ + "clothing", + "tools and hardware" + ], + "price": 7.61, + "features": [ + "ultra-lightweight" + ], + "color": "navy", + "material": "brass", + "upc": "012780949980", + "audience": [ + "adults" + ], + "dimension": "medium", + "use_case": "home", + "benefit": "comfort", + "suffix": "pro" +}`, + Output: "map[string]any", + ContentType: "application/json", + Aliases: []string{ + "goods", + "merchandise", + "retail item", + "consumer product", + "commercial item", + }, + Keywords: []string{ + "product", "sale", "use", "trade", "manufactured", + "market", "inventory", "supply", "distribution", "commodity", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return product(f), nil + }, + }) + + AddFuncLookup("productname", Info{ + Display: "Product Name", + Category: "product", + Description: "Distinctive title or label assigned to a product for identification and marketing", + Example: "olive copper monitor", + Output: "string", + Aliases: []string{ + "product title", + "product label", + "brand name", + "item name", + "product identifier", + }, + Keywords: []string{ + "product", "name", "title", "label", "brand", + "item", "merchandise", "goods", "article", + "identifier", "marketing", "branding", + "catalog", "inventory", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return productName(f), nil + }, + }) + + AddFuncLookup("productdescription", Info{ + Display: "Product Description", + Category: "product", + Description: "Explanation detailing the features and characteristics of a product", + Example: "Backwards caused quarterly without week it hungry thing someone him regularly. Whomever this revolt hence from his timing as quantity us these yours.", + Output: "string", + Aliases: []string{ + "product details", + "product specs", + "item description", + "feature list", + "marketing copy", + }, + Keywords: []string{ + "product", "description", "details", "features", + "specifications", "characteristics", "summary", + "overview", "attributes", "benefits", + "marketing", "content", "copy", "info", "text", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return productDescription(f), nil + }, + }) + + AddFuncLookup("productcategory", Info{ + Display: "Product Category", + Category: "product", + Description: "Classification grouping similar products based on shared characteristics or functions", + Example: "clothing", + Output: "string", + Aliases: []string{ + "product classification", + "product type", + "item category", + "product group", + "product segment", + }, + Keywords: []string{ + "product", "category", "type", "class", "classification", + "group", "segment", "line", "collection", "range", + "electronics", "furniture", "clothing", "appliances", + "food", "toys", "accessories", "goods", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return productCategory(f), nil + }, + }) + + AddFuncLookup("productfeature", Info{ + Display: "Product Feature", + Category: "product", + Description: "Specific characteristic of a product that distinguishes it from others products", + Example: "ultra-lightweight", + Output: "string", + Aliases: []string{ + "product trait", + "product attribute", + "key feature", + "unique feature", + "special characteristic", + }, + Keywords: []string{ + "feature", "trait", "attribute", "characteristic", + "capability", "functionality", "specification", + "benefit", "advantage", 
"highlight", + "unique", "differentiator", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return productFeature(f), nil + }, + }) + + AddFuncLookup("productmaterial", Info{ + Display: "Product Material", + Category: "product", + Description: "The substance from which a product is made, influencing its appearance, durability, and properties", + Example: "brass", + Output: "string", + Aliases: []string{ + "material type", + "product substance", + "product composition", + "item material", + "build material", + }, + Keywords: []string{ + "material", "substance", "composition", "make", + "fabric", "textile", "cloth", "leather", "wool", + "wood", "metal", "plastic", "glass", "stone", + "durability", "properties", "construction", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return productMaterial(f), nil + }, + }) + + AddFuncLookup("productupc", Info{ + Display: "Product UPC", + Category: "product", + Description: "Standardized barcode used for product identification and tracking in retail and commerce", + Example: "012780949980", + Output: "string", + Aliases: []string{ + "upc code", + "product barcode", + "product code", + "product sku", + "universal product code", + "retail barcode", + }, + Keywords: []string{ + "upc", "barcode", "product", "code", "identifier", + "sku", "retail", "commerce", "inventory", + "tracking", "scanning", "checkout", "label", + "universal", "standard", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return productUPC(f), nil + }, + }) + + AddFuncLookup("productaudience", Info{ + Display: "Product Audience", + Category: "product", + Description: "The group of people for whom the product is designed or intended", + Example: "adults", + Output: "[]string", + Aliases: []string{ + "target audience", + "target market", + "customer group", + "user base", + "demographic group", + }, + Keywords: []string{ + "audience", "market", "segment", "demographic", + "consumer", "customer", "buyer", "user", + "group", "target", "population", "adults", + "kids", "teens", "families", "professionals", + }, + + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return productAudience(f), nil + }, + }) + + AddFuncLookup("productdimension", Info{ + Display: "Product Dimension", + Category: "product", + Description: "The size or dimension of a product", + Example: "medium", + Output: "string", + Aliases: []string{ + "product size", + "product measurement", + "item dimensions", + "product scale", + "size specification", + }, + Keywords: []string{ + "dimension", "size", "measurement", "proportion", + "scale", "specification", "specs", "length", + "width", "height", "depth", "volume", "weight", + "product", "item", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return productDimension(f), nil + }, + }) + + AddFuncLookup("productusecase", Info{ + Display: "Product Use Case", + Category: "product", + Description: "The scenario or purpose for which a product is typically used", + Example: "home", + Output: "string", + Aliases: []string{ + "use case", + "product purpose", + "intended use", + "product application", + "usage scenario", + }, + Keywords: []string{ + "use", "usecase", "purpose", "usage", "application", + "context", "scenario", "situation", "case", + "intention", "goal", "objective", "function", + "home", "office", "outdoor", "industrial", "commercial", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return productUseCase(f), nil + }, + 
}) + + AddFuncLookup("productbenefit", Info{ + Display: "Product Benefit", + Category: "product", + Description: "The key advantage or value the product provides", + Example: "comfort", + Output: "string", + Aliases: []string{ + "product advantage", + "product value", + "user benefit", + "customer gain", + "selling point", + }, + Keywords: []string{ + "benefit", "advantage", "value", "improvement", + "enhancement", "feature", "positive", "outcome", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return productBenefit(f), nil + }, + }) + + AddFuncLookup("productsuffix", Info{ + Display: "Product Suffix", + Category: "product", + Description: "A suffix used to differentiate product models or versions", + Example: "pro", + Output: "string", + Aliases: []string{ + "product suffix", + "model suffix", + "version suffix", + "edition suffix", + "name suffix", + }, + Keywords: []string{ + "suffix", "variant", "edition", "version", "model", + "series", "line", "tier", "release", "upgrade", + "plus", "pro", "max", "lite", "mini", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return productSuffix(f), nil + }, + }) + + AddFuncLookup("productisbn", Info{ + Display: "Product ISBN", + Category: "product", + Description: "ISBN-10 or ISBN-13 identifier for books", + Example: "978-1-4028-9462-6", + Output: "string", + Aliases: []string{ + "isbn code", "isbn number", "book isbn", "isbn13", + "isbn10", "publication code", "book identifier", + }, + Keywords: []string{ + "identifier", "publication", "library", "catalog", + "literature", "reference", "edition", "registration", "publishing", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return productISBN(f, nil), nil + }, + }) +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/school.go b/vendor/github.com/brianvoe/gofakeit/v7/school.go new file mode 100644 index 0000000000..e153e2dc15 --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/school.go @@ -0,0 +1,31 @@ +package gofakeit + +// School will generate a random School type +func School() string { return school(GlobalFaker) } + +func (f *Faker) School() string { return school(f) } + +func school(f *Faker) string { + return getRandValue(f, []string{"school", "name"}) + " " + + getRandValue(f, []string{"school", "isPrivate"}) + " " + + getRandValue(f, []string{"school", "type"}) +} + +func addSchoolLookup() { + AddFuncLookup("school", Info{ + Display: "School", + Category: "school", + Description: "An institution for formal education and learning", + Example: `Harborview State Academy`, + Output: "string", + Aliases: []string{ + "academy", "educational institute", "learning center", "training school", "academic institution", + }, + Keywords: []string{ + "institution", "education", "learning", "teaching", "university", "college", "campus", "classroom", "study", "pupil", "curriculum", "instruction", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return school(f), nil + }, + }) +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/slice.go b/vendor/github.com/brianvoe/gofakeit/v7/slice.go new file mode 100644 index 0000000000..b13fff0368 --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/slice.go @@ -0,0 +1,19 @@ +package gofakeit + +import ( + "reflect" +) + +// Slice fills built-in types and exported fields of a struct with random data. +func Slice(v any) { sliceFunc(GlobalFaker, v) } + +// Slice fills built-in types and exported fields of a struct with random data. 
+func (f *Faker) Slice(v any) { sliceFunc(f, v) } + +func sliceFunc(f *Faker, v any) { + // Note: We intentionally call r with size -1 instead of using structFunc. + // structFunc starts with size 0, which would result in zero-length top-level + // slices and maps. Passing -1 lets rSlice/rMap auto-size (random length) + // when no fakesize tag is provided. + r(f, reflect.TypeOf(v), reflect.ValueOf(v), "", -1, 0) +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/song.go b/vendor/github.com/brianvoe/gofakeit/v7/song.go new file mode 100644 index 0000000000..2b6cc1fd5d --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/song.go @@ -0,0 +1,131 @@ +package gofakeit + +func SongName() string { return songName(GlobalFaker) } + +func (f *Faker) SongName() string { return songName(f) } + +func songName(f *Faker) string { return getRandValue(f, []string{"song", "name"}) } + +func SongArtist() string { return songArtist(GlobalFaker) } + +func (f *Faker) SongArtist() string { return songArtist(f) } + +func songArtist(f *Faker) string { return getRandValue(f, []string{"song", "artist"}) } + +func SongGenre() string { return songGenre(GlobalFaker) } + +func (f *Faker) SongGenre() string { return songGenre(f) } + +func songGenre(f *Faker) string { return getRandValue(f, []string{"song", "genre"}) } + +type SongInfo struct { + Name string `json:"name" xml:"name"` + Artist string `json:"artist" xml:"artist"` + Genre string `json:"genre" xml:"genre"` +} + +func Song() *SongInfo { return song(GlobalFaker) } + +func (f *Faker) Song() *SongInfo { return song(f) } + +func song(f *Faker) *SongInfo { + return &SongInfo{ + Name: songName(f), + Artist: songArtist(f), + Genre: songGenre(f), + } +} + +func addSongLookup() { + AddFuncLookup("song", Info{ + Display: "Song", + Category: "song", + Description: "Song with a drum and horn instrumentation", + Example: `{ + "name": "New Rules", + "genre": "Tropical house" +}`, + Output: "map[string]string", + ContentType: "application/json", + Aliases: []string{ + "musical work", + "sound piece", + "single release", + "record title", + "audio selection", + }, + Keywords: []string{ + "song", "music", "track", "tune", "melody", + "artist", "genre", "name", "composition", "recording", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return song(f), nil + }, + }) + + AddFuncLookup("songname", Info{ + Display: "Song Name", + Category: "song", + Description: "Title or name of a specific song used for identification and reference", + Example: "New Rules", + Output: "string", + Aliases: []string{ + "song title", + "track name", + "music title", + "song label", + }, + Keywords: []string{ + "song", "title", "name", "track", "music", + "single", "hit", "tune", "recording", + "composition", "melody", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return songName(f), nil + }, + }) + + AddFuncLookup("songartist", Info{ + Display: "Song Artist", + Category: "song", + Description: "The artist of maker of song", + Example: "Dua Lipa", + Output: "string", + Aliases: []string{ + "performer name", + "music act", + "band name", + "recording artist", + "song creator", + }, + Keywords: []string{ + "song", "artist", "singer", "musician", "composer", + "band", "producer", "vocalist", "group", "instrumentalist", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return songArtist(f), nil + }, + }) + + AddFuncLookup("songgenre", Info{ + Display: "Genre", + Category: "song", + Description: "Category that 
classifies song based on common themes, styles, and storytelling approaches", + Example: "Action", + Output: "string", + Aliases: []string{ + "music style", + "song category", + "musical classification", + "sound type", + "genre label", + }, + Keywords: []string{ + "song", "genre", "style", "category", "type", + "classification", "theme", "musical", "subgenre", "influence", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return songGenre(f), nil + }, + }) +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/source/BENCHMARKS.md b/vendor/github.com/brianvoe/gofakeit/v7/source/BENCHMARKS.md new file mode 100644 index 0000000000..f582b3d3e3 --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/source/BENCHMARKS.md @@ -0,0 +1,16 @@ +go test -bench=. -benchmem \ +goos: darwin \ +goarch: amd64 \ +pkg: github.com/brianvoe/gofakeit/v7 \ +cpu: Apple M1 Max \ +Table generated with tablesgenerator.com/markdown_tables File->Paste table data + +| Benchmark | Iterations| Time/Iter | Bytes | Allocations | +|---------------------|-----------|-------------|--------|-------------| +| BenchmarkPCG-10 | 251946703 | 4.763 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkChaCha8-10 | 228052915 | 5.262 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkJSF-10 | 323858558 | 3.712 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkSFC-10 | 394809136 | 3.035 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkOld-10 | 207714157 | 5.733 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkDumb-10 | 458967214 | 2.611 ns/op | 0 B/op | 0 allocs/op | +| BenchmarkCrypto-10 | 15747936 | 77.15 ns/op | 0 B/op | 0 allocs/op | \ No newline at end of file diff --git a/vendor/github.com/brianvoe/gofakeit/v7/source/README.md b/vendor/github.com/brianvoe/gofakeit/v7/source/README.md new file mode 100644 index 0000000000..3358ebea03 --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/source/README.md @@ -0,0 +1,65 @@ +# Random Number Generators Collection + +This repository contains a collection of random number generators (RNGs) implemented in Go, designed to cater to a wide range of applications, from cryptographic operations to testing environments. Each RNG in the collection offers distinct features and performance characteristics, making it suitable for various use cases, including those requiring cryptographic security. + +## Generators + +### Crypto + +- **Description**: Utilizes Go's `crypto/rand` package to provide cryptographically secure random numbers, suitable for security-sensitive applications. +- **Usage**: + ```go + source := NewCryptoSource() + number := source.Uint64() + ``` + +### JSF (Jenkins Small Fast) + +- **Description**: An implementation of the Jenkins Small Fast hash function for efficient pseudo-random number generation, balancing speed and randomness quality for general use. +- **Usage**: + ```go + source := NewJSFSource(seed) + number := source.Uint64() + ``` + +### SFC (Simple Fast Counter) + +- **Description**: Based on the Simple Fast Counter algorithm, this source offers rapid number generation with satisfactory randomness properties, ideal for simulations and non-cryptographic applications. +- **Usage**: + ```go + source := NewSFCSource(seed) + number := source.Uint64() + ``` + +### Dumb + +- **Description**: A deterministic generator designed primarily for testing, providing predictable output for scenarios where consistent results are more beneficial than high-quality randomness. 
+- **Usage**: + ```go + source := NewDumb(seed) + number := source.Uint64() + ``` + +## Installation + +To use these RNGs in your Go project, import the package as follows: + +```go +import "github.com/yourusername/randsource" +``` + +Replace `yourusername` with your GitHub username or organization name where the repository is hosted. + +## Usage + +After importing the package, initialize the desired RNG with or without a seed (as applicable) and use the `Uint64` method to generate random numbers. See the usage examples under each generator's description for more details. + +## Benchmarks + +Performance benchmarks for each RNG are provided to help you choose the right generator for your application. These benchmarks cover various aspects, including speed and randomness quality. + +For detailed benchmark results, see the [Benchmarks](https://github.com/brianvoe/gofakeit/blob/master/source/BENCHMARKS.md) file. + +## Contributing + +We welcome contributions and suggestions! Please open an issue or submit a pull request with your improvements. diff --git a/vendor/github.com/brianvoe/gofakeit/v7/source/crypto.go b/vendor/github.com/brianvoe/gofakeit/v7/source/crypto.go new file mode 100644 index 0000000000..9563409263 --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/source/crypto.go @@ -0,0 +1,55 @@ +package source + +import ( + "crypto/rand" + "encoding/binary" +) + +// Package source implements a cryptographically secure pseudo-random number generator (CSPRNG) +// using Go's crypto/rand. The Crypto type is designed for generating high-quality random +// uint64 values, suitable for cryptographic applications like secure token generation, +// cryptographic key creation, and other security-sensitive operations. It offers optional +// thread safety through a locking mechanism, making it suitable for concurrent usage. + +// Pros: +// - Provides cryptographically secure randomness, suitable for security-sensitive applications. +// - Optional thread safety with locking, enabling safe concurrent access. + +// Cons: +// - Locking mechanism, when enabled, may introduce performance overhead. +// - Does not utilize a seed, as it leverages the system's cryptographic RNG, which may be a +// limitation in scenarios where deterministic pseudo-randomness is desired. + +type Crypto struct { + buffer [64]byte // Buffer to hold a block of random data + offset int // Current offset in the buffer +} + +// NewCrypto creates a new instance of Crypto. +func NewCrypto() *Crypto { + return &Crypto{ + buffer: [64]byte{}, // Initialize buffer with zeros + offset: 64, // Set offset to the end of the buffer to trigger a refill on the first call + } +} + +// refillBuffer fills the buffer with random data from crypto/rand. +func (s *Crypto) refillBuffer() { + if _, err := rand.Read(s.buffer[:]); err != nil { + panic("crypto/rand failed: " + err.Error()) // Handle the error appropriately for your application + } + s.offset = 0 // Reset offset after refilling +} + +// Uint64 generates a pseudo-random 64-bit value using crypto/rand, served from a buffered block of data. 
+func (s *Crypto) Uint64() uint64 { + if s.offset+8 > len(s.buffer) { // Check if we need to refill the buffer + s.refillBuffer() + } + + // Extract a uint64 value from the current position in the buffer + val := binary.BigEndian.Uint64(s.buffer[s.offset:]) + s.offset += 8 // Move the offset for the next call + + return val +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/source/dumb.go b/vendor/github.com/brianvoe/gofakeit/v7/source/dumb.go new file mode 100644 index 0000000000..784f55893f --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/source/dumb.go @@ -0,0 +1,44 @@ +package source + +import "time" + +// Dumb is a deterministic pseudo-random number generator designed specifically for testing purposes. +// It offers predictable sequences of numbers based on the provided seed, making it ideal for scenarios +// where consistent and reproducible test results are critical. By default, if initialized with a seed of 0, +// Dumb uses the current timestamp to generate a starting point, ensuring some level of variability between runs. + +// Pros: +// - Predictability: Ensures reproducible outcomes in tests by providing a consistent sequence of numbers for a given seed. +// - Simplicity: Easy to understand and integrate into testing frameworks, with minimal overhead. +// - Default Variability: Uses the current timestamp as the default seed, providing variability across different test runs when no seed is specified. + +// Cons: +// - Not Suitable for Production: Lacks the randomness quality required for production-level cryptographic or statistical applications. +// - Limited Randomness: The simple incrementation approach does not simulate the complexity of real-world random number generation. + +// Dumb is a simplistic generator for predictable testing. +type Dumb struct { + state uint64 +} + +// NewDumb initializes a Dumb generator. +// If the seed is 0, initializes with the current timestamp. +func NewDumb(seed uint64) *Dumb { + d := &Dumb{} + d.Seed(seed) + return d +} + +// Seed sets the generator's state. If the seed is 0, it uses the current timestamp as the seed. +func (d *Dumb) Seed(seed uint64) { + if seed == 0 { + seed = uint64(time.Now().UnixNano()) + } + d.state = seed +} + +// Uint64 returns the next number in the sequence, incrementing the state. +func (d *Dumb) Uint64() uint64 { + d.state += 1 + return d.state +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/source/jsf.go b/vendor/github.com/brianvoe/gofakeit/v7/source/jsf.go new file mode 100644 index 0000000000..1432d66280 --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/source/jsf.go @@ -0,0 +1,50 @@ +package source + +// The JSF(Jenkins Small Fast) pseudo-random number generator. +// Developed by Bob Jenkins, JSF is known for its speed and efficiency, making it suitable +// for applications requiring fast, non-cryptographic quality random numbers. This implementation +// offers seamless integration with Go's math/rand package and includes an improved seeding mechanism. + +// Pros: +// - Fast and efficient, ideal for high-performance requirements. +// - Good randomness quality for non-cryptographic applications. +// - Small state size and simple operations, ensuring a minimal memory footprint. + +// Cons: +// - Not suitable for cryptographic purposes due to its non-cryptographic security level. +// - Quality of randomness may not match that of more complex algorithms. + +type JSF struct { + a, b, c, d uint32 +} + +// NewJSF creates and returns a new JSF pseudo-random number generator. 
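+// Illustrative usage (a sketch; assumes gofakeit v7's NewFaker(src, lock) constructor):
+//
+//	f := gofakeit.NewFaker(source.NewJSF(11), false)
+//
+// A fixed seed yields a reproducible stream, which is useful for deterministic
+// test fixtures; pass a varying seed (e.g. time-based) when variety is wanted.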
+func NewJSF(seed uint64) *JSF { + jsf := &JSF{} + jsf.Seed(seed) + return jsf +} + +// Seed sets the seed of the JSF with an improved seeding mechanism. +func (jsf *JSF) Seed(seed uint64) { + // Use the seed to derive initial values for a, b, c, d with better distribution + // Splitting the 64-bit seed into parts and using different operations to diversify + s1 := uint32(seed) + s2 := uint32(seed >> 32) + jsf.a = 0xf1ea5eed + jsf.b = s1 ^ jsf.a + jsf.c = s2 ^ jsf.b + jsf.d = s1 +} + +// Uint64 generates a pseudo-random 64-bit value using the improved JSF algorithm. +func (jsf *JSF) Uint64() uint64 { + e := jsf.a - (jsf.b<<27 | jsf.b>>(32-27)) + f := jsf.b ^ (jsf.c << 17) + jsf.c += jsf.d + jsf.d += e + jsf.a = jsf.b + f + jsf.b = jsf.c + e + jsf.c = f + jsf.a + return uint64(jsf.d)<<32 | uint64(jsf.a) +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/source/sfc.go b/vendor/github.com/brianvoe/gofakeit/v7/source/sfc.go new file mode 100644 index 0000000000..0357aeec62 --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/source/sfc.go @@ -0,0 +1,44 @@ +package source + +// The SFC(Simple Fast Counter) algorithm is designed for fast and efficient generation of pseudo-random numbers, +// utilizing arithmetic and bitwise operations across state variables and a counter to ensure +// good randomness quality. It is particularly well-suited for applications requiring rapid +// number generation without the need for cryptographic security. + +// Pros: +// - High efficiency and speed, ideal for performance-sensitive applications. +// - Simple to implement and maintain, with minimal computational overhead. +// - Offers a balance between speed and randomness quality, suitable for a wide range of uses. + +// Cons: +// - Not designed for cryptographic applications due to its level of randomness. +// - Initial seeding mechanism is basic; may require enhancement for more complex use cases. + +type SFC struct { + a, b, c, counter uint64 +} + +// NewSFC creates and returns a new SFC pseudo-random number generator seeded with a given seed. +func NewSFC(seed uint64) *SFC { + s := &SFC{} + s.Seed(seed) + return s +} + +// Seed sets the seed of the SFC. This implementation can be enhanced to +// provide a more distributed seeding process across the state variables. +func (s *SFC) Seed(seed uint64) { + s.a = seed + s.b = seed + s.c = seed + s.counter = 1 // Reset counter with new seed +} + +// Uint64 generates a pseudo-random 64-bit value using the SFC algorithm. 
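+// Note: Seed above initializes a, b and c to the same value, but the counter
+// advances on every call, so the state diverges immediately even for seed 0;
+// equal seeds still produce identical, reproducible sequences (e.g. source.NewSFC(42)).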
+func (s *SFC) Uint64() uint64 { + s.a += s.b + s.counter + s.b ^= s.c + s.c -= s.a + s.counter++ + return s.c + s.b +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/sql.go b/vendor/github.com/brianvoe/gofakeit/v7/sql.go new file mode 100644 index 0000000000..0d447c9e22 --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/sql.go @@ -0,0 +1,163 @@ +package gofakeit + +import ( + "encoding/json" + "errors" + "fmt" + "strings" +) + +type SQLOptions struct { + Table string `json:"table" xml:"table"` // Table name we are inserting into + Count int `json:"count" xml:"count"` // How many entries (tuples) we're generating + Fields []Field `json:"fields" xml:"fields"` // The fields to be generated +} + +func SQL(so *SQLOptions) (string, error) { return sqlFunc(GlobalFaker, so) } + +func (f *Faker) SQL(so *SQLOptions) (string, error) { return sqlFunc(f, so) } + +func sqlFunc(f *Faker, so *SQLOptions) (string, error) { + if so.Table == "" { + return "", errors.New("must provide table name to generate SQL") + } + if len(so.Fields) <= 0 { + return "", errors.New(("must pass fields in order to generate SQL queries")) + } + if so.Count <= 0 { + return "", errors.New("must have entry count") + } + + var sb strings.Builder + sb.WriteString("INSERT INTO " + so.Table + " ") + + // Loop through each field and put together column names + var cols []string + for _, f := range so.Fields { + cols = append(cols, f.Name) + } + sb.WriteString("(" + strings.Join(cols, ", ") + ")") + + sb.WriteString(" VALUES ") + for i := 0; i < so.Count; i++ { + // Start opening value + sb.WriteString("(") + + // Now, we need to add all of our fields + var endStr string + for ii, field := range so.Fields { + // Set end of value string + endStr = ", " + if ii == len(so.Fields)-1 { + endStr = "" + } + + // If autoincrement, add based upon loop + if field.Function == "autoincrement" { + sb.WriteString(fmt.Sprintf("%d%s", i+1, endStr)) + continue + } + + // Get the function info for the field + funcInfo := GetFuncLookup(field.Function) + if funcInfo == nil { + return "", errors.New("invalid function, " + field.Function + " does not exist") + } + + // Generate the value + val, err := funcInfo.Generate(f, &field.Params, funcInfo) + if err != nil { + return "", err + } + + // Convert the output value to the proper SQL type + convertType := sqlConvertType(funcInfo.Output, val) + + // If its the last field, we need to close the value + sb.WriteString(convertType + endStr) + } + + // If its the last value, we need to close the value + if i == so.Count-1 { + sb.WriteString(");") + } else { + sb.WriteString("),") + } + } + + return sb.String(), nil +} + +// sqlConvertType will take in a type and value and convert it to the proper SQL type +func sqlConvertType(t string, val any) string { + switch t { + case "string": + return `'` + fmt.Sprintf("%v", val) + `'` + case "[]byte": + return `'` + fmt.Sprintf("%s", val) + `'` + default: + return fmt.Sprintf("%v", val) + } +} + +func addDatabaseSQLLookup() { + AddFuncLookup("sql", Info{ + Display: "SQL", + Category: "database", + Description: "Command in SQL used to add new data records into a database table", + Example: `INSERT INTO people + (id, first_name, price, age, created_at) +VALUES + (1, 'Markus', 804.92, 21, '1937-01-30 07:58:01'), + (2, 'Santino', 235.13, 40, '1964-07-07 22:25:40');`, + Output: "string", + ContentType: "application/sql", + Aliases: []string{ + "insert command", "database query", "sql statement", "record insert", "data query", + }, + Keywords: []string{ + 
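+			// Illustrative call through the Go API rather than the lookup (a sketch;
+			// the table and field names below are arbitrary examples):
+			//
+			//	stmt, _ := gofakeit.SQL(&gofakeit.SQLOptions{
+			//		Table: "people",
+			//		Count: 2,
+			//		Fields: []gofakeit.Field{
+			//			{Name: "id", Function: "autoincrement"},
+			//			{Name: "first_name", Function: "firstname"},
+			//			{Name: "email", Function: "email"},
+			//		},
+			//	})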
"database", "insert", "command", "records", "table", "tuples", "rows", "data", "values", "query", + }, + Params: []Param{ + {Field: "table", Display: "Table", Type: "string", Description: "Name of the table to insert into"}, + {Field: "count", Display: "Count", Type: "int", Default: "100", Description: "Number of inserts to generate"}, + {Field: "fields", Display: "Fields", Type: "[]Field", Description: "Fields containing key name and function to run in json format"}, + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + so := SQLOptions{} + + table, err := info.GetString(m, "table") + if err != nil { + return nil, err + } + so.Table = table + + count, err := info.GetInt(m, "count") + if err != nil { + return nil, err + } + so.Count = count + + fieldsStr, err := info.GetStringArray(m, "fields") + if err != nil { + return nil, err + } + + // Check to make sure fields has length + if len(fieldsStr) > 0 { + so.Fields = make([]Field, len(fieldsStr)) + + for i, f := range fieldsStr { + // Unmarshal fields string into fields array + err = json.Unmarshal([]byte(f), &so.Fields[i]) + if err != nil { + return nil, err + } + } + } + + return sqlFunc(f, &so) + }, + }) + +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/string.go b/vendor/github.com/brianvoe/gofakeit/v7/string.go new file mode 100644 index 0000000000..46bf032ffb --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/string.go @@ -0,0 +1,325 @@ +package gofakeit + +// Letter will generate a single random lower case ASCII letter +func Letter() string { return letter(GlobalFaker) } + +// Letter will generate a single random lower case ASCII letter +func (f *Faker) Letter() string { return letter(f) } + +func letter(f *Faker) string { return string(randLetter(f)) } + +// LetterN will generate a random ASCII string with length N. Note that this function returns a string with a length of 1 when 0 is passed. +func LetterN(n uint) string { return letterN(GlobalFaker, n) } + +// LetterN will generate a random ASCII string with length N. Note that this function returns a string with a length of 1 when 0 is passed. +func (f *Faker) LetterN(n uint) string { return letterN(f, n) } + +func letterN(f *Faker, n uint) string { + // Make sure we dont use 0 + if n == 0 { + n = 1 + } + out := make([]rune, n) + for i := 0; i < int(n); i++ { + out[i] = randLetter(f) + } + return string(out) +} + +// Vowel will generate a single random lower case vowel +func Vowel() string { return vowel(GlobalFaker) } + +// Vowel will generate a single random lower case vowel +func (f *Faker) Vowel() string { return vowel(f) } + +func vowel(f *Faker) string { return string(randCharacter(f, vowels)) } + +// Digit will generate a single ASCII digit +func Digit() string { return digit(GlobalFaker) } + +// Digit will generate a single ASCII digit +func (f *Faker) Digit() string { return digit(f) } + +func digit(f *Faker) string { return string(randDigit(f)) } + +// DigitN will generate a random string of length N consists of ASCII digits. Note that the string generated can start with 0 and this function returns a string with a length of 1 when 0 is passed. +func DigitN(n uint) string { return digitN(GlobalFaker, n) } + +// DigitN will generate a random string of length N consists of ASCII digits. Note that the string generated can start with 0 and this function returns a string with a length of 1 when 0 is passed. 
+func (f *Faker) DigitN(n uint) string { return digitN(f, n) } + +func digitN(f *Faker, n uint) string { + // Make sure we dont use 0 + if n == 0 { + n = 1 + } + out := make([]rune, n) + for i := 0; i < int(n); i++ { + out[i] = randDigit(f) + } + return string(out) +} + +// Numerify will replace # with random numerical values +func Numerify(str string) string { return numerify(GlobalFaker, str) } + +// Numerify will replace # with random numerical values +func (f *Faker) Numerify(str string) string { return numerify(f, str) } + +func numerify(f *Faker, str string) string { return replaceWithNumbers(f, str) } + +// Lexify will replace ? with random generated letters +func Lexify(str string) string { return lexify(GlobalFaker, str) } + +// Lexify will replace ? with random generated letters +func (f *Faker) Lexify(str string) string { return lexify(f, str) } + +func lexify(f *Faker, str string) string { return replaceWithLetters(f, str) } + +// ShuffleStrings will randomize a slice of strings +func ShuffleStrings(a []string) { shuffleStrings(GlobalFaker, a) } + +// ShuffleStrings will randomize a slice of strings +func (f *Faker) ShuffleStrings(a []string) { shuffleStrings(f, a) } + +func shuffleStrings(f *Faker, a []string) { + swap := func(i, j int) { + a[i], a[j] = a[j], a[i] + } + //to avoid upgrading to 1.10 I copied the algorithm + n := len(a) + if n <= 1 { + return + } + + //if size is > int32 probably it will never finish, or ran out of entropy + i := n - 1 + for ; i > 0; i-- { + j := int(int32NFunc(f, int32(i+1))) + swap(i, j) + } +} + +// RandomString will take in a slice of string and return a randomly selected value +func RandomString(a []string) string { return randomString(GlobalFaker, a) } + +// RandomString will take in a slice of string and return a randomly selected value +func (f *Faker) RandomString(a []string) string { return randomString(f, a) } + +func randomString(f *Faker, a []string) string { + size := len(a) + if size == 0 { + return "" + } + if size == 1 { + return a[0] + } + return a[f.IntN(size)] +} + +func addStringLookup() { + AddFuncLookup("letter", Info{ + Display: "Letter", + Category: "string", + Description: "Character or symbol from the American Standard Code for Information Interchange (ASCII) character set", + Example: "g", + Output: "string", + Aliases: []string{ + "alphabet", "character", "text symbol", "ascii char", "alphabetical sign", + }, + Keywords: []string{ + "standard", "code", "information", "interchange", "set", "printable", "typography", "symbolic", "encoding", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return letter(f), nil + }, + }) + + AddFuncLookup("lettern", Info{ + Display: "LetterN", + Category: "string", + Description: "ASCII string with length N", + Example: "gbRMaRxHki", + Output: "string", + Aliases: []string{ + "random letters", "ascii string", "text sequence", "generated letters", "alphabetical string", + }, + Keywords: []string{ + "sequence", "multiple", "concatenated", "combined", "series", "generated", "batch", "collection", + }, + Params: []Param{ + {Field: "count", Display: "Count", Type: "uint", Description: "Number of digits to generate"}, + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + ui, err := info.GetUint(m, "count") + if err != nil { + return nil, err + } + + return letterN(f, ui), nil + }, + }) + + AddFuncLookup("vowel", Info{ + Display: "Vowel", + Category: "string", + Description: "Speech sound produced with an open vocal tract", + Example: "a", + 
Output: "string", + Aliases: []string{ + "vocal sound", "speech letter", "phonetic vowel", "linguistic vowel", "spoken sound", + }, + Keywords: []string{ + "open", "e", "i", "o", "u", "phonetic", "linguistic", "articulation", "pronunciation", "syllable", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return vowel(f), nil + }, + }) + + AddFuncLookup("digit", Info{ + Display: "Digit", + Category: "string", + Description: "Numerical symbol used to represent numbers", + Example: "0", + Output: "string", + Aliases: []string{ + "number symbol", "numeric character", "decimal digit", "ascii number", "numerical sign", + }, + Keywords: []string{ + "0", "1", "2", "3", "4", "5", "6", "7", "8", "9", "decimal", "base10", "notation", "numeric", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return digit(f), nil + }, + }) + + AddFuncLookup("digitn", Info{ + Display: "DigitN", + Category: "string", + Description: "String of length N consisting of ASCII digits", + Example: "0136459948", + Output: "string", + Aliases: []string{ + "numeric string", "digit sequence", "number series", "generated digits", "ascii digits", + }, + Keywords: []string{ + "consisting", "multiple", "concatenated", "combined", "series", "numeric", "sequence", "continuous", "string", "digits", + }, + Params: []Param{ + {Field: "count", Display: "Count", Type: "uint", Description: "Number of digits to generate"}, + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + ui, err := info.GetUint(m, "count") + if err != nil { + return nil, err + } + + return digitN(f, ui), nil + }, + }) + + AddFuncLookup("numerify", Info{ + Display: "Numerify", + Category: "string", + Description: "Replace # with random numerical values", + Example: "(###)###-#### => (555)867-5309", + Output: "string", + Aliases: []string{ + "hash replace", "number substitute", "pattern filler", "digit replacer", "placeholder numbers", + }, + Keywords: []string{ + "replace", "hash", "pound", "template", "placeholder", "format", "substitute", "pattern", "randomize", "masking", + }, + Params: []Param{ + {Field: "str", Display: "String", Type: "string", Description: "String value to replace #'s"}, + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + str, err := info.GetString(m, "str") + if err != nil { + return nil, err + } + + return numerify(f, str), nil + }, + }) + + AddFuncLookup("lexify", Info{ + Display: "Lexify", + Category: "string", + Description: "Replace ? 
with random generated letters", + Example: "?????@??????.com => billy@mister.com", + Output: "string", + Aliases: []string{ + "letter substitute", "pattern letters", "placeholder letters", "random letter filler", "character replacer", + }, + Keywords: []string{ + "replace", "question", "mark", "template", "placeholder", "format", "substitute", "pattern", "randomize", "masking", + }, + Params: []Param{ + {Field: "str", Display: "String", Type: "string", Description: "String value to replace ?'s"}, + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + str, err := info.GetString(m, "str") + if err != nil { + return nil, err + } + + return lexify(f, str), nil + }, + }) + + AddFuncLookup("shufflestrings", Info{ + Display: "Shuffle Strings", + Category: "string", + Description: "Shuffle an array of strings", + Example: "hello,world,whats,up => whats,world,hello,up", + Output: "[]string", + ContentType: "application/json", + Aliases: []string{ + "array shuffle", "list randomize", "string reorder", "string mixer", "sequence shuffle", + }, + Keywords: []string{ + "collection", "list", "slice", "permutation", "randomized", "scrambled", "jumbled", "unordered", + }, + Params: []Param{ + {Field: "strs", Display: "Strings", Type: "[]string", Description: "Delimited separated strings"}, + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + strs, err := info.GetStringArray(m, "strs") + if err != nil { + return nil, err + } + + shuffleStrings(f, strs) + + return strs, nil + }, + }) + + AddFuncLookup("randomstring", Info{ + Display: "Random String", + Category: "string", + Description: "Return a random string from a string array", + Example: "hello,world,whats,up => world", + Output: "string", + Aliases: []string{ + "string picker", "array choice", "string select", "random pick", "string chooser", + }, + Keywords: []string{ + "selection", "chosen", "picked", "random", "list", "slice", "array", "choice", "element", "option", + }, + Params: []Param{ + {Field: "strs", Display: "Strings", Type: "[]string", Description: "Delimited separated strings"}, + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + strs, err := info.GetStringArray(m, "strs") + if err != nil { + return nil, err + } + + return randomString(f, strs), nil + }, + }) + +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/struct.go b/vendor/github.com/brianvoe/gofakeit/v7/struct.go new file mode 100644 index 0000000000..86026ae7cb --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/struct.go @@ -0,0 +1,642 @@ +package gofakeit + +import ( + "errors" + "fmt" + "reflect" + "strconv" + "strings" + "time" +) + +// RecursiveDepth controls the maximum recursion depth when populating structs. +// Increase if your data structures are deeply nested; decrease to be more conservative. +var RecursiveDepth = 10 + +// Struct fills in exported fields of a struct with random data +// based on the value of `fake` tag of exported fields +// or with the result of a call to the Fake() method +// if the field type implements `Fakeable`. +// Use `fake:"skip"` to explicitly skip an element. +// All built-in types are supported, with templating support +// for string types. +func Struct(v any) error { return structFunc(GlobalFaker, v) } + +// Struct fills in exported fields of a struct with random data +// based on the value of `fake` tag of exported fields. +// Use `fake:"skip"` to explicitly skip an element. +// All built-in types are supported, with templating support +// for string types. 
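+// Illustrative tag usage from a consuming package (a sketch, not part of the
+// upstream file; the tags follow the `fake`/`fakesize` conventions parsed below):
+//
+//	type Person struct {
+//		Name  string   `fake:"{firstname}"`
+//		Age   int      `fake:"{number:18,65}"`
+//		Tags  []string `fakesize:"3"`
+//		Note  string   `fake:"skip"` // left untouched
+//	}
+//
+//	var p Person
+//	_ = gofakeit.Struct(&p)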
+func (f *Faker) Struct(v any) error { return structFunc(f, v) } + +func structFunc(f *Faker, v any) error { + return r(f, reflect.TypeOf(v), reflect.ValueOf(v), "", 0, 0) +} + +func r(f *Faker, t reflect.Type, v reflect.Value, tag string, size int, depth int) error { + // Handle special types + + if t.PkgPath() == "encoding/json" { + // encoding/json has two special types: + // - RawMessage + // - Number + + switch t.Name() { + case "RawMessage": + return rJsonRawMessage(f, v, tag) + case "Number": + return rJsonNumber(f, v, tag) + default: + return errors.New("unknown encoding/json type: " + t.Name()) + } + } + + // Handle generic types + switch t.Kind() { + case reflect.Ptr: + return rPointer(f, t, v, tag, size, depth) + case reflect.Struct: + return rStruct(f, t, v, tag, depth) + case reflect.String: + return rString(f, t, v, tag) + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: + return rUint(f, t, v, tag) + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + return rInt(f, t, v, tag) + case reflect.Float32, reflect.Float64: + return rFloat(f, t, v, tag) + case reflect.Bool: + return rBool(f, t, v, tag) + case reflect.Array, reflect.Slice: + return rSlice(f, t, v, tag, size, depth) + case reflect.Map: + return rMap(f, t, v, tag, size, depth) + } + + return nil +} + +func rCustom(f *Faker, v reflect.Value, tag string) error { + // If tag is empty return error + if tag == "" { + return errors.New("tag is empty") + } + + fName, fParams := parseNameAndParamsFromTag(tag) + info := GetFuncLookup(fName) + + // Check to see if it's a replaceable lookup function + if info == nil { + return fmt.Errorf("function %q not found", tag) + } + + // Parse map params + mapParams, err := parseMapParams(info, fParams) + if err != nil { + return err + } + + // Call function + fValue, err := info.Generate(f, mapParams, info) + if err != nil { + return err + } + + // Create new element of expected type + field := reflect.New(reflect.TypeOf(fValue)) + field.Elem().Set(reflect.ValueOf(fValue)) + + // Check if element is pointer if so + // grab the underlying value + fieldElem := field.Elem() + if fieldElem.Kind() == reflect.Ptr { + fieldElem = fieldElem.Elem() + } + + // Check if field kind is the same as the expected type + if fieldElem.Kind() != v.Kind() { + // return error saying the field and kinds that do not match + return errors.New("field kind " + fieldElem.Kind().String() + " does not match expected kind " + v.Kind().String()) + } + + // Set the value + v.Set(fieldElem.Convert(v.Type())) + + // If a function is called to set the struct + // stop from going through sub fields + return nil +} + +func rStruct(f *Faker, t reflect.Type, v reflect.Value, tag string, depth int) error { + // Prevent recursing deeper than configured levels + if depth >= RecursiveDepth { + return nil + } + + // Check if tag exists, if so run custom function + if t.Name() != "" && tag != "" { + return rCustom(f, v, tag) + } + + // Check if struct is fakeable + if isFakeable(t) { + value, err := callFake(f, v, reflect.Struct) + if err != nil { + return err + } + + v.Set(reflect.ValueOf(value)) + return nil + } + + // Loop through all the fields of the struct + n := t.NumField() + for i := 0; i < n; i++ { + elementT := t.Field(i) + elementV := v.Field(i) + fakeTag, ok := elementT.Tag.Lookup("fake") + + // Check whether or not to skip this field + if ok && fakeTag == "skip" || fakeTag == "-" { + // Do nothing, skip it + continue + } + + // Check to make sure you can set 
it or that it's an embedded(anonymous) field + if !elementV.CanSet() && !elementT.Anonymous { + continue + } + + // Check if reflect type is of values we can specifically set + elemStr := elementT.Type.String() + switch elemStr { + case "time.Time", "*time.Time": + // Check if element is a pointer + elemV := elementV + if elemStr == "*time.Time" { + elemV = reflect.New(elementT.Type.Elem()).Elem() + } + + // Run rTime on the element + err := rTime(f, elementT, elemV, fakeTag) + if err != nil { + return err + } + + if elemStr == "*time.Time" { + elementV.Set(elemV.Addr()) + } + + continue + } + + // Check if fakesize is set + size := -1 // Set to -1 to indicate fakesize was not set + fs, ok := elementT.Tag.Lookup("fakesize") + if ok { + var err error + + // Check if size has params separated by , + if strings.Contains(fs, ",") { + sizeSplit := strings.SplitN(fs, ",", 2) + if len(sizeSplit) == 2 { + var sizeMin int + var sizeMax int + + sizeMin, err = strconv.Atoi(sizeSplit[0]) + if err != nil { + return err + } + sizeMax, err = strconv.Atoi(sizeSplit[1]) + if err != nil { + return err + } + + size = f.IntN(sizeMax-sizeMin+1) + sizeMin + } + } else { + size, err = strconv.Atoi(fs) + if err != nil { + return err + } + } + } + + // Recursively call r() to fill in the struct + err := r(f, elementT.Type, elementV, fakeTag, size, depth+1) + if err != nil { + return err + } + } + + return nil +} + +func rPointer(f *Faker, t reflect.Type, v reflect.Value, tag string, size int, depth int) error { + elemT := t.Elem() + // Prevent recursing deeper than configured levels + if depth >= RecursiveDepth { + return nil + } + + if v.IsNil() { + nv := reflect.New(elemT).Elem() + err := r(f, elemT, nv, tag, size, depth+1) + if err != nil { + return err + } + + v.Set(nv.Addr()) + } else { + err := r(f, elemT, v.Elem(), tag, size, depth+1) + if err != nil { + return err + } + } + + return nil +} + +func rSlice(f *Faker, t reflect.Type, v reflect.Value, tag string, size int, depth int) error { + // If you cant even set it dont even try + if !v.CanSet() { + return errors.New("cannot set slice") + } + + // Prevent recursing deeper than configured levels + if depth >= RecursiveDepth { + return nil + } + + // Check if tag exists, if so run custom function + if t.Name() != "" && tag != "" { + // Check to see if custom function works if not continue to normal loop of values + err := rCustom(f, v, tag) + if err == nil { + return nil + } + } else if isFakeable(t) { + value, err := callFake(f, v, reflect.Slice, reflect.Array) + if err != nil { + return err + } + + v.Set(reflect.ValueOf(value)) + return nil + } + + // Grab original size to use if needed for sub arrays + ogSize := size + + // If the value has a len and is less than the size + // use that instead of the requested size + elemLen := v.Len() + if elemLen == 0 && size == -1 { + size = number(f, 1, 10) + } else if elemLen != 0 && (size == -1 || elemLen < size) { + size = elemLen + } + + // Get the element type + elemT := t.Elem() + + // Loop through the elements length and set based upon the index + for i := 0; i < size; i++ { + nv := reflect.New(elemT) + err := r(f, elemT, nv.Elem(), tag, ogSize, depth+1) + if err != nil { + return err + } + + // If values are already set fill them up, otherwise append + if elemLen != 0 { + v.Index(i).Set(reflect.Indirect(nv)) + } else { + v.Set(reflect.Append(reflect.Indirect(v), reflect.Indirect(nv))) + } + } + + return nil +} + +func rMap(f *Faker, t reflect.Type, v reflect.Value, tag string, size int, depth int) error { + // 
If you cant even set it dont even try + if !v.CanSet() { + return errors.New("cannot set slice") + } + + // Prevent recursing deeper than configured levels + if depth >= RecursiveDepth { + return nil + } + + // Check if tag exists, if so run custom function + if tag != "" { + return rCustom(f, v, tag) + } else if isFakeable(t) && size <= 0 { + // Only call custom function if no fakesize is specified (size <= 0) + value, err := callFake(f, v, reflect.Map) + if err != nil { + return err + } + + v.Set(reflect.ValueOf(value)) + return nil + } + + // Set a size + newSize := size + if newSize == -1 { + newSize = number(f, 1, 10) + } + + // Create new map based upon map key value type + mapType := reflect.MapOf(t.Key(), t.Elem()) + newMap := reflect.MakeMap(mapType) + + for i := 0; i < newSize; i++ { + // Create new key + mapIndex := reflect.New(t.Key()) + err := r(f, t.Key(), mapIndex.Elem(), "", -1, depth+1) + if err != nil { + return err + } + + // Create new value + mapValue := reflect.New(t.Elem()) + err = r(f, t.Elem(), mapValue.Elem(), "", -1, depth+1) + if err != nil { + return err + } + + newMap.SetMapIndex(mapIndex.Elem(), mapValue.Elem()) + } + + // Set newMap into struct field + if t.Kind() == reflect.Ptr { + v.Set(newMap.Elem()) + } else { + v.Set(newMap) + } + + return nil +} + +func rString(f *Faker, t reflect.Type, v reflect.Value, tag string) error { + if tag != "" { + genStr, err := generate(f, tag) + if err != nil { + return err + } + + v.SetString(genStr) + } else if isFakeable(t) { + value, err := callFake(f, v, reflect.String) + if err != nil { + return err + } + + valueStr, ok := value.(string) + if !ok { + return errors.New("call to Fake method did not return a string") + } + v.SetString(valueStr) + } else { + genStr, err := generate(f, strings.Repeat("?", number(f, 4, 10))) + if err != nil { + return err + } + + v.SetString(genStr) + } + + return nil +} + +func rInt(f *Faker, t reflect.Type, v reflect.Value, tag string) error { + if tag != "" { + genStr, err := generate(f, tag) + if err != nil { + return err + } + + i, err := strconv.ParseInt(genStr, 10, 64) + if err != nil { + return err + } + + v.SetInt(i) + } else if isFakeable(t) { + value, err := callFake(f, v, reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64) + if err != nil { + return err + } + + switch i := value.(type) { + case int: + v.SetInt(int64(i)) + case int8: + v.SetInt(int64(i)) + case int16: + v.SetInt(int64(i)) + case int32: + v.SetInt(int64(i)) + case int64: + v.SetInt(int64(i)) + default: + return errors.New("call to Fake method did not return an integer") + } + } else { + // If no tag or error converting to int, set with random value + switch t.Kind() { + case reflect.Int: + v.SetInt(int64Func(f)) + case reflect.Int8: + v.SetInt(int64(int8Func(f))) + case reflect.Int16: + v.SetInt(int64(int16Func(f))) + case reflect.Int32: + v.SetInt(int64(int32Func(f))) + case reflect.Int64: + v.SetInt(int64Func(f)) + } + } + + return nil +} + +func rUint(f *Faker, t reflect.Type, v reflect.Value, tag string) error { + if tag != "" { + genStr, err := generate(f, tag) + if err != nil { + return err + } + + u, err := strconv.ParseUint(genStr, 10, 64) + if err != nil { + return err + } + + v.SetUint(u) + } else if isFakeable(t) { + value, err := callFake(f, v, reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64) + if err != nil { + return err + } + + switch i := value.(type) { + case uint: + v.SetUint(uint64(i)) + case uint8: + v.SetUint(uint64(i)) + case uint16: + 
v.SetUint(uint64(i)) + case uint32: + v.SetUint(uint64(i)) + case uint64: + v.SetUint(uint64(i)) + default: + return errors.New("call to Fake method did not return an unsigned integer") + } + } else { + // If no tag or error converting to uint, set with random value + switch t.Kind() { + case reflect.Uint: + v.SetUint(f.Uint64()) + case reflect.Uint8: + v.SetUint(uint64(uint8Func(f))) + case reflect.Uint16: + v.SetUint(uint64(uint16Func(f))) + case reflect.Uint32: + v.SetUint(uint64(uint32Func(f))) + case reflect.Uint64: + v.SetUint(f.Uint64()) + } + } + + return nil +} + +func rFloat(f *Faker, t reflect.Type, v reflect.Value, tag string) error { + if tag != "" { + genStr, err := generate(f, tag) + if err != nil { + return err + } + + f, err := strconv.ParseFloat(genStr, 64) + if err != nil { + return err + } + + v.SetFloat(f) + } else if isFakeable(t) { + value, err := callFake(f, v, reflect.Float32, reflect.Float64) + if err != nil { + return err + } + + switch i := value.(type) { + case float32: + v.SetFloat(float64(i)) + case float64: + v.SetFloat(float64(i)) + default: + return errors.New("call to Fake method did not return a float") + } + } else { + // If no tag or error converting to float, set with random value + switch t.Kind() { + case reflect.Float64: + v.SetFloat(float64Func(f)) + case reflect.Float32: + v.SetFloat(float64(float32Func(f))) + } + } + + return nil +} + +func rBool(f *Faker, t reflect.Type, v reflect.Value, tag string) error { + if tag != "" { + genStr, err := generate(f, tag) + if err != nil { + return err + } + + b, err := strconv.ParseBool(genStr) + if err != nil { + return err + } + + v.SetBool(b) + } else if isFakeable(t) { + value, err := callFake(f, v, reflect.Bool) + if err != nil { + return err + } + + switch i := value.(type) { + case bool: + v.SetBool(bool(i)) + default: + return errors.New("call to Fake method did not return a boolean") + } + } else { + // If no tag or error converting to boolean, set with random value + v.SetBool(boolFunc(f)) + } + + return nil +} + +// rTime will set a time.Time field the best it can from either the default date tag or from the generate tag +func rTime(f *Faker, t reflect.StructField, v reflect.Value, tag string) error { + if tag != "" { + // Generate time + timeOutput, err := generate(f, tag) + if err != nil { + return err + } + + // Check to see if timeOutput has monotonic clock reading + // if so, remove it. 
This is because time.Parse() does not + // support parsing the monotonic clock reading + if strings.Contains(timeOutput, " m=") { + timeOutput = strings.Split(timeOutput, " m=")[0] + } + + // Check to see if they are passing in a format to parse the time + timeFormat, timeFormatOK := t.Tag.Lookup("format") + if timeFormatOK { + timeFormat = javaDateTimeFormatToGolangFormat(timeFormat) + } else { + // If tag == "{date}" use time.RFC3339 + // They are attempting to use the default date lookup + if tag == "{date}" { + timeFormat = time.RFC3339 + } else { + // Default format of time.Now().String() + timeFormat = "2006-01-02 15:04:05.999999999 -0700 MST" + } + } + + // If output is larger than format cut the output + // This helps us avoid errors from time.Parse + if len(timeOutput) > len(timeFormat) { + timeOutput = timeOutput[:len(timeFormat)] + } + + // Attempt to parse the time + timeStruct, err := time.Parse(timeFormat, timeOutput) + if err != nil { + return err + } + + v.Set(reflect.ValueOf(timeStruct)) + return nil + } + + v.Set(reflect.ValueOf(date(f))) + return nil +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/template.go b/vendor/github.com/brianvoe/gofakeit/v7/template.go new file mode 100644 index 0000000000..39fff75379 --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/template.go @@ -0,0 +1,465 @@ +package gofakeit + +import ( + "bytes" + "fmt" + "strconv" + "time" + + "reflect" + "strings" + "text/template" +) + +// TemplateOptions defines values needed for template document generation +type TemplateOptions struct { + Funcs template.FuncMap `fake:"-"` + Data any `json:"data" xml:"data" fake:"-"` +} + +// Template generates an document based on the the supplied template +func Template(template string, co *TemplateOptions) (string, error) { + if co == nil { + co = &TemplateOptions{} + GlobalFaker.Struct(co) + } + return templateFunc(template, templateFuncMap(GlobalFaker, &co.Funcs), co) +} + +// Template generates an document based on the the supplied template +func (f *Faker) Template(template string, co *TemplateOptions) (string, error) { + if co == nil { + co = &TemplateOptions{} + f.Struct(co) + } + return templateFunc(template, templateFuncMap(f, &co.Funcs), co) +} + +// MarkdownOptions defines values needed for markdown document generation +type MarkdownOptions struct { +} + +// Template for Markdown +const templateMarkdown = ` +{{$repo := Gamertag}} +{{$language := RandomString (SliceString "go" "python" "javascript")}} +{{$username := Gamertag}} +{{$weightedSlice := SliceAny "github.com" "gitlab.com" "bitbucket.org"}} +{{$weightedWeights := SliceF32 5 1 1}} +{{$domain := Weighted $weightedSlice $weightedWeights}} +{{$action := RandomString (SliceString "process" "run" "execute" "perform" "handle")}} +{{$usage := RandomString (SliceString "whimsical story" "quirky message" "playful alert" "funny request" "lighthearted command")}} +{{$result := RandomString (SliceString "success" "error" "unknown" "completed" "failed" "finished" "in progress" "terminated")}} + +# {{$repo}} + +*Author: {{FirstName}} {{LastName}}* + +{{Paragraph 2 5 7 "\n\n"}} + +## Table of Contents +- [Installation](#installation) +- [Usage](#usage) +- [License](#license) + +## Installation +{{if eq $language "go"}}'''go +go get {{$domain}}/{{$username}}/{{$repo}} +'''{{else if eq $language "python"}}'''bash +pip install {{$repo}} +'''{{else if eq $language "javascript"}}'''js +npm install {{$repo}} +'''{{end}} + +## Usage +{{if eq $language "go"}}'''go +result := 
{{$repo}}.{{$action}}("{{ToLower $usage}}") +fmt.Println("{{ToLower $repo}} result:", "{{ToLower $result}}") +'''{{else if eq $language "python"}}'''python +result = {{ToLower $repo}}.{{$action}}("{{ToLower $usage}}") +print("{{ToLower $repo}} result:", "{{ToLower $result}}") +'''{{else if eq $language "javascript"}}'''javascript +const result = {{ToLower $repo}}.{{$action}}("{{ToLower $usage}}"); +console.log("{{ToLower $repo}} result:", "{{ToLower $result}}"); +'''{{end}} + +## License +{{RandomString (SliceString "MIT" "Apache 2.0" "GPL-3.0" "BSD-3-Clause" "ISC")}} +` + +// Markdown will return a single random Markdown template document +func Markdown(co *MarkdownOptions) (string, error) { + if co == nil { + co = &MarkdownOptions{} + GlobalFaker.Struct(co) + } + return templateFunc(templateMarkdown, templateFuncMap(GlobalFaker, nil), co) +} + +// Markdown will return a single random Markdown template document +func (f *Faker) Markdown(co *MarkdownOptions) (string, error) { + if co == nil { + co = &MarkdownOptions{} + f.Struct(co) + } + return templateFunc(templateMarkdown, templateFuncMap(f, nil), co) +} + +// EmailOptions defines values needed for email document generation +type EmailOptions struct { +} + +// Template for email text +const templateEmail = ` +Subject: {{RandomString (SliceString "Greetings" "Hello" "Hi")}} from {{FirstName}}! + +Dear {{LastName}}, + +{{RandomString (SliceString "Greetings!" "Hello there!" "Hi, how are you?")}} {{RandomString (SliceString "How's everything going?" "I hope your day is going well." "Sending positive vibes your way.")}} + +{{RandomString (SliceString "I trust this email finds you well." "I hope you're doing great." "Hoping this message reaches you in good spirits.")}} {{RandomString (SliceString "Wishing you a fantastic day!" "May your week be filled with joy." "Sending good vibes your way.")}} + +{{Paragraph 3 5 10 "\n\n"}} + +{{RandomString (SliceString "I would appreciate your thoughts on" "I'm eager to hear your feedback on" "I'm curious to know what you think about")}} it. If you have a moment, please feel free to check out the project on {{RandomString (SliceString "GitHub" "GitLab" "Bitbucket")}} + +{{RandomString (SliceString "Your insights would be invaluable." "I'm eager to hear what you think." "Feel free to share your opinions with me.")}} {{RandomString (SliceString "Looking forward to your feedback!" "Your perspective is highly valued." "Your thoughts matter to me.")}} + +{{RandomString (SliceString "Thank you for your consideration!" "I appreciate your attention to this matter." "Your support means a lot to me.")}} {{RandomString (SliceString "Wishing you a wonderful day!" "Thanks in advance for your time." 
"Your feedback is greatly appreciated.")}} + +{{RandomString (SliceString "Warm regards" "Best wishes" "Kind regards" "Sincerely" "With gratitude")}} +{{FirstName}} {{LastName}} +{{Email}} +{{PhoneFormatted}} +` + +// EmailText will return a single random text email template document +func EmailText(co *EmailOptions) (string, error) { + if co == nil { + co = &EmailOptions{} + GlobalFaker.Struct(co) + } + return templateFunc(templateEmail, templateFuncMap(GlobalFaker, nil), co) +} + +// EmailText will return a single random text email template document +func (f *Faker) EmailText(co *EmailOptions) (string, error) { + if co == nil { + co = &EmailOptions{} + f.Struct(co) + } + return templateFunc(templateEmail, templateFuncMap(f, nil), co) +} + +// functions that wont work with template engine +var templateExclusion = []string{ + "RandomMapKey", + "SQL", + "Template", +} + +// Build the template.FuncMap for the template engine +func templateFuncMap(f *Faker, fm *template.FuncMap) *template.FuncMap { + + // create a new function map + funcMap := template.FuncMap{} + + v := reflect.ValueOf(f) + + // loop through the methods + for i := 0; i < v.NumMethod(); i++ { + // check if the method is in the exclusion list + if stringInSlice(v.Type().Method(i).Name, templateExclusion) { + continue + } + + // Check if method has return values + // If not don't add to function map + if v.Type().Method(i).Type.NumOut() == 0 { + continue + } + + // add the method to the function map + funcMap[v.Type().Method(i).Name] = v.Method(i).Interface() + } + + // make string upper case + funcMap["ToUpper"] = strings.ToUpper + + // make string lower case + funcMap["ToLower"] = strings.ToLower + + // make string title case + funcMap["IntRange"] = func(start, end int) []int { + n := end - start + 1 + result := make([]int, n) + for i := 0; i < n; i++ { + result[i] = start + i + } + return result + } + + // enable passing any type to return a string + funcMap["ToInt"] = func(args any) int { + switch v := args.(type) { + case string: + i, err := strconv.Atoi(v) + if err != nil { + return 0 + } + + return i + case float64: + return int(v) + case float32: + return int(v) + case int: + return v + + // Anything else return 0 + default: + return 0 + } + } + + // enable passing any type to return a float64 + funcMap["ToFloat"] = func(args any) float64 { + switch v := args.(type) { + case string: + i, err := strconv.ParseFloat(v, 64) + if err != nil { + return 0 + } + + return i + case float64: + return v + case float32: + return float64(v) + case int: + return float64(v) + + // Anything else return 0 + default: + return 0 + } + } + + // ensable passing any type to return a string + funcMap["ToString"] = func(args any) string { + switch v := args.(type) { + case string: + return v + case float64: + return strconv.FormatFloat(v, 'f', -1, 64) + case float32: + return strconv.FormatFloat(float64(v), 'f', -1, 32) + case int: + return strconv.Itoa(v) + + // Anything else return empty string + default: + return "" + } + } + + // function to convert string to date time + funcMap["ToDate"] = func(dateString string) time.Time { + date, err := time.Parse("2006-01-02", dateString) + if err != nil { + return time.Now() + } + return date + } + + // enable passing slice of interface to functions + funcMap["SliceAny"] = func(args ...any) []any { + return args + } + + // enable passing slice of string to functions + funcMap["SliceString"] = func(args ...string) []string { + return args + } + + // enable passing slice of uint to functions + 
funcMap["SliceUInt"] = func(args ...uint) []uint { + return args + } + + // enable passing slice of int to functions + funcMap["SliceInt"] = func(args ...int) []int { + return args + } + + // enable passing slice of int to functions + funcMap["SliceF32"] = func(args ...float32) []float32 { + return args + } + + // Add passed in function map to the function map + if fm != nil { + for k, v := range *fm { + funcMap[k] = v + } + } + + return &funcMap +} + +// function to build the function map for the template engine from the global faker +func templateFunc(temp string, funcs *template.FuncMap, data any) (string, error) { + if temp == "" { + return "", fmt.Errorf("template parameter is empty") + } + + // Create a new template and parse + template_gen, err := template.New("CodeRun").Funcs(*funcs).Parse(temp) + if err != nil { + return "", err + } + + b := &bytes.Buffer{} + err = template_gen.Execute(b, data) + if err != nil { + return "", err + } + + // Return the result + return strings.ReplaceAll(b.String(), "\\n", "\n"), nil + +} + +// addTemplateLookup will add the template functions to the global lookup +func addTemplateLookup() { + AddFuncLookup("template", Info{ + Display: "Template", + Category: "template", + Description: "Generates document from template", + Example: `{{Firstname}} {{Lastname}} + +// output +Markus Moen`, + Output: "string", + ContentType: "text/plain", + Aliases: []string{ + "document template", "layout", "blueprint", "design pattern", "text template", "generator", "format schema", + }, + Keywords: []string{ + "template", "generates", "format", "structure", "engine", "document", "pattern", "design", "syntax", "render", "compile", + }, + Params: []Param{ + {Field: "template", Display: "Template", Type: "string", Description: "Golang template to generate the document from"}, + {Field: "data", Display: "Custom Data", Type: "string", Default: "", Optional: true, Description: "Custom data to pass to the template"}, + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + tpl, err := info.GetString(m, "template") + if err != nil { + return nil, err + } + + data, err := info.GetAny(m, "data") + if err != nil { + return nil, err + } + + templateOut, err := templateFunc(tpl, templateFuncMap(f, nil), &TemplateOptions{Data: data}) + if err != nil { + return nil, err + } + + return templateOut, nil + }, + }) + + AddFuncLookup("markdown", Info{ + Display: "Random markdown document", + Category: "template", + Description: "Lightweight markup language used for formatting plain text", + Example: `# PurpleSheep5 + +*Author: Amie Feil* + +Quarterly without week it hungry thing someone. Him regularly today whomever this revolt hence. From his timing as quantity us these. Yours live these frantic not may another. How this ours his them those whose. + +Them batch its Iraqi most that few. Abroad cheese this whereas next how there. Gorgeous genetics time choir fiction therefore yourselves. Am those infrequently heap software quarterly rather. Punctuation yellow where several his orchard to. 
+ +## Table of Contents +- [Installation](#installation) +- [Usage](#usage) +- [License](#license) + +## Installation +'''bash +pip install PurpleSheep5 +''' + +## Usage +'''python +result = purplesheep5.process("funny request") +print("purplesheep5 result:", "in progress") +''' + +## License +MIT`, + Output: "string", + Aliases: []string{ + "markup language", "readme format", "lightweight markup", "documentation style", "plain text format", "md file", "doc format", + }, + Keywords: []string{ + "markdown", "markup", "language", "formatting", "plain", "text", "documentation", "lightweight", "syntax", "rendering", "structure", "readme", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + template_result, err := templateFunc(templateMarkdown, templateFuncMap(f, nil), &MarkdownOptions{}) + return string(template_result), err + }, + }) + + AddFuncLookup("email_text", Info{ + Display: "Random text email Document", + Category: "template", + Description: "Written content of an email message, including the sender's message to the recipient", + Example: `Subject: Greetings from Marcel! + +Dear Pagac, + +Hello there! Sending positive vibes your way. + +I hope you're doing great. May your week be filled with joy. + +Virtually woman where team late quarterly without week it hungry. Thing someone him regularly today whomever this revolt hence from. His timing as quantity us these yours live these frantic. Not may another how this ours his them those whose. Them batch its Iraqi most that few abroad cheese this. + +Whereas next how there gorgeous genetics time choir fiction therefore. Yourselves am those infrequently heap software quarterly rather punctuation yellow. Where several his orchard to frequently hence victorious boxers each. Does auspicious yourselves first soup tomorrow this that must conclude. Anyway some yearly who cough laugh himself both yet rarely. + +Me dolphin intensely block would leap plane us first then. Down them eager would hundred super throughout animal yet themselves. Been group flock shake part purchase up usually it her. None it hers boat what their there Turkmen moreover one. Lebanese to brace these shower in it everybody should whatever. + +I'm curious to know what you think about it. If you have a moment, please feel free to check out the project on Bitbucket + +I'm eager to hear what you think. Looking forward to your feedback! + +Thank you for your consideration! Thanks in advance for your time. 
+ +Kind regards +Milford Johnston +jamelhaag@king.org +(507)096-3058`, + Output: "string", + Aliases: []string{ + "email body", + "email text", + "email message", + "message body", + "email content", + }, + Keywords: []string{ + "email", "body", "message", "content", + "subject", "salutation", "greeting", "closing", + "signature", "footer", "paragraph", "plaintext", + "correspondence", "communication", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + template_result, err := templateFunc(templateEmail, templateFuncMap(f, nil), &EmailOptions{}) + return string(template_result), err + }, + }) +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/text.go b/vendor/github.com/brianvoe/gofakeit/v7/text.go new file mode 100644 index 0000000000..525c08f120 --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/text.go @@ -0,0 +1,596 @@ +package gofakeit + +import ( + "bytes" + "errors" + "strings" + "unicode" +) + +type paragrapOptions struct { + paragraphCount int + sentenceCount int + wordCount int + separator string +} + +const bytesPerWordEstimation = 6 + +type sentenceGenerator func(f *Faker, wordCount int) string +type wordGenerator func(f *Faker) string + +// Comment will generate a random statement or remark expressing an opinion, observation, or reaction +func Comment() string { return comment(GlobalFaker) } + +// Comment will generate a random statement or remark expressing an opinion, observation, or reaction +func (f *Faker) Comment() string { return comment(f) } + +func comment(f *Faker) string { + structures := [][]string{ + {"interjection", "adjective", "noun", "verb", "adverb"}, + {"noun", "verb", "preposition", "determiner", "adjective", "noun"}, + {"noun", "verb", "adverb"}, + {"adjective", "noun", "verb"}, + {"noun", "verb", "preposition", "noun"}, + } + + // Randomly select a structure + structure := structures[number(f, 0, len(structures)-1)] + + // Build the sentence + var commentParts []string + for _, wordType := range structure { + switch wordType { + case "noun": + commentParts = append(commentParts, noun(f)) + case "verb": + commentParts = append(commentParts, verb(f)) + case "adjective": + commentParts = append(commentParts, adjective(f)) + case "adverb": + commentParts = append(commentParts, adverb(f)) + case "interjection": + commentParts = append(commentParts, interjection(f)) + case "preposition": + commentParts = append(commentParts, preposition(f)) + case "determiner": + commentParts = append(commentParts, nounDeterminer(f)) + default: + // Should never hit + panic("Invalid word type") + } + } + + // Combine the words into a sentence + sentence := strings.Join(commentParts, " ") + + // Capitalize the first letter + sentence = title(sentence) + + // Add a period to the end of the sentence + sentence = sentence + "." 
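+	// e.g. the {"adjective", "noun", "verb"} structure might yield "Brave dog runs."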
+ + return sentence +} + +// Phrase will return a random phrase +func Phrase() string { return phrase(GlobalFaker) } + +// Phrase will return a random phrase +func (f *Faker) Phrase() string { return phrase(f) } + +func phrase(f *Faker) string { return getRandValue(f, []string{"sentence", "phrase"}) } + +// PhraseNoun will return a random noun phrase +func PhraseNoun() string { return phraseNoun(GlobalFaker) } + +// PhraseNoun will return a random noun phrase +func (f *Faker) PhraseNoun() string { return phraseNoun(f) } + +func phraseNoun(f *Faker) string { + str := "" + + // You may also want to add an adjective to describe the noun + if boolFunc(f) { + str = adjectiveDescriptive(f) + " " + noun(f) + } else { + str = noun(f) + } + + // Add determiner from weighted list + prob, _ := weighted(f, []any{1, 2, 3}, []float32{2, 1.5, 1}) + if prob == 1 { + str = getArticle(str) + " " + str + } else if prob == 2 { + str = "the " + str + } + + return str +} + +// PhraseVerb will return a random preposition phrase +func PhraseVerb() string { return phraseVerb(GlobalFaker) } + +// PhraseVerb will return a random preposition phrase +func (f *Faker) PhraseVerb() string { return phraseVerb(f) } + +func phraseVerb(f *Faker) string { + // Put together a string builder + sb := []string{} + + // You may have an adverb phrase + if boolFunc(f) { + sb = append(sb, phraseAdverb(f)) + } + + // Lets add the primary verb + sb = append(sb, verbAction(f)) + + // You may have a noun phrase + if boolFunc(f) { + sb = append(sb, phraseNoun(f)) + } + + // You may have an adverb phrase + if boolFunc(f) { + sb = append(sb, phraseAdverb(f)) + + // You may also have a preposition phrase + if boolFunc(f) { + sb = append(sb, phrasePreposition(f)) + } + + // You may also hae an adverb phrase + if boolFunc(f) { + sb = append(sb, phraseAdverb(f)) + } + } + + return strings.Join(sb, " ") +} + +// PhraseAdverb will return a random adverb phrase +func PhraseAdverb() string { return phraseAdverb(GlobalFaker) } + +// PhraseAdverb will return a random adverb phrase +func (f *Faker) PhraseAdverb() string { return phraseAdverb(f) } + +func phraseAdverb(f *Faker) string { + if boolFunc(f) { + return adverbDegree(f) + " " + adverbManner(f) + } + + return adverbManner(f) +} + +// PhrasePreposition will return a random preposition phrase +func PhrasePreposition() string { return phrasePreposition(GlobalFaker) } + +// PhrasePreposition will return a random preposition phrase +func (f *Faker) PhrasePreposition() string { return phrasePreposition(f) } + +func phrasePreposition(f *Faker) string { + return prepositionSimple(f) + " " + phraseNoun(f) +} + +// Sentence will generate a random sentence +func Sentence(wordCount int) string { return sentence(GlobalFaker, wordCount) } + +// Sentence will generate a random sentence +func (f *Faker) Sentence(wordCount int) string { return sentence(f, wordCount) } + +func sentence(f *Faker, wordCount int) string { + return sentenceGen(f, wordCount, word) +} + +// Paragraph will generate a random paragraphGenerator +func Paragraph(paragraphCount int, sentenceCount int, wordCount int, separator string) string { + return paragraph(GlobalFaker, paragraphCount, sentenceCount, wordCount, separator) +} + +// Paragraph will generate a random paragraphGenerator +func (f *Faker) Paragraph(paragraphCount int, sentenceCount int, wordCount int, separator string) string { + return paragraph(f, paragraphCount, sentenceCount, wordCount, separator) +} + +func paragraph(f *Faker, paragraphCount int, sentenceCount int, 
wordCount int, separator string) string { + return paragraphGen(f, paragrapOptions{paragraphCount, sentenceCount, wordCount, separator}, sentence) +} + +func sentenceGen(f *Faker, wordCount int, word wordGenerator) string { + if wordCount <= 0 { + return "" + } + + wordSeparator := ' ' + sentence := bytes.Buffer{} + sentence.Grow(wordCount * bytesPerWordEstimation) + + for i := 0; i < wordCount; i++ { + word := word(f) + if i == 0 { + runes := []rune(word) + runes[0] = unicode.ToTitle(runes[0]) + word = string(runes) + } + sentence.WriteString(word) + if i < wordCount-1 { + sentence.WriteRune(wordSeparator) + } + } + sentence.WriteRune('.') + return sentence.String() +} + +func paragraphGen(f *Faker, opts paragrapOptions, sentecer sentenceGenerator) string { + if opts.paragraphCount <= 0 || opts.sentenceCount <= 0 || opts.wordCount <= 0 { + return "" + } + + //to avoid making Go 1.10 dependency, we cannot use strings.Builder + paragraphs := bytes.Buffer{} + //we presume the length + paragraphs.Grow(opts.paragraphCount * opts.sentenceCount * opts.wordCount * bytesPerWordEstimation) + wordSeparator := ' ' + + for i := 0; i < opts.paragraphCount; i++ { + for e := 0; e < opts.sentenceCount; e++ { + paragraphs.WriteString(sentecer(f, opts.wordCount)) + if e < opts.sentenceCount-1 { + paragraphs.WriteRune(wordSeparator) + } + } + + if i < opts.paragraphCount-1 { + paragraphs.WriteString(opts.separator) + } + } + + return paragraphs.String() +} + +// Question will return a random question +func Question() string { + return question(GlobalFaker) +} + +// Question will return a random question +func (f *Faker) Question() string { + return question(f) +} + +func question(f *Faker) string { + return strings.Replace(hipsterSentence(f, number(f, 3, 10)), ".", "?", 1) +} + +// Quote will return a random quote from a random person +func Quote() string { return quote(GlobalFaker) } + +// Quote will return a random quote from a random person +func (f *Faker) Quote() string { return quote(f) } + +func quote(f *Faker) string { + return `"` + hipsterSentence(f, number(f, 3, 10)) + `" - ` + firstName(f) + " " + lastName(f) +} + +// LoremIpsumSentence will generate a random sentence +func LoremIpsumSentence(wordCount int) string { + return loremIpsumSentence(GlobalFaker, wordCount) +} + +// LoremIpsumSentence will generate a random sentence +func (f *Faker) LoremIpsumSentence(wordCount int) string { + return loremIpsumSentence(f, wordCount) +} + +func loremIpsumSentence(f *Faker, wordCount int) string { + return sentenceGen(f, wordCount, loremIpsumWord) +} + +// LoremIpsumParagraph will generate a random paragraphGenerator +func LoremIpsumParagraph(paragraphCount int, sentenceCount int, wordCount int, separator string) string { + return loremIpsumParagraph(GlobalFaker, paragraphCount, sentenceCount, wordCount, separator) +} + +// LoremIpsumParagraph will generate a random paragraphGenerator +func (f *Faker) LoremIpsumParagraph(paragraphCount int, sentenceCount int, wordCount int, separator string) string { + return loremIpsumParagraph(f, paragraphCount, sentenceCount, wordCount, separator) +} + +func loremIpsumParagraph(f *Faker, paragraphCount int, sentenceCount int, wordCount int, separator string) string { + return paragraphGen(f, paragrapOptions{paragraphCount, sentenceCount, wordCount, separator}, loremIpsumSentence) +} + +func addTextLookup() { + AddFuncLookup("comment", Info{ + Display: "Comment", + Category: "text", + Description: "Statement or remark expressing an opinion, observation, or reaction", + 
Example: "wow", + Output: "string", + Aliases: []string{ + "verbal statement", "expressed thought", "spoken remark", "communication element", "casual note", + }, + Keywords: []string{ + "opinion", "observation", "reaction", "response", "feedback", "critique", "interpretation", "perspective", "reflection", "discussion", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return comment(f), nil + }, + }) + + AddFuncLookup("phrase", Info{ + Display: "Phrase", + Category: "text", + Description: "A small group of words standing together", + Example: "time will tell", + Output: "string", + Aliases: []string{"word group", "language unit", "text element", "expression block"}, + Keywords: []string{"phrase", "words", "group", "sentence", "text", "language", "grammar", "expression", "unit", "collection"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return phrase(f), nil + }, + }) + + AddFuncLookup("phrasenoun", Info{ + Display: "Noun Phrase", + Category: "text", + Description: "Phrase with a noun as its head, functions within sentence like a noun", + Example: "a tribe", + Output: "string", + Aliases: []string{"nominal phrase", "substantive element", "subject phrase", "object phrase"}, + Keywords: []string{"phrase", "noun", "grammar", "subject", "object", "head", "sentence", "nominal", "substantive", "entity"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return phraseNoun(f), nil + }, + }) + + AddFuncLookup("phraseverb", Info{ + Display: "Verb Phrase", + Category: "text", + Description: "Phrase that Consists of a verb and its modifiers, expressing an action or state", + Example: "a tribe", + Output: "string", + Aliases: []string{"predicate phrase", "verbal element", "action phrase", "state phrase"}, + Keywords: []string{"phrase", "verb", "grammar", "action", "state", "modifiers", "sentence", "predicate", "verbal", "behavior"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return phraseVerb(f), nil + }, + }) + + AddFuncLookup("phraseadverb", Info{ + Display: "Adverb Phrase", + Category: "text", + Description: "Phrase that modifies a verb, adjective, or another adverb, providing additional information.", + Example: "fully gladly", + Output: "string", + Aliases: []string{"adverbial phrase", "qualifier element", "modifier phrase", "description phrase"}, + Keywords: []string{"phrase", "adverb", "grammar", "modifier", "description", "information", "adverbial", "qualifier", "modification"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return phraseAdverb(f), nil + }, + }) + + AddFuncLookup("phrasepreposition", Info{ + Display: "Preposition Phrase", + Category: "text", + Description: "Phrase starting with a preposition, showing relation between elements in a sentence.", + Example: "out the black thing", + Output: "string", + Aliases: []string{"prepositional phrase", "relational element", "connection phrase", "grammar bridge"}, + Keywords: []string{"phrase", "preposition", "grammar", "relation", "connection", "sentence", "prepositional", "relational", "linking"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return phrasePreposition(f), nil + }, + }) + + AddFuncLookup("sentence", Info{ + Display: "Sentence", + Category: "text", + Description: "Set of words expressing a statement, question, exclamation, or command", + Example: "Interpret context record river mind.", + Output: "string", + Aliases: []string{"complete thought", "grammatical unit", "word group", 
"linguistic element"}, + Keywords: []string{"sentence", "complete", "thought", "grammatical", "unit", "word", "group", "expression", "clause", "utterance"}, + Params: []Param{ + {Field: "wordcount", Display: "Word Count", Type: "int", Default: "5", Description: "Number of words in a sentence"}, + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + wordCount, err := info.GetInt(m, "wordcount") + if err != nil { + return nil, err + } + if wordCount <= 0 || wordCount > 50 { + return nil, errors.New("invalid word count, must be greater than 0, less than 50") + } + + return sentence(f, wordCount), nil + }, + }) + + AddFuncLookup("paragraph", Info{ + Display: "Paragraph", + Category: "text", + Description: "Distinct section of writing covering a single theme, composed of multiple sentences", + Example: "Interpret context record river mind press self should compare property outcome divide. Combine approach sustain consult discover explanation direct address church husband seek army. Begin own act welfare replace press suspect stay link place manchester specialist. Arrive price satisfy sign force application hair train provide basis right pay. Close mark teacher strengthen information attempt head touch aim iron tv take.", + Output: "string", + Aliases: []string{"text block", "writing section", "thematic unit", "content block"}, + Keywords: []string{"paragraph", "text", "block", "writing", "section", "theme", "sentences", "composition", "distinct", "passage", "content"}, + Params: []Param{ + {Field: "paragraphcount", Display: "Paragraph Count", Type: "int", Default: "2", Description: "Number of paragraphs"}, + {Field: "sentencecount", Display: "Sentence Count", Type: "int", Default: "2", Description: "Number of sentences in a paragraph"}, + {Field: "wordcount", Display: "Word Count", Type: "int", Default: "5", Description: "Number of words in a sentence"}, + {Field: "paragraphseparator", Display: "Paragraph Separator", Type: "string", Default: "
", Description: "String value to add between paragraphs"}, + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + paragraphCount, err := info.GetInt(m, "paragraphcount") + if err != nil { + return nil, err + } + if paragraphCount <= 0 || paragraphCount > 20 { + return nil, errors.New("invalid paragraph count, must be greater than 0, less than 20") + } + + sentenceCount, err := info.GetInt(m, "sentencecount") + if err != nil { + return nil, err + } + if sentenceCount <= 0 || sentenceCount > 20 { + return nil, errors.New("invalid sentence count, must be greater than 0, less than 20") + } + + wordCount, err := info.GetInt(m, "wordcount") + if err != nil { + return nil, err + } + if wordCount <= 0 || wordCount > 50 { + return nil, errors.New("invalid word count, must be greater than 0, less than 50") + } + + paragraphSeparator, err := info.GetString(m, "paragraphseparator") + if err != nil { + return nil, err + } + + return paragraph(f, paragraphCount, sentenceCount, wordCount, paragraphSeparator), nil + }, + }) + + AddFuncLookup("question", Info{ + Display: "Question", + Category: "text", + Description: "Statement formulated to inquire or seek clarification", + Example: "Roof chia echo?", + Output: "string", + Aliases: []string{ + "interrogative sentence", + "information request", + "asking phrase", + "query prompt", + "clarifying ask", + }, + Keywords: []string{ + "question", "inquiry", "clarification", "interrogative", + "ask", "who", "what", "when", "where", "why", "how", "mark", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return question(f), nil + }, + }) + + AddFuncLookup("quote", Info{ + Display: "Quote", + Category: "text", + Description: "Direct repetition of someone else's words", + Example: `"Roof chia echo." - Lura Lockman`, + Output: "string", + Aliases: []string{ + "direct speech", + "verbatim line", + "cited passage", + "attributed text", + "pulled excerpt", + }, + Keywords: []string{ + "quote", "quotation", "citation", "reference", "excerpt", + "epigraph", "saying", "maxim", "attribution", "blockquote", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return quote(f), nil + }, + }) + + AddFuncLookup("loremipsumsentence", Info{ + Display: "Lorem Ipsum Sentence", + Category: "text", + Description: "Sentence of the Lorem Ipsum placeholder text used in design and publishing", + Example: "Quia quae repellat consequatur quidem.", + Output: "string", + Aliases: []string{ + "lorem sentence", + "ipsum sentence", + "placeholder sentence", + "latin sentence", + }, + Keywords: []string{ + "lorem", "ipsum", "sentence", "placeholder", + "latin", "dummy", "filler", "text", + "typography", "mockup", + }, + Params: []Param{ + {Field: "wordcount", Display: "Word Count", Type: "int", Default: "5", Description: "Number of words in a sentence"}, + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + wordCount, err := info.GetInt(m, "wordcount") + if err != nil { + return nil, err + } + if wordCount <= 0 || wordCount > 50 { + return nil, errors.New("invalid word count, must be greater than 0, less than 50") + } + + return loremIpsumSentence(f, wordCount), nil + }, + }) + + AddFuncLookup("loremipsumparagraph", Info{ + Display: "Lorem Ipsum Paragraph", + Category: "text", + Description: "Paragraph of the Lorem Ipsum placeholder text used in design and publishing", + Example: `Quia quae repellat consequatur quidem nisi quo qui voluptatum accusantium quisquam amet. 
Quas et ut non dolorem ipsam aut enim assumenda mollitia harum ut. Dicta similique veniam nulla voluptas at excepturi non ad maxime at non. Eaque hic repellat praesentium voluptatem qui consequuntur dolor iusto autem velit aut. Fugit tempore exercitationem harum consequatur voluptatum modi minima aut eaque et et.
+
+Aut ea voluptatem dignissimos expedita odit tempore quod aut beatae ipsam iste. Minus voluptatibus dolorem maiores eius sed nihil vel enim odio voluptatem accusamus. Natus quibusdam temporibus tenetur cumque sint necessitatibus dolorem ex ducimus iusto ex. Voluptatem neque dicta explicabo officiis et ducimus sit ut ut praesentium pariatur. Illum molestias nisi at dolore ut voluptatem accusantium et fugiat et ut.
+
+Explicabo incidunt reprehenderit non quia dignissimos recusandae vitae soluta quia et quia. Aut veniam voluptas consequatur placeat sapiente non eveniet voluptatibus magni velit eum. Nobis vel repellendus sed est qui autem laudantium quidem quam ullam consequatur. Aut iusto ut commodi similique quae voluptatem atque qui fugiat eum aut. Quis distinctio consequatur voluptatem vel aliquid aut laborum facere officiis iure tempora.`,
+ Output: "string",
+ Aliases: []string{
+ "lorem paragraph",
+ "ipsum paragraph",
+ "placeholder paragraph",
+ "latin paragraph",
+ },
+ Keywords: []string{
+ "lorem", "ipsum", "paragraph", "placeholder",
+ "latin", "dummy", "filler", "text",
+ "typography", "mockup",
+ },
+ Params: []Param{
+ {Field: "paragraphcount", Display: "Paragraph Count", Type: "int", Default: "2", Description: "Number of paragraphs"},
+ {Field: "sentencecount", Display: "Sentence Count", Type: "int", Default: "2", Description: "Number of sentences in a paragraph"},
+ {Field: "wordcount", Display: "Word Count", Type: "int", Default: "5", Description: "Number of words in a sentence"},
+ {Field: "paragraphseparator", Display: "Paragraph Separator", Type: "string", Default: "<br>
", Description: "String value to add between paragraphs"}, + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + paragraphCount, err := info.GetInt(m, "paragraphcount") + if err != nil { + return nil, err + } + if paragraphCount <= 0 || paragraphCount > 20 { + return nil, errors.New("invalid paragraph count, must be greater than 0, less than 20") + } + + sentenceCount, err := info.GetInt(m, "sentencecount") + if err != nil { + return nil, err + } + if sentenceCount <= 0 || sentenceCount > 20 { + return nil, errors.New("invalid sentence count, must be greater than 0, less than 20") + } + + wordCount, err := info.GetInt(m, "wordcount") + if err != nil { + return nil, err + } + if wordCount <= 0 || wordCount > 50 { + return nil, errors.New("invalid word count, must be greater than 0, less than 50") + } + + paragraphSeparator, err := info.GetString(m, "paragraphseparator") + if err != nil { + return nil, err + } + + return loremIpsumParagraph(f, paragraphCount, sentenceCount, wordCount, paragraphSeparator), nil + }, + }) +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/weighted.go b/vendor/github.com/brianvoe/gofakeit/v7/weighted.go new file mode 100644 index 0000000000..95d4cade58 --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/weighted.go @@ -0,0 +1,112 @@ +package gofakeit + +import ( + "errors" +) + +// Weighted will take in an array of options and weights and return a random selection based upon its indexed weight +func Weighted(options []any, weights []float32) (any, error) { + return weighted(GlobalFaker, options, weights) +} + +// Weighted will take in an array of options and weights and return a random selection based upon its indexed weight +func (f *Faker) Weighted(options []any, weights []float32) (any, error) { + return weighted(f, options, weights) +} + +// Weighted will take in an array of options and weights and return a random selection based upon its indexed weight +func weighted(f *Faker, options []any, weights []float32) (any, error) { + ol := len(options) + wl := len(weights) + + // If options length is 1 just return it back + if ol == 1 { + return options[0], nil + } + + // Make sure they are passing in options + if ol == 0 { + return nil, errors.New("didnt pass options") + } + + // Make sure they are passing in weights + if wl == 0 { + return nil, errors.New("didnt pass weights") + } + + // Make sure they are passing in the same length + if ol != wl { + return nil, errors.New("options and weights need to be the same length") + } + + // Compute the discrete cumulative density from the sum of the weights + cdf := make([]float32, wl) + var sumOfWeights float32 = 0.0 + for i, weight := range weights { + if i > 0 { + cdf[i] = cdf[i-1] + weight + sumOfWeights += weight + continue + } + + cdf[i] = weight + sumOfWeights += weight + } + + // Get rand value from a multple of sumOfWeights + randSumOfWeights := f.Float32() * sumOfWeights + + var l int = 0 + var h int = wl - 1 + for l <= h { + m := l + (h-l)/2 + if randSumOfWeights <= cdf[m] { + if m == 0 || (m > 0 && randSumOfWeights > cdf[m-1]) { + return options[m], nil + } + h = m - 1 + } else { + l = m + 1 + } + } + + return nil, errors.New("end of function") +} + +func addWeightedLookup() { + AddFuncLookup("weighted", Info{ + Display: "Weighted", + Category: "misc", + Description: "Randomly select a given option based upon an equal amount of weights", + Example: "[hello, 2, 6.9],[1, 2, 3] => 6.9", + Output: "any", + Aliases: []string{ + "weighted choice", "probabilistic pick", "random 
weight", "distribution choice", "chance selection", "ratio selection", "stochastic option", + }, + Keywords: []string{ + "randomly", "select", "probability", "distribution", "likelihood", "chance", "statistical", "outcome", "bias", "ratio", + }, + Params: []Param{ + {Field: "options", Display: "Options", Type: "[]string", Description: "Array of any values"}, + {Field: "weights", Display: "Weights", Type: "[]float", Description: "Array of weights"}, + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + options, err := info.GetStringArray(m, "options") + if err != nil { + return nil, err + } + + weights, err := info.GetFloat32Array(m, "weights") + if err != nil { + return nil, err + } + + optionsInterface := make([]any, len(options)) + for i, o := range options { + optionsInterface[i] = o + } + + return weighted(f, optionsInterface, weights) + }, + }) +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/word_adjective.go b/vendor/github.com/brianvoe/gofakeit/v7/word_adjective.go new file mode 100644 index 0000000000..223c98085b --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/word_adjective.go @@ -0,0 +1,274 @@ +package gofakeit + +// Adjective will generate a random adjective +func Adjective() string { return adjective(GlobalFaker) } + +// Adjective will generate a random adjective +func (f *Faker) Adjective() string { return adjective(f) } + +func adjective(f *Faker) string { + var adjType = map[int]string{ + 0: "adjective_descriptive", + 1: "adjective_quantitative", + 2: "adjective_proper", + 3: "adjective_demonstrative", + 4: "adjective_possessive", + 5: "adjective_interrogative", + 6: "adjective_indefinite", + } + return getRandValue(f, []string{"word", adjType[number(f, 0, 6)]}) +} + +// AdjectiveDescriptive will generate a random descriptive adjective +func AdjectiveDescriptive() string { return adjectiveDescriptive(GlobalFaker) } + +// AdjectiveDescriptive will generate a random descriptive adjective +func (f *Faker) AdjectiveDescriptive() string { return adjectiveDescriptive(f) } + +func adjectiveDescriptive(f *Faker) string { + return getRandValue(f, []string{"word", "adjective_descriptive"}) +} + +// AdjectiveQuantitative will generate a random quantitative adjective +func AdjectiveQuantitative() string { return adjectiveQuantitative(GlobalFaker) } + +// AdjectiveQuantitative will generate a random quantitative adjective +func (f *Faker) AdjectiveQuantitative() string { return adjectiveQuantitative(f) } + +func adjectiveQuantitative(f *Faker) string { + return getRandValue(f, []string{"word", "adjective_quantitative"}) +} + +// AdjectiveProper will generate a random proper adjective +func AdjectiveProper() string { return adjectiveProper(GlobalFaker) } + +// AdjectiveProper will generate a random proper adjective +func (f *Faker) AdjectiveProper() string { return adjectiveProper(f) } + +func adjectiveProper(f *Faker) string { + return getRandValue(f, []string{"word", "adjective_proper"}) +} + +// AdjectiveDemonstrative will generate a random demonstrative adjective +func AdjectiveDemonstrative() string { return adjectiveDemonstrative(GlobalFaker) } + +// AdjectiveDemonstrative will generate a random demonstrative adjective +func (f *Faker) AdjectiveDemonstrative() string { return adjectiveDemonstrative(f) } + +func adjectiveDemonstrative(f *Faker) string { + return getRandValue(f, []string{"word", "adjective_demonstrative"}) +} + +// AdjectivePossessive will generate a random possessive adjective +func AdjectivePossessive() string { return 
adjectivePossessive(GlobalFaker) } + +// AdjectivePossessive will generate a random possessive adjective +func (f *Faker) AdjectivePossessive() string { return adjectivePossessive(f) } + +func adjectivePossessive(f *Faker) string { + return getRandValue(f, []string{"word", "adjective_possessive"}) +} + +// AdjectiveInterrogative will generate a random interrogative adjective +func AdjectiveInterrogative() string { return adjectiveInterrogative(GlobalFaker) } + +// AdjectiveInterrogative will generate a random interrogative adjective +func (f *Faker) AdjectiveInterrogative() string { return adjectiveInterrogative(f) } + +func adjectiveInterrogative(f *Faker) string { + return getRandValue(f, []string{"word", "adjective_interrogative"}) +} + +// AdjectiveIndefinite will generate a random indefinite adjective +func AdjectiveIndefinite() string { return adjectiveIndefinite(GlobalFaker) } + +// AdjectiveIndefinite will generate a random indefinite adjective +func (f *Faker) AdjectiveIndefinite() string { return adjectiveIndefinite(f) } + +func adjectiveIndefinite(f *Faker) string { + return getRandValue(f, []string{"word", "adjective_indefinite"}) +} + +func addWordAdjectiveLookup() { + AddFuncLookup("adjective", Info{ + Display: "Adjective", + Category: "word", + Description: "Word describing or modifying a noun", + Example: "genuine", + Output: "string", + Aliases: []string{ + "descriptor term", + "qualifying modifier", + "attribute marker", + "descriptive label", + "noun qualifier", + }, + Keywords: []string{ + "adjective", "noun", "speech", "quality", "attribute", + "characteristic", "property", "trait", "descriptive", "modifier", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return adjective(f), nil + }, + }) + + AddFuncLookup("adjectivedescriptive", Info{ + Display: "Descriptive Adjective", + Category: "word", + Description: "Adjective that provides detailed characteristics about a noun", + Example: "brave", + Output: "string", + Aliases: []string{ + "qualitative adjective", + "detail-rich modifier", + "characterizing term", + "specific descriptor", + "noun enhancer", + }, + Keywords: []string{ + "adjective", "word", "describing", "modifying", "attribute", + "property", "trait", "feature", "aspect", "detailed", "characteristics", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return adjectiveDescriptive(f), nil + }, + }) + + AddFuncLookup("adjectivequantitative", Info{ + Display: "Quantitative Adjective", + Category: "word", + Description: "Adjective that indicates the quantity or amount of something", + Example: "a little", + Output: "string", + Aliases: []string{ + "numeric descriptor", + "cardinal qualifier", + "quantifier adjective", + "how many indicator", + "magnitude marker", + }, + Keywords: []string{ + "adjective", "quantitative", "word", "describing", "modifying", + "count", "volume", "extent", "degree", "magnitude", "quantity", "amount", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return adjectiveQuantitative(f), nil + }, + }) + + AddFuncLookup("adjectiveproper", Info{ + Display: "Proper Adjective", + Category: "word", + Description: "Adjective derived from a proper noun, often used to describe nationality or origin", + Example: "Afghan", + Output: "string", + Aliases: []string{ + "nationality adjective", + "eponym-derived", + "proper-noun based", + "demonym adjective", + "origin descriptor", + }, + Keywords: []string{ + "adjective", "noun", "word", "describing", + "cultural", "regional", 
"ethnic", "linguistic", "heritage", + "proper", "nationality", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return adjectiveProper(f), nil + }, + }) + + AddFuncLookup("adjectivedemonstrative", Info{ + Display: "Demonstrative Adjective", + Category: "word", + Description: "Adjective used to point out specific things", + Example: "this", + Output: "string", + Aliases: []string{ + "demonstrative adjective", + "pointing adjective", + "deictic adjective", + "proximal distal adjective", + "reference adjective", + }, + Keywords: []string{ + "adjective", "demonstrative", "deictic", + "this", "that", "these", "those", + "proximal", "distal", "near", "far", + "pointer", "reference", "specific", "grammar", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return adjectiveDemonstrative(f), nil + }, + }) + + AddFuncLookup("adjectivepossessive", Info{ + Display: "Possessive Adjective", + Category: "word", + Description: "Adjective indicating ownership or possession", + Example: "my", + Output: "string", + Aliases: []string{ + "ownership adjective", + "owners descriptor", + "possessive determiner", + "belonging indicator", + "proprietary modifier", + }, + Keywords: []string{ + "adjective", "word", "grammar", + "my", "your", "his", "her", "its", "our", "their", + "belong", "possessive", "ownership", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return adjectivePossessive(f), nil + }, + }) + + AddFuncLookup("adjectiveinterrogative", Info{ + Display: "Interrogative Adjective", + Category: "word", + Description: "Adjective used to ask questions", + Example: "what", + Output: "string", + Aliases: []string{ + "interrogative adjective", + "question word", + "asking adjective", + "inquiry word", + "grammar adjective", + }, + Keywords: []string{ + "adjective", "word", "grammar", "what", "which", "whose", + "question", "inquiry", "interrogation", "interrogative", "ask", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return adjectiveInterrogative(f), nil + }, + }) + + AddFuncLookup("adjectiveindefinite", Info{ + Display: "Indefinite Adjective", + Category: "word", + Description: "Adjective describing a non-specific noun", + Example: "few", + Output: "string", + Aliases: []string{ + "unspecified adjective", + "quantifier-like", + "noncount marker", + "broad determiner", + "approximate amount", + }, + Keywords: []string{ + "adjective", "noun", "word", "grammar", + "some", "any", "many", "few", "several", "various", "certain", + "indefinite", "non-specific", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return adjectiveIndefinite(f), nil + }, + }) + +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/word_adverb.go b/vendor/github.com/brianvoe/gofakeit/v7/word_adverb.go new file mode 100644 index 0000000000..fe5b546b4e --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/word_adverb.go @@ -0,0 +1,223 @@ +package gofakeit + +// Adverb will generate a random adverb +func Adverb() string { return adverb(GlobalFaker) } + +// Adverb will generate a random adverb +func (f *Faker) Adverb() string { return adverb(f) } + +func adverb(f *Faker) string { + var adverbType = map[int]string{ + 0: "adverb_manner", + 1: "adverb_degree", + 2: "adverb_place", + 3: "adverb_time_definite", + 4: "adverb_time_indefinite", + 5: "adverb_frequency_definite", + 6: "adverb_frequency_indefinite", + } + return getRandValue(f, []string{"word", adverbType[number(f, 0, 6)]}) +} + +// AdverbManner will 
generate a random manner adverb +func AdverbManner() string { return adverbManner(GlobalFaker) } + +// AdverbManner will generate a random manner adverb +func (f *Faker) AdverbManner() string { return adverbManner(f) } + +func adverbManner(f *Faker) string { return getRandValue(f, []string{"word", "adverb_manner"}) } + +// AdverbDegree will generate a random degree adverb +func AdverbDegree() string { return adverbDegree(GlobalFaker) } + +// AdverbDegree will generate a random degree adverb +func (f *Faker) AdverbDegree() string { return adverbDegree(f) } + +func adverbDegree(f *Faker) string { return getRandValue(f, []string{"word", "adverb_degree"}) } + +// AdverbPlace will generate a random place adverb +func AdverbPlace() string { return adverbPlace(GlobalFaker) } + +// AdverbPlace will generate a random place adverb +func (f *Faker) AdverbPlace() string { return adverbPlace(f) } + +func adverbPlace(f *Faker) string { return getRandValue(f, []string{"word", "adverb_place"}) } + +// AdverbTimeDefinite will generate a random time definite adverb +func AdverbTimeDefinite() string { return adverbTimeDefinite(GlobalFaker) } + +// AdverbTimeDefinite will generate a random time definite adverb +func (f *Faker) AdverbTimeDefinite() string { return adverbTimeDefinite(f) } + +func adverbTimeDefinite(f *Faker) string { + return getRandValue(f, []string{"word", "adverb_time_definite"}) +} + +// AdverbTimeIndefinite will generate a random time indefinite adverb +func AdverbTimeIndefinite() string { return adverbTimeIndefinite(GlobalFaker) } + +// AdverbTimeIndefinite will generate a random time indefinite adverb +func (f *Faker) AdverbTimeIndefinite() string { return adverbTimeIndefinite(f) } + +func adverbTimeIndefinite(f *Faker) string { + return getRandValue(f, []string{"word", "adverb_time_indefinite"}) +} + +// AdverbFrequencyDefinite will generate a random frequency definite adverb +func AdverbFrequencyDefinite() string { return adverbFrequencyDefinite(GlobalFaker) } + +// AdverbFrequencyDefinite will generate a random frequency definite adverb +func (f *Faker) AdverbFrequencyDefinite() string { return adverbFrequencyDefinite(f) } + +func adverbFrequencyDefinite(f *Faker) string { + return getRandValue(f, []string{"word", "adverb_frequency_definite"}) +} + +// AdverbFrequencyIndefinite will generate a random frequency indefinite adverb +func AdverbFrequencyIndefinite() string { return adverbFrequencyIndefinite(GlobalFaker) } + +// AdverbFrequencyIndefinite will generate a random frequency indefinite adverb +func (f *Faker) AdverbFrequencyIndefinite() string { return adverbFrequencyIndefinite(f) } + +func adverbFrequencyIndefinite(f *Faker) string { + return getRandValue(f, []string{"word", "adverb_frequency_indefinite"}) +} + +func addWordAdverbLookup() { + AddFuncLookup("adverb", Info{ + Display: "Adverb", + Category: "word", + Description: "Word that modifies verbs, adjectives, or other adverbs", + Example: "smoothly", + Output: "string", + Aliases: []string{ + "modifier", "descriptive word", "language part", "expression word", "qualifier", + }, + Keywords: []string{ + "intensity", "manner", "degree", "place", "time", "frequency", "extent", "emphasis", "usage", "context", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return adverb(f), nil + }, + }) + + AddFuncLookup("adverbmanner", Info{ + Display: "Adverb Manner", + Category: "word", + Description: "Adverb that describes how an action is performed", + Example: "stupidly", + Output: "string", + Aliases: 
[]string{ + "manner word", "action style", "performance word", "descriptive term", "behavior word", + }, + Keywords: []string{ + "style", "process", "mode", "technique", "behavior", "attitude", "fashion", "pattern", "characteristic", "approach", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return adverbManner(f), nil + }, + }) + + AddFuncLookup("adverbdegree", Info{ + Display: "Adverb Degree", + Category: "word", + Description: "Adverb that indicates the degree or intensity of an action or adjective", + Example: "intensely", + Output: "string", + Aliases: []string{ + "degree word", "intensity word", "level word", "strength word", "extent word", + }, + Keywords: []string{ + "measure", "force", "strength", "scope", "magnitude", "gradation", "amount", "power", "amplification", "range", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return adverbDegree(f), nil + }, + }) + + AddFuncLookup("adverbplace", Info{ + Display: "Adverb Place", + Category: "word", + Description: "Adverb that indicates the location or direction of an action", + Example: "east", + Output: "string", + Aliases: []string{ + "place word", "location word", "direction word", "position word", "movement word", + }, + Keywords: []string{ + "orientation", "destination", "area", "region", "spot", "placement", "site", "territory", "geography", "setting", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return adverbPlace(f), nil + }, + }) + + AddFuncLookup("adverbtimedefinite", Info{ + Display: "Adverb Time Definite", + Category: "word", + Description: "Adverb that specifies the exact time an action occurs", + Example: "now", + Output: "string", + Aliases: []string{ + "time word", "definite time", "exact time", "moment word", "specific time", + }, + Keywords: []string{ + "precise", "instant", "point", "schedule", "fixed", "timestamp", "occasion", "momentary", "calendar", "chronology", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return adverbTimeDefinite(f), nil + }, + }) + + AddFuncLookup("adverbtimeindefinite", Info{ + Display: "Adverb Time Indefinite", + Category: "word", + Description: "Adverb that gives a general or unspecified time frame", + Example: "already", + Output: "string", + Aliases: []string{ + "time word", "indefinite time", "general time", "approximate time", "vague time", + }, + Keywords: []string{ + "uncertain", "broad", "loose", "non-specific", "undefined", "imprecise", "approximation", "unsure", "flexible", "open-ended", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return adverbTimeIndefinite(f), nil + }, + }) + + AddFuncLookup("adverbfrequencydefinite", Info{ + Display: "Adverb Frequency Definite", + Category: "word", + Description: "Adverb that specifies how often an action occurs with a clear frequency", + Example: "hourly", + Output: "string", + Aliases: []string{ + "frequency word", "repetition word", "regular word", "interval word", "scheduled word", + }, + Keywords: []string{ + "interval", "regular", "pattern", "routine", "cycle", "repetition", "rate", "periodic", "consistency", "predictable", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return adverbFrequencyDefinite(f), nil + }, + }) + + AddFuncLookup("adverbfrequencyindefinite", Info{ + Display: "Adverb Frequency Indefinite", + Category: "word", + Description: "Adverb that specifies how often an action occurs without specifying a particular frequency", + Example: "occasionally", + 
Output: "string", + Aliases: []string{ + "frequency word", "indefinite frequency", "irregular word", "sporadic word", "recurring word", + }, + Keywords: []string{ + "uncertain", "sporadic", "occasional", "irregular", "unfixed", "varying", "undetermined", "fluctuating", "approximate", "inconsistent", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return adverbFrequencyIndefinite(f), nil + }, + }) + +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/word_connective.go b/vendor/github.com/brianvoe/gofakeit/v7/word_connective.go new file mode 100644 index 0000000000..4738d144db --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/word_connective.go @@ -0,0 +1,173 @@ +package gofakeit + +// Connective will generate a random connective +func Connective() string { return connective(GlobalFaker) } + +// Connective will generate a random connective +func (f *Faker) Connective() string { return connective(f) } + +func connective(f *Faker) string { + var connectiveType = map[int]string{ + 0: "connective_time", + 1: "connective_comparative", + 2: "connective_complaint", + 3: "connective_listing", + 4: "connective_casual", + 5: "connective_examplify", + } + return getRandValue(f, []string{"word", connectiveType[number(f, 0, 5)]}) +} + +// ConnectiveTime will generate a random connective time +func ConnectiveTime() string { return connectiveTime(GlobalFaker) } + +// ConnectiveTime will generate a random connective time + +func (f *Faker) ConnectiveTime() string { return connectiveTime(f) } + +func connectiveTime(f *Faker) string { + return getRandValue(f, []string{"word", "connective_time"}) +} + +// ConnectiveComparative will generate a random comparative connective +func ConnectiveComparative() string { return connectiveComparative(GlobalFaker) } + +// ConnectiveComparative will generate a random comparative connective +func (f *Faker) ConnectiveComparative() string { return connectiveComparative(f) } + +func connectiveComparative(f *Faker) string { + return getRandValue(f, []string{"word", "connective_comparative"}) +} + +// ConnectiveComplaint will generate a random complaint connective +func ConnectiveComplaint() string { return connectiveComplaint(GlobalFaker) } + +// ConnectiveComplaint will generate a random complaint connective +func (f *Faker) ConnectiveComplaint() string { return connectiveComplaint(f) } + +func connectiveComplaint(f *Faker) string { + return getRandValue(f, []string{"word", "connective_complaint"}) +} + +// ConnectiveListing will generate a random listing connective +func ConnectiveListing() string { return connectiveListing(GlobalFaker) } + +// ConnectiveListing will generate a random listing connective +func (f *Faker) ConnectiveListing() string { return connectiveListing(f) } + +func connectiveListing(f *Faker) string { + return getRandValue(f, []string{"word", "connective_listing"}) +} + +// ConnectiveCasual will generate a random casual connective +func ConnectiveCasual() string { return connectiveCasual(GlobalFaker) } + +// ConnectiveCasual will generate a random casual connective +func (f *Faker) ConnectiveCasual() string { return connectiveCasual(f) } + +func connectiveCasual(f *Faker) string { + return getRandValue(f, []string{"word", "connective_casual"}) +} + +// ConnectiveExamplify will generate a random examplify connective +func ConnectiveExamplify() string { return connectiveExamplify(GlobalFaker) } + +// ConnectiveExamplify will generate a random examplify connective +func (f *Faker) ConnectiveExamplify() string { return 
connectiveExamplify(f) } + +func connectiveExamplify(f *Faker) string { + return getRandValue(f, []string{"word", "connective_examplify"}) +} + +func addWordConnectiveLookup() { + AddFuncLookup("connective", Info{ + Display: "Connective", + Category: "word", + Description: "Word used to connect words or sentences", + Example: "such as", + Output: "string", + Aliases: []string{"joining element", "grammar connector", "sentence bridge", "word linker"}, + Keywords: []string{"connective", "word", "connect", "sentence", "grammar", "used", "conjunction", "link", "joining"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return connective(f), nil + }, + }) + + AddFuncLookup("connectivetime", Info{ + Display: "Connective Time", + Category: "word", + Description: "Connective word used to indicate a temporal relationship between events or actions", + Example: "finally", + Output: "string", + Aliases: []string{"temporal connector", "time relationship", "chronological link", "sequence element"}, + Keywords: []string{"connective", "time", "temporal", "relationship", "events", "grammar", "actions", "chronological", "sequence", "timing"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return connectiveTime(f), nil + }, + }) + + AddFuncLookup("connectivecomparative", Info{ + Display: "Connective Comparitive", + Category: "word", + Description: "Connective word used to indicate a comparison between two or more things", + Example: "in addition", + Output: "string", + Aliases: []string{"comparison connector", "contrast element", "similarity link", "grammar bridge"}, + Keywords: []string{"connective", "comparative", "comparison", "things", "grammar", "indicate", "contrast", "similarity", "relative"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return connectiveComparative(f), nil + }, + }) + + AddFuncLookup("connectivecomplaint", Info{ + Display: "Connective Complaint", + Category: "word", + Description: "Connective word used to express dissatisfaction or complaints about a situation", + Example: "besides", + Output: "string", + Aliases: []string{"objection connector", "criticism element", "dissatisfaction link", "grammar bridge"}, + Keywords: []string{"connective", "complaint", "dissatisfaction", "situation", "grammar", "express", "objection", "criticism", "negative"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return connectiveComplaint(f), nil + }, + }) + + AddFuncLookup("connectivelisting", Info{ + Display: "Connective Listing", + Category: "word", + Description: "Connective word used to list or enumerate items or examples", + Example: "firstly", + Output: "string", + Aliases: []string{"enumeration connector", "sequence element", "order link", "grammar bridge"}, + Keywords: []string{"connective", "listing", "enumerate", "items", "examples", "grammar", "list", "sequence", "order", "numbered"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return connectiveListing(f), nil + }, + }) + + AddFuncLookup("connectivecasual", Info{ + Display: "Connective Casual", + Category: "word", + Description: "Connective word used to indicate a cause-and-effect relationship between events or actions", + Example: "an outcome of", + Output: "string", + Aliases: []string{"causal connector", "effect relationship", "consequence link", "grammar bridge"}, + Keywords: []string{"connective", "casual", "cause", "effect", "relationship", "grammar", "events", "actions", "causal", "consequence", "result"}, + Generate: 
func(f *Faker, m *MapParams, info *Info) (any, error) { + return connectiveCasual(f), nil + }, + }) + + AddFuncLookup("connectiveexamplify", Info{ + Display: "Connective Examplify", + Category: "word", + Description: "Connective word used to provide examples or illustrations of a concept or idea", + Example: "then", + Output: "string", + Aliases: []string{"example connector", "illustration element", "instance link", "grammar bridge"}, + Keywords: []string{"connective", "examplify", "examples", "illustrations", "concept", "grammar", "provide", "instance", "demonstration", "sample"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return connectiveExamplify(f), nil + }, + }) +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/word_general.go b/vendor/github.com/brianvoe/gofakeit/v7/word_general.go new file mode 100644 index 0000000000..4e1085b7b5 --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/word_general.go @@ -0,0 +1,39 @@ +package gofakeit + +import ( + "strings" + + "github.com/brianvoe/gofakeit/v7/data" +) + +// Word will generate a random word +func Word() string { return word(GlobalFaker) } + +// Word will generate a random word +func (f *Faker) Word() string { return word(f) } + +func word(f *Faker) string { + word := getRandValue(f, []string{"word", randomString(f, data.WordKeys)}) + + // Word may return a couple of words, if so we will split on space and return a random word + if strings.Contains(word, " ") { + return randomString(f, strings.Split(word, " ")) + } + + return word +} + +func addWordGeneralLookup() { + AddFuncLookup("word", Info{ + Display: "Word", + Category: "word", + Description: "Basic unit of language representing a concept or thing, consisting of letters and having meaning", + Example: "man", + Output: "string", + Aliases: []string{"language unit", "speech element", "writing component", "lexical item"}, + Keywords: []string{"word", "basic", "unit", "language", "concept", "letters", "meaning", "representing", "lexeme", "vocabulary"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return word(f), nil + }, + }) +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/word_grammar.go b/vendor/github.com/brianvoe/gofakeit/v7/word_grammar.go new file mode 100644 index 0000000000..74f5750741 --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/word_grammar.go @@ -0,0 +1,36 @@ +package gofakeit + +import ( + "unicode" +) + +// SentenceSimple will generate a random simple sentence +func SentenceSimple() string { return sentenceSimple(GlobalFaker) } + +// SentenceSimple will generate a random simple sentence +func (f *Faker) SentenceSimple() string { return sentenceSimple(f) } + +func sentenceSimple(f *Faker) string { + // simple sentence consists of a noun phrase and a verb phrase + str := phraseNoun(f) + " " + phraseVerb(f) + "." 
+ + // capitalize the first letter + strR := []rune(str) + strR[0] = unicode.ToUpper(strR[0]) + return string(strR) +} + +func addWordGrammerLookup() { + AddFuncLookup("sentencesimple", Info{ + Display: "Simple Sentence", + Category: "word", + Description: "Group of words that expresses a complete thought", + Example: "A tribe fly the lemony kitchen.", + Output: "string", + Aliases: []string{"basic statement", "complete thought", "independent clause", "grammar element"}, + Keywords: []string{"sentence", "simple", "complete", "thought", "grammar", "basic", "expresses", "independent", "clause", "statement"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return sentenceSimple(f), nil + }, + }) +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/word_helper.go b/vendor/github.com/brianvoe/gofakeit/v7/word_helper.go new file mode 100644 index 0000000000..a1655ff59b --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/word_helper.go @@ -0,0 +1,45 @@ +package gofakeit + +import ( + "strings" +) + +// This will look at a few things to determine what kind of article to use for the word +func getArticle(word string) string { + // If nothing is passed return empty + if word == "" { + return "" + } + + word = strings.ToLower(word) + letters := strings.Split(word, "") + firstLetter := "" + secondLetter := "" + if len(letters) > 0 { + firstLetter = letters[0] + } + if len(letters) > 1 { + secondLetter = letters[1] + } + + // If the word starts with a, e, i, o, use an article + if firstLetter == "a" || firstLetter == "e" || firstLetter == "i" || firstLetter == "o" { + return "an" + } + + // If the word starts with a u and n or l, use an article + if firstLetter == "u" { + if secondLetter == "n" || secondLetter == "l" { + return "an" + } + } + + // If the word starts with a vowel, use an article + if firstLetter == "h" { + if secondLetter == "i" { + return "an" + } + } + + return "a" +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/word_misc.go b/vendor/github.com/brianvoe/gofakeit/v7/word_misc.go new file mode 100644 index 0000000000..422ac39628 --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/word_misc.go @@ -0,0 +1,54 @@ +package gofakeit + +// Interjection will generate a random word expressing emotion +func Interjection() string { return interjection(GlobalFaker) } + +// Interjection will generate a random word expressing emotion +func (f *Faker) Interjection() string { return interjection(f) } + +func interjection(f *Faker) string { return getRandValue(f, []string{"word", "interjection"}) } + +// LoremIpsumWord will generate a random word +func LoremIpsumWord() string { return loremIpsumWord(GlobalFaker) } + +// LoremIpsumWord will generate a random word +func (f *Faker) LoremIpsumWord() string { return loremIpsumWord(f) } + +func loremIpsumWord(f *Faker) string { return getRandValue(f, []string{"lorem", "word"}) } + +func addWordMiscLookup() { + AddFuncLookup("interjection", Info{ + Display: "Interjection", + Category: "word", + Description: "Word expressing emotion", + Example: "wow", + Output: "string", + Aliases: []string{"emotional expression", "feeling word", "reaction term", "exclamation element"}, + Keywords: []string{"interjection", "emotion", "word", "expression", "feeling", "reaction", "exclamation", "utterance", "ejaculation", "emotional"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return interjection(f), nil + }, + }) + + AddFuncLookup("loremipsumword", Info{ + Display: "Lorem Ipsum Word", + Category: "word", + 
Description: "Word of the Lorem Ipsum placeholder text used in design and publishing", + Example: "quia", + Output: "string", + Aliases: []string{ + "lorem word", + "ipsum word", + "placeholder word", + "latin word", + }, + Keywords: []string{ + "lorem", "ipsum", "word", "placeholder", + "latin", "dummy", "filler", "text", + "typography", "mockup", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return loremIpsumWord(f), nil + }, + }) +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/word_noun.go b/vendor/github.com/brianvoe/gofakeit/v7/word_noun.go new file mode 100644 index 0000000000..2c61340575 --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/word_noun.go @@ -0,0 +1,309 @@ +package gofakeit + +// Noun will generate a random noun +func Noun() string { return noun(GlobalFaker) } + +// Noun will generate a random noun +func (f *Faker) Noun() string { return noun(f) } + +func noun(f *Faker) string { + var nounType = map[int]string{ + 0: "noun_common", + 1: "noun_concrete", + 2: "noun_abstract", + 3: "noun_collective_people", + 4: "noun_collective_animal", + 5: "noun_collective_thing", + 6: "noun_countable", + 7: "noun_uncountable", + } + return getRandValue(f, []string{"word", nounType[number(f, 0, 7)]}) +} + +// NounCommon will generate a random common noun +func NounCommon() string { return nounCommon(GlobalFaker) } + +// NounCommon will generate a random common noun +func (f *Faker) NounCommon() string { return nounCommon(f) } + +func nounCommon(f *Faker) string { return getRandValue(f, []string{"word", "noun_common"}) } + +// NounConcrete will generate a random concrete noun +func NounConcrete() string { return nounConcrete(GlobalFaker) } + +// NounConcrete will generate a random concrete noun +func (f *Faker) NounConcrete() string { return nounConcrete(f) } + +func nounConcrete(f *Faker) string { return getRandValue(f, []string{"word", "noun_concrete"}) } + +// NounAbstract will generate a random abstract noun +func NounAbstract() string { return nounAbstract(GlobalFaker) } + +// NounAbstract will generate a random abstract noun +func (f *Faker) NounAbstract() string { return nounAbstract(f) } + +func nounAbstract(f *Faker) string { return getRandValue(f, []string{"word", "noun_abstract"}) } + +// NounCollectivePeople will generate a random collective noun person +func NounCollectivePeople() string { return nounCollectivePeople(GlobalFaker) } + +// NounCollectivePeople will generate a random collective noun person +func (f *Faker) NounCollectivePeople() string { return nounCollectivePeople(f) } + +func nounCollectivePeople(f *Faker) string { + return getRandValue(f, []string{"word", "noun_collective_people"}) +} + +// NounCollectiveAnimal will generate a random collective noun animal +func NounCollectiveAnimal() string { return nounCollectiveAnimal(GlobalFaker) } + +// NounCollectiveAnimal will generate a random collective noun animal +func (f *Faker) NounCollectiveAnimal() string { return nounCollectiveAnimal(f) } + +func nounCollectiveAnimal(f *Faker) string { + return getRandValue(f, []string{"word", "noun_collective_animal"}) +} + +// NounCollectiveThing will generate a random collective noun thing +func NounCollectiveThing() string { return nounCollectiveThing(GlobalFaker) } + +// NounCollectiveThing will generate a random collective noun thing +func (f *Faker) NounCollectiveThing() string { return nounCollectiveThing(f) } + +func nounCollectiveThing(f *Faker) string { + return getRandValue(f, []string{"word", "noun_collective_thing"}) +} 
+ +// NounCountable will generate a random countable noun +func NounCountable() string { return nounCountable(GlobalFaker) } + +// NounCountable will generate a random countable noun +func (f *Faker) NounCountable() string { return nounCountable(f) } + +func nounCountable(f *Faker) string { return getRandValue(f, []string{"word", "noun_countable"}) } + +// NounUncountable will generate a random uncountable noun +func NounUncountable() string { return nounUncountable(GlobalFaker) } + +// NounUncountable will generate a random uncountable noun +func (f *Faker) NounUncountable() string { return nounUncountable(f) } + +func nounUncountable(f *Faker) string { + return getRandValue(f, []string{"word", "noun_uncountable"}) +} + +// NounProper will generate a random proper noun +func NounProper() string { return nounProper(GlobalFaker) } + +// NounProper will generate a random proper noun +func (f *Faker) NounProper() string { return nounProper(f) } + +func nounProper(f *Faker) string { + switch randInt := randIntRange(f, 1, 3); randInt { + case 1: + return getRandValue(f, []string{"celebrity", "actor"}) + case 2: + genStr, _ := generate(f, getRandValue(f, []string{"address", "city"})) + return genStr + } + + return getRandValue(f, []string{"person", "first"}) +} + +// NounDeterminer will generate a random noun determiner +func NounDeterminer() string { return nounDeterminer(GlobalFaker) } + +// NounDeterminer will generate a random noun determiner +func (f *Faker) NounDeterminer() string { return nounDeterminer(f) } + +func nounDeterminer(f *Faker) string { return getRandValue(f, []string{"word", "noun_determiner"}) } + +func addWordNounLookup() { + AddFuncLookup("noun", Info{ + Display: "Noun", + Category: "word", + Description: "Person, place, thing, or idea, named or referred to in a sentence", + Example: "aunt", + Output: "string", + Aliases: []string{ + "random noun", "grammar noun", "word type", "part speech", "naming word", "lexical noun", "nominal word", + }, + Keywords: []string{ + "noun", "person", "place", "idea", "sentence", "grammar", "named", "referred", "subject", "object", "entity", "concept", "term", "substantive", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return noun(f), nil + }, + }) + + AddFuncLookup("nouncommon", Info{ + Display: "Noun Common", + Category: "word", + Description: "General name for people, places, or things, not specific or unique", + Example: "part", + Output: "string", + Aliases: []string{ + "common noun", "general noun", "generic name", "basic noun", "ordinary noun", "regular noun", "everyday noun", + }, + Keywords: []string{ + "common", "general", "name", "people", "places", "generic", "basic", "ordinary", "standard", "typical", "regular", "normal", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return nounCommon(f), nil + }, + }) + + AddFuncLookup("nounconcrete", Info{ + Display: "Noun Concrete", + Category: "word", + Description: "Names for physical entities experienced through senses like sight, touch, smell, or taste", + Example: "snowman", + Output: "string", + Aliases: []string{ + "concrete noun", "physical noun", "tangible noun", "material noun", "sensory noun", "real noun", "perceptible noun", + }, + Keywords: []string{ + "concrete", "physical", "entities", "senses", "sight", "touch", "smell", "taste", "tangible", "material", "solid", "real", "visible", "touchable", "observable", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return nounConcrete(f), nil + }, + 
}) + + AddFuncLookup("nounabstract", Info{ + Display: "Noun Abstract", + Category: "word", + Description: "Ideas, qualities, or states that cannot be perceived with the five senses", + Example: "confusion", + Output: "string", + Aliases: []string{ + "abstract noun", "concept noun", "idea noun", "intangible noun", "mental noun", "notional noun", "theoretical noun", + }, + Keywords: []string{ + "abstract", "ideas", "qualities", "states", "senses", "concept", "intangible", "mental", "theoretical", "emotional", "spiritual", "intellectual", "philosophical", "metaphysical", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return nounAbstract(f), nil + }, + }) + + AddFuncLookup("nouncollectivepeople", Info{ + Display: "Noun Collective People", + Category: "word", + Description: "Group of people or things regarded as a unit", + Example: "body", + Output: "string", + Aliases: []string{ + "collective noun", "group noun", "people group", "crowd noun", "assembly noun", "community noun", "societal noun", + }, + Keywords: []string{ + "collective", "people", "group", "unit", "regarded", "crowd", "assembly", "gathering", "team", "committee", "audience", "class", "family", "society", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return nounCollectivePeople(f), nil + }, + }) + + AddFuncLookup("nouncollectiveanimal", Info{ + Display: "Noun Collective Animal", + Category: "word", + Description: "Group of animals, like a 'pack' of wolves or a 'flock' of birds", + Example: "party", + Output: "string", + Aliases: []string{ + "animal collective", "pack noun", "flock noun", "herd noun", "swarm noun", "colony noun", "pride noun", + }, + Keywords: []string{ + "collective", "animal", "group", "pack", "flock", "animals", "herd", "swarm", "pride", "school", "colony", "pod", "gaggle", "murder", "exaltation", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return nounCollectiveAnimal(f), nil + }, + }) + + AddFuncLookup("nouncollectivething", Info{ + Display: "Noun Collective Thing", + Category: "word", + Description: "Group of objects or items, such as a 'bundle' of sticks or a 'cluster' of grapes", + Example: "hand", + Output: "string", + Aliases: []string{ + "object collective", "bundle noun", "cluster noun", "collection noun", "set noun", "batch noun", "pile noun", + }, + Keywords: []string{ + "collective", "thing", "group", "objects", "items", "bundle", "cluster", "collection", "set", "batch", "stack", "pile", "heap", "bunch", "array", "assortment", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return nounCollectiveThing(f), nil + }, + }) + + AddFuncLookup("nouncountable", Info{ + Display: "Noun Countable", + Category: "word", + Description: "Items that can be counted individually", + Example: "neck", + Output: "string", + Aliases: []string{ + "countable noun", "count noun", "discrete item", "enumerable noun", "plural noun", "numerical noun", "measurable noun", + }, + Keywords: []string{ + "countable", "items", "counted", "individually", "discrete", "enumerable", "plural", "many", "few", "number", "objects", "things", "units", "pieces", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return nounCountable(f), nil + }, + }) + + AddFuncLookup("noununcountable", Info{ + Display: "Noun Uncountable", + Category: "word", + Description: "Items that can't be counted individually", + Example: "seafood", + Output: "string", + Aliases: []string{ + "uncountable noun", "mass noun", "non-count 
noun", "bulk noun", "substance noun", "continuous noun", "material noun", + }, + Keywords: []string{ + "uncountable", "items", "counted", "individually", "mass", "bulk", "substance", "material", "liquid", "powder", "grain", "continuous", "indivisible", "measurement", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return nounUncountable(f), nil + }, + }) + + AddFuncLookup("nounproper", Info{ + Display: "Noun Proper", + Category: "word", + Description: "Specific name for a particular person, place, or organization", + Example: "John", + Output: "string", + Aliases: []string{ + "proper noun", "specific name", "person name", "place name", "organization name", "capitalized noun", "unique name", + }, + Keywords: []string{ + "proper", "specific", "name", "person", "place", "organization", "capitalized", "title", "brand", "company", "city", "country", "individual", "entity", "designation", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return nounProper(f), nil + }, + }) + + AddFuncLookup("noundeterminer", Info{ + Display: "Noun Determiner", + Category: "word", + Description: "Word that introduces a noun and identifies it as a noun", + Example: "your", + Output: "string", + Aliases: []string{ + "determiner word", "article word", "noun introducer", "specifier word", "modifier word", "defining word", "introductory word", + }, + Keywords: []string{ + "determiner", "word", "introduces", "identifies", "article", "specifier", "modifier", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return nounDeterminer(f), nil + }, + }) + +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/word_preposition.go b/vendor/github.com/brianvoe/gofakeit/v7/word_preposition.go new file mode 100644 index 0000000000..16ad09cef1 --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/word_preposition.go @@ -0,0 +1,100 @@ +package gofakeit + +// Preposition will generate a random preposition +func Preposition() string { return preposition(GlobalFaker) } + +// Preposition will generate a random preposition +func (f *Faker) Preposition() string { return preposition(f) } + +func preposition(f *Faker) string { + var prepType = map[int]string{ + 0: "preposition_simple", + 1: "preposition_double", + 2: "preposition_compound", + } + return getRandValue(f, []string{"word", prepType[number(f, 0, 2)]}) +} + +// PrepositionSimple will generate a random simple preposition +func PrepositionSimple() string { return prepositionSimple(GlobalFaker) } + +// PrepositionSimple will generate a random simple preposition +func (f *Faker) PrepositionSimple() string { return prepositionSimple(f) } + +func prepositionSimple(f *Faker) string { + return getRandValue(f, []string{"word", "preposition_simple"}) +} + +// PrepositionDouble will generate a random double preposition +func PrepositionDouble() string { return prepositionDouble(GlobalFaker) } + +// PrepositionDouble will generate a random double preposition +func (f *Faker) PrepositionDouble() string { return prepositionDouble(f) } + +func prepositionDouble(f *Faker) string { + return getRandValue(f, []string{"word", "preposition_double"}) +} + +// PrepositionCompound will generate a random compound preposition +func PrepositionCompound() string { return prepositionCompound(GlobalFaker) } + +// PrepositionCompound will generate a random compound preposition +func (f *Faker) PrepositionCompound() string { return prepositionCompound(f) } + +func prepositionCompound(f *Faker) string { + return getRandValue(f, 
[]string{"word", "preposition_compound"}) +} + +func addWordPrepositionLookup() { + AddFuncLookup("preposition", Info{ + Display: "Preposition", + Category: "word", + Description: "Words used to express the relationship of a noun or pronoun to other words in a sentence", + Example: "other than", + Output: "string", + Aliases: []string{"relationship connector", "grammar link", "sentence bridge", "word connector"}, + Keywords: []string{"preposition", "relationship", "noun", "pronoun", "sentence", "grammar", "express", "connector", "link", "relational"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return preposition(f), nil + }, + }) + + AddFuncLookup("prepositionsimple", Info{ + Display: "Preposition Simple", + Category: "word", + Description: "Single-word preposition showing relationships between 2 parts of a sentence", + Example: "out", + Output: "string", + Aliases: []string{"basic connector", "fundamental link", "single element", "grammar bridge"}, + Keywords: []string{"preposition", "simple", "single-word", "relationships", "parts", "sentence", "grammar", "showing", "basic", "fundamental", "elementary"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return prepositionSimple(f), nil + }, + }) + + AddFuncLookup("prepositiondouble", Info{ + Display: "Preposition Double", + Category: "word", + Description: "Two-word combination preposition, indicating a complex relation", + Example: "before", + Output: "string", + Aliases: []string{"two-word connector", "complex relation", "combination element", "grammar bridge"}, + Keywords: []string{"preposition", "double", "two-word", "combination", "complex", "relation", "grammar", "indicating", "compound", "multi-word", "paired"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return prepositionDouble(f), nil + }, + }) + + AddFuncLookup("prepositioncompound", Info{ + Display: "Preposition Compound", + Category: "word", + Description: "Preposition that can be formed by combining two or more prepositions", + Example: "according to", + Output: "string", + Aliases: []string{"multi-part connector", "complex combination", "formed element", "grammar bridge"}, + Keywords: []string{"preposition", "compound", "combining", "two", "more", "prepositions", "grammar", "formed", "complex", "multi-part", "constructed"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return prepositionCompound(f), nil + }, + }) +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/word_pronoun.go b/vendor/github.com/brianvoe/gofakeit/v7/word_pronoun.go new file mode 100644 index 0000000000..f13ae14a87 --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/word_pronoun.go @@ -0,0 +1,220 @@ +package gofakeit + +// Pronoun will generate a random pronoun +func Pronoun() string { return pronoun(GlobalFaker) } + +// Pronoun will generate a random pronoun +func (f *Faker) Pronoun() string { return pronoun(f) } + +func pronoun(f *Faker) string { + var pronounType = map[int]string{ + 0: "pronoun_personal", + 1: "pronoun_object", + 2: "pronoun_possessive", + 3: "pronoun_reflective", + 4: "pronoun_indefinite", + 5: "pronoun_demonstrative", + 6: "pronoun_interrogative", + 7: "pronoun_relative", + } + return getRandValue(f, []string{"word", pronounType[number(f, 0, 7)]}) +} + +// PronounPersonal will generate a random personal pronoun +func PronounPersonal() string { return pronounPersonal(GlobalFaker) } + +// PronounPersonal will generate a random personal pronoun +func (f *Faker) PronounPersonal() 
string { return pronounPersonal(f) } + +func pronounPersonal(f *Faker) string { + return getRandValue(f, []string{"word", "pronoun_personal"}) +} + +// PronounObject will generate a random object pronoun +func PronounObject() string { return pronounObject(GlobalFaker) } + +// PronounObject will generate a random object pronoun +func (f *Faker) PronounObject() string { return pronounObject(f) } + +func pronounObject(f *Faker) string { + return getRandValue(f, []string{"word", "pronoun_object"}) +} + +// PronounPossessive will generate a random possessive pronoun +func PronounPossessive() string { return pronounPossessive(GlobalFaker) } + +// PronounPossessive will generate a random possessive pronoun +func (f *Faker) PronounPossessive() string { return pronounPossessive(f) } + +func pronounPossessive(f *Faker) string { + return getRandValue(f, []string{"word", "pronoun_possessive"}) +} + +// PronounReflective will generate a random reflective pronoun +func PronounReflective() string { return pronounReflective(GlobalFaker) } + +// PronounReflective will generate a random reflective pronoun +func (f *Faker) PronounReflective() string { return pronounReflective(f) } + +func pronounReflective(f *Faker) string { + return getRandValue(f, []string{"word", "pronoun_reflective"}) +} + +// PronounIndefinite will generate a random indefinite pronoun +func PronounIndefinite() string { return pronounIndefinite(GlobalFaker) } + +// PronounIndefinite will generate a random indefinite pronoun +func (f *Faker) PronounIndefinite() string { return pronounIndefinite(f) } + +func pronounIndefinite(f *Faker) string { + return getRandValue(f, []string{"word", "pronoun_indefinite"}) +} + +// PronounDemonstrative will generate a random demonstrative pronoun +func PronounDemonstrative() string { return pronounDemonstrative(GlobalFaker) } + +// PronounDemonstrative will generate a random demonstrative pronoun +func (f *Faker) PronounDemonstrative() string { return pronounDemonstrative(f) } + +func pronounDemonstrative(f *Faker) string { + return getRandValue(f, []string{"word", "pronoun_demonstrative"}) +} + +// PronounInterrogative will generate a random interrogative pronoun +func PronounInterrogative() string { return pronounInterrogative(GlobalFaker) } + +// PronounInterrogative will generate a random interrogative pronoun +func (f *Faker) PronounInterrogative() string { return pronounInterrogative(f) } + +func pronounInterrogative(f *Faker) string { + return getRandValue(f, []string{"word", "pronoun_interrogative"}) +} + +// PronounRelative will generate a random relative pronoun +func PronounRelative() string { return pronounRelative(GlobalFaker) } + +// PronounRelative will generate a random relative pronoun +func (f *Faker) PronounRelative() string { return pronounRelative(f) } + +func pronounRelative(f *Faker) string { + return getRandValue(f, []string{"word", "pronoun_relative"}) +} + +func addWordPronounLookup() { + AddFuncLookup("pronoun", Info{ + Display: "Pronoun", + Category: "word", + Description: "Word used in place of a noun to avoid repetition", + Example: "me", + Output: "string", + Aliases: []string{"noun substitute", "word replacement", "grammar element", "reference word"}, + Keywords: []string{"pronoun", "noun", "replacement", "grammar", "repetition", "substitute", "reference", "avoidance"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return pronoun(f), nil + }, + }) + + AddFuncLookup("pronounpersonal", Info{ + Display: "Pronoun Personal", + Category: "word", + 
Description: "Pronoun referring to a specific persons or things", + Example: "it", + Output: "string", + Aliases: []string{"personal reference", "specific entity", "individual pronoun", "grammar element"}, + Keywords: []string{"pronoun", "personal", "specific", "person", "thing", "grammar", "referring", "individual", "entity", "identity"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return pronounPersonal(f), nil + }, + }) + + AddFuncLookup("pronounobject", Info{ + Display: "Pronoun Object", + Category: "word", + Description: "Pronoun used as the object of a verb or preposition", + Example: "it", + Output: "string", + Aliases: []string{"object reference", "verb object", "preposition object", "grammar function"}, + Keywords: []string{"pronoun", "verb", "preposition", "grammar", "used", "objective", "case", "receiver"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return pronounObject(f), nil + }, + }) + + AddFuncLookup("pronounpossessive", Info{ + Display: "Pronoun Possessive", + Category: "word", + Description: "Pronoun indicating ownership or belonging", + Example: "mine", + Output: "string", + Aliases: []string{"ownership indicator", "belonging reference", "possession word", "grammar element"}, + Keywords: []string{"pronoun", "possessive", "ownership", "belonging", "grammar", "indicating", "possession", "property"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return pronounPossessive(f), nil + }, + }) + + AddFuncLookup("pronounreflective", Info{ + Display: "Pronoun Reflective", + Category: "word", + Description: "Pronoun referring back to the subject of the sentence", + Example: "myself", + Output: "string", + Aliases: []string{"self reference", "subject reflection", "backward reference", "grammar element"}, + Keywords: []string{"pronoun", "reflective", "subject", "sentence", "grammar", "referring", "reflexive", "self", "mirror"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return pronounReflective(f), nil + }, + }) + + AddFuncLookup("pronounindefinite", Info{ + Display: "Pronoun Indefinite", + Category: "word", + Description: "Pronoun that does not refer to a specific person or thing", + Example: "few", + Output: "string", + Aliases: []string{"vague reference", "general pronoun", "unspecific word", "grammar element"}, + Keywords: []string{"pronoun", "indefinite", "specific", "person", "grammar", "refer", "vague", "general", "unspecified"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return pronounIndefinite(f), nil + }, + }) + + AddFuncLookup("pronoundemonstrative", Info{ + Display: "Pronoun Demonstrative", + Category: "word", + Description: "Pronoun that points out specific people or things", + Example: "this", + Output: "string", + Aliases: []string{"pointing reference", "specific indicator", "demonstration word", "grammar element"}, + Keywords: []string{"pronoun", "demonstrative", "specific", "people", "grammar", "points", "indicate", "reference", "pointing"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return pronounDemonstrative(f), nil + }, + }) + + AddFuncLookup("pronouninterrogative", Info{ + Display: "Pronoun Interrogative", + Category: "word", + Description: "Pronoun used to ask questions", + Example: "what", + Output: "string", + Aliases: []string{"question word", "inquiry reference", "interrogation element", "grammar function"}, + Keywords: []string{"pronoun", "interrogative", "question", "ask", "grammar", "used", 
"inquiry", "wh-word", "questioning"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return pronounInterrogative(f), nil + }, + }) + + AddFuncLookup("pronounrelative", Info{ + Display: "Pronoun Relative", + Category: "word", + Description: "Pronoun that introduces a clause, referring back to a noun or pronoun", + Example: "as", + Output: "string", + Aliases: []string{"backward reference", "linking pronoun", "grammar element"}, + Keywords: []string{"pronoun", "relative", "clause", "noun", "grammar", "introduces", "referring", "connector", "link"}, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return pronounRelative(f), nil + }, + }) +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/word_verb.go b/vendor/github.com/brianvoe/gofakeit/v7/word_verb.go new file mode 100644 index 0000000000..6d24efa18f --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/word_verb.go @@ -0,0 +1,164 @@ +package gofakeit + +// Verb will generate a random verb +func Verb() string { return verb(GlobalFaker) } + +// Verb will generate a random verb +func (f *Faker) Verb() string { return verb(f) } + +func verb(f *Faker) string { + var verbType = map[int]string{ + 0: "verb_action", + 1: "verb_linking", + 2: "verb_helping", + } + return getRandValue(f, []string{"word", verbType[number(f, 0, 2)]}) +} + +// VerbAction will generate a random action verb +func VerbAction() string { return verbAction(GlobalFaker) } + +// VerbAction will generate a random action verb +func (f *Faker) VerbAction() string { return verbAction(f) } + +func verbAction(f *Faker) string { return getRandValue(f, []string{"word", "verb_action"}) } + +// VerbTransitive will generate a random transitive verb +func VerbTransitive() string { return verbTransitive(GlobalFaker) } + +// VerbTransitive will generate a random transitive verb +func (f *Faker) VerbTransitive() string { return verbTransitive(f) } + +func verbTransitive(f *Faker) string { return getRandValue(f, []string{"word", "verb_transitive"}) } + +// VerbIntransitive will generate a random intransitive verb +func VerbIntransitive() string { return verbIntransitive(GlobalFaker) } + +// VerbIntransitive will generate a random intransitive verb +func (f *Faker) VerbIntransitive() string { return verbIntransitive(f) } + +func verbIntransitive(f *Faker) string { + return getRandValue(f, []string{"word", "verb_intransitive"}) +} + +// VerbLinking will generate a random linking verb +func VerbLinking() string { return verbLinking(GlobalFaker) } + +// VerbLinking will generate a random linking verb +func (f *Faker) VerbLinking() string { return verbLinking(f) } + +func verbLinking(f *Faker) string { return getRandValue(f, []string{"word", "verb_linking"}) } + +// VerbHelping will generate a random helping verb +func VerbHelping() string { return verbHelping(GlobalFaker) } + +// VerbHelping will generate a random helping verb +func (f *Faker) VerbHelping() string { return verbHelping(f) } + +func verbHelping(f *Faker) string { return getRandValue(f, []string{"word", "verb_helping"}) } + +func addWordVerbLookup() { + AddFuncLookup("verb", Info{ + Display: "Verb", + Category: "word", + Description: "Word expressing an action, event or state", + Example: "release", + Output: "string", + Aliases: []string{ + "action word", "doing word", "predicate word", "verb form", "process word", + }, + Keywords: []string{ + "movement", "change", "existence", "process", "condition", "happening", "expression", "statement", "activity", "function", + }, + Generate: 
func(f *Faker, m *MapParams, info *Info) (any, error) { + return verb(f), nil + }, + }) + + AddFuncLookup("verbaction", Info{ + Display: "Action Verb", + Category: "word", + Description: "Verb indicating a physical or mental action", + Example: "close", + Output: "string", + Aliases: []string{ + "movement word", "doing action", "behavior word", "mental action", "physical action", + }, + Keywords: []string{ + "activity", "task", "operation", "motion", "effort", "performance", "gesture", "response", "execution", "behavior", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return verbAction(f), nil + }, + }) + + AddFuncLookup("verbtransitive", Info{ + Display: "Transitive Verb", + Category: "word", + Description: "Verb that requires a direct object to complete its meaning", + Example: "follow", + Output: "string", + Aliases: []string{ + "object verb", "requires object", "dependent verb", "object-linked", "receiver word", + }, + Keywords: []string{ + "direct", "receiver", "transfer", "target", "completion", "relation", "dependent", "object-based", "action-transfer", "link", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return verbTransitive(f), nil + }, + }) + + AddFuncLookup("verbintransitive", Info{ + Display: "Intransitive Verb", + Category: "word", + Description: "Verb that does not require a direct object to complete its meaning", + Example: "laugh", + Output: "string", + Aliases: []string{ + "standalone verb", "independent word", "no object verb", "complete action", "self-contained verb", + }, + Keywords: []string{ + "autonomous", "independent", "non-transfer", "self-complete", "expression", "state", "behavior", "occur", "perform", "action-only", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return verbIntransitive(f), nil + }, + }) + + AddFuncLookup("verblinking", Info{ + Display: "Linking Verb", + Category: "word", + Description: "Verb that connects the subject of a sentence to a subject complement", + Example: "was", + Output: "string", + Aliases: []string{ + "connecting verb", "copular verb", "bridge word", "link word", "equating verb", + }, + Keywords: []string{ + "relation", "connection", "equivalence", "identification", "state", "being", "subject-link", "copula", "connector", "description", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return verbLinking(f), nil + }, + }) + + AddFuncLookup("verbhelping", Info{ + Display: "Helping Verb", + Category: "word", + Description: "Auxiliary verb that helps the main verb complete the sentence", + Example: "be", + Output: "string", + Aliases: []string{ + "auxiliary verb", "supporting verb", "assisting word", "helper verb", "modal verb", + }, + Keywords: []string{ + "tense", "mood", "voice", "aspect", "support", "structure", + "compound", "formation", "assistance", + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + return verbHelping(f), nil + }, + }) + +} diff --git a/vendor/github.com/brianvoe/gofakeit/v7/xml.go b/vendor/github.com/brianvoe/gofakeit/v7/xml.go new file mode 100644 index 0000000000..a3bb99f8de --- /dev/null +++ b/vendor/github.com/brianvoe/gofakeit/v7/xml.go @@ -0,0 +1,360 @@ +package gofakeit + +import ( + "bytes" + "encoding/json" + "encoding/xml" + "errors" + "reflect" +) + +// XMLOptions defines values needed for json generation +type XMLOptions struct { + Type string `json:"type" xml:"type" fake:"{randomstring:[array,single]}"` // single or array + RootElement string 
`json:"root_element" xml:"root_element"` + RecordElement string `json:"record_element" xml:"record_element"` + RowCount int `json:"row_count" xml:"row_count" fake:"{number:1,10}"` + Indent bool `json:"indent" xml:"indent"` + Fields []Field `json:"fields" xml:"fields" fake:"{fields}"` +} + +type xmlArray struct { + XMLName xml.Name + Array []xmlMap +} + +type xmlMap struct { + XMLName xml.Name + KeyOrder []string + Map map[string]any `xml:",chardata"` +} + +type xmlEntry struct { + XMLName xml.Name + Value any `xml:",chardata"` +} + +func (m xmlMap) MarshalXML(e *xml.Encoder, start xml.StartElement) error { + if len(m.Map) == 0 { + return nil + } + + start.Name = m.XMLName + + err := e.EncodeToken(start) + if err != nil { + return err + } + + err = xmlMapLoop(e, &m) + if err != nil { + return err + } + + return e.EncodeToken(start.End()) +} + +func xmlMapLoop(e *xml.Encoder, m *xmlMap) error { + var err error + + // Check if xmlmap has key order if not create it + // Get key order by order of fields array + if m.KeyOrder == nil { + m.KeyOrder = make([]string, len(m.Map)) + for k := range m.Map { + m.KeyOrder = append(m.KeyOrder, k) + } + } + + for _, key := range m.KeyOrder { + v := reflect.ValueOf(m.Map[key]) + + // Always get underlyning Value of value + if v.Kind() == reflect.Ptr { + v = reflect.Indirect(v) + } + + switch v.Kind() { + case reflect.Bool, + reflect.String, + reflect.Int, reflect.Int8, reflect.Int32, reflect.Int64, + reflect.Uint, reflect.Uint8, reflect.Uint32, reflect.Uint64, + reflect.Float32, reflect.Float64: + err = e.Encode(xmlEntry{XMLName: xml.Name{Local: key}, Value: m.Map[key]}) + if err != nil { + return err + } + case reflect.Slice: + e.EncodeToken(xml.StartElement{Name: xml.Name{Local: key}}) + for i := 0; i < v.Len(); i++ { + err = e.Encode(xmlEntry{XMLName: xml.Name{Local: "value"}, Value: v.Index(i).String()}) + if err != nil { + return err + } + } + e.EncodeToken(xml.EndElement{Name: xml.Name{Local: key}}) + case reflect.Map: + err = e.Encode(xmlMap{ + XMLName: xml.Name{Local: key}, + Map: m.Map[key].(map[string]any), + }) + if err != nil { + return err + } + case reflect.Struct: + // Convert struct to map[string]any + // So we can rewrap element + var inInterface map[string]any + inrec, _ := json.Marshal(m.Map[key]) + json.Unmarshal(inrec, &inInterface) + + err = e.Encode(xmlMap{ + XMLName: xml.Name{Local: key}, + Map: inInterface, + }) + if err != nil { + return err + } + default: + err = e.Encode(m.Map[key]) + if err != nil { + return err + } + } + } + + return nil +} + +// XML generates an object or an array of objects in json format +// A nil XMLOptions returns a randomly structured XML. +func XML(xo *XMLOptions) ([]byte, error) { return xmlFunc(GlobalFaker, xo) } + +// XML generates an object or an array of objects in json format +// A nil XMLOptions returns a randomly structured XML. 
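Before the encoder plumbing below, a brief illustrative sketch of how this XML generator might be driven from application code. It assumes the Field struct exposes the Name and Function members that xmlFunc reads, and that the firstname and email lookups are registered elsewhere in the package.

    package main

    import (
        "fmt"

        "github.com/brianvoe/gofakeit/v7"
    )

    func main() {
        // Two <person> records wrapped in a <people> root element.
        out, err := gofakeit.XML(&gofakeit.XMLOptions{
            Type:          "array",
            RootElement:   "people",
            RecordElement: "person",
            RowCount:      2,
            Indent:        true,
            Fields: []gofakeit.Field{
                {Name: "first_name", Function: "firstname"},
                {Name: "email", Function: "email"},
            },
        })
        if err != nil {
            panic(err)
        }
        fmt.Println(string(out))
    }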
+func (f *Faker) XML(xo *XMLOptions) ([]byte, error) { return xmlFunc(f, xo) } + +func xmlFunc(f *Faker, xo *XMLOptions) ([]byte, error) { + if xo == nil { + // We didn't get a XMLOptions, so create a new random one + err := f.Struct(&xo) + if err != nil { + return nil, err + } + } + + // Check to make sure they passed in a type + if xo.Type != "single" && xo.Type != "array" { + return nil, errors.New("invalid type, must be array or object") + } + + // Check fields length + if len(xo.Fields) <= 0 { + return nil, errors.New("must pass fields in order to build json object(s)") + } + + // Check root element string + if xo.RootElement == "" { + xo.RecordElement = "xml" + } + + // Check record element string + if xo.RecordElement == "" { + xo.RecordElement = "record" + } + + // Get key order by order of fields array + keyOrder := make([]string, 0, len(xo.Fields)) + for _, f := range xo.Fields { + keyOrder = append(keyOrder, f.Name) + } + + if xo.Type == "single" { + v := xmlMap{ + XMLName: xml.Name{Local: xo.RootElement}, + KeyOrder: keyOrder, + Map: make(map[string]any), + } + + // Loop through fields and add to them to map[string]any + for _, field := range xo.Fields { + // Get function info + funcInfo := GetFuncLookup(field.Function) + if funcInfo == nil { + return nil, errors.New("invalid function, " + field.Function + " does not exist") + } + + value, err := funcInfo.Generate(f, &field.Params, funcInfo) + if err != nil { + return nil, err + } + + v.Map[field.Name] = value + } + + // Marshal into bytes + var b bytes.Buffer + x := xml.NewEncoder(&b) + if xo.Indent { + x.Indent("", " ") + } + err := x.Encode(v) + if err != nil { + return nil, err + } + + return b.Bytes(), nil + } + + if xo.Type == "array" { + // Make sure you set a row count + if xo.RowCount <= 0 { + return nil, errors.New("must have row count") + } + + xa := xmlArray{ + XMLName: xml.Name{Local: xo.RootElement}, + Array: make([]xmlMap, xo.RowCount), + } + + for i := 1; i <= int(xo.RowCount); i++ { + v := xmlMap{ + XMLName: xml.Name{Local: xo.RecordElement}, + KeyOrder: keyOrder, + Map: make(map[string]any), + } + + // Loop through fields and add to them to map[string]any + for _, field := range xo.Fields { + if field.Function == "autoincrement" { + v.Map[field.Name] = i + continue + } + + // Get function info + funcInfo := GetFuncLookup(field.Function) + if funcInfo == nil { + return nil, errors.New("invalid function, " + field.Function + " does not exist") + } + + value, err := funcInfo.Generate(f, &field.Params, funcInfo) + if err != nil { + return nil, err + } + + v.Map[field.Name] = value + } + + xa.Array = append(xa.Array, v) + } + + // Marshal into bytes + var b bytes.Buffer + x := xml.NewEncoder(&b) + if xo.Indent { + x.Indent("", " ") + } + err := x.Encode(xa) + if err != nil { + return nil, err + } + + return b.Bytes(), nil + } + + return nil, errors.New("invalid type, must be array or object") +} + +func addFileXMLLookup() { + AddFuncLookup("xml", Info{ + Display: "XML", + Category: "file", + Description: "Generates an single or an array of elements in xml format", + Example: ` + + Markus + Moen + Dc0VYXjkWABx + + + Osborne + Hilll + XPJ9OVNbs5lm + +`, + Output: "[]byte", + ContentType: "application/xml", + Aliases: []string{ + "xml document", + "extensible markup", + "tagged data", + "hierarchical structure", + "serialized tree", + }, + Keywords: []string{ + "xml", "extensible", "markup", "language", "elements", "format", + "structured", "generates", "tags", "attributes", "nested", + }, + Params: []Param{ + {Field: 
"type", Display: "Type", Type: "string", Default: "single", Options: []string{"single", "array"}, Description: "Type of XML, single or array"}, + {Field: "rootelement", Display: "Root Element", Type: "string", Default: "xml", Description: "Root element wrapper name"}, + {Field: "recordelement", Display: "Record Element", Type: "string", Default: "record", Description: "Record element for each record row"}, + {Field: "rowcount", Display: "Row Count", Type: "int", Default: "100", Description: "Number of rows in JSON array"}, + {Field: "indent", Display: "Indent", Type: "bool", Default: "false", Description: "Whether or not to add indents and newlines"}, + {Field: "fields", Display: "Fields", Type: "[]Field", Description: "Fields containing key name and function to run in json format"}, + }, + Generate: func(f *Faker, m *MapParams, info *Info) (any, error) { + xo := XMLOptions{} + + typ, err := info.GetString(m, "type") + if err != nil { + return nil, err + } + xo.Type = typ + + rootElement, err := info.GetString(m, "rootelement") + if err != nil { + return nil, err + } + xo.RootElement = rootElement + + recordElement, err := info.GetString(m, "recordelement") + if err != nil { + return nil, err + } + xo.RecordElement = recordElement + + rowcount, err := info.GetInt(m, "rowcount") + if err != nil { + return nil, err + } + xo.RowCount = rowcount + + fieldsStr, err := info.GetStringArray(m, "fields") + if err != nil { + return nil, err + } + + indent, err := info.GetBool(m, "indent") + if err != nil { + return nil, err + } + xo.Indent = indent + + // Check to make sure fields has length + if len(fieldsStr) > 0 { + xo.Fields = make([]Field, len(fieldsStr)) + + for i, f := range fieldsStr { + // Unmarshal fields string into fields array + err = json.Unmarshal([]byte(f), &xo.Fields[i]) + if err != nil { + return nil, errors.New("unable to decode json string") + } + } + } + + return xmlFunc(f, &xo) + }, + }) +} diff --git a/vendor/github.com/cention-sany/utf7/.travis.yml b/vendor/github.com/cention-sany/utf7/.travis.yml new file mode 100644 index 0000000000..f3e306bd84 --- /dev/null +++ b/vendor/github.com/cention-sany/utf7/.travis.yml @@ -0,0 +1,12 @@ +language: go + +go: + - 1.4.2 + - 1.7.4 + - tip + +install: + - go get -v ./... + - go get golang.org/x/text/encoding + - go get golang.org/x/text/transform + diff --git a/vendor/github.com/cention-sany/utf7/LICENSE b/vendor/github.com/cention-sany/utf7/LICENSE new file mode 100644 index 0000000000..099f43b330 --- /dev/null +++ b/vendor/github.com/cention-sany/utf7/LICENSE @@ -0,0 +1,29 @@ +Copyright (c) 2013 The Go-IMAP Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the + distribution. + + * Neither the name of the go-imap project nor the names of its + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/cention-sany/utf7/README.md b/vendor/github.com/cention-sany/utf7/README.md new file mode 100644 index 0000000000..27bd77c353 --- /dev/null +++ b/vendor/github.com/cention-sany/utf7/README.md @@ -0,0 +1,2 @@ +# utf7 [![Build Status](https://travis-ci.org/cention-sany/utf7.png?branch=master)](https://travis-ci.org/cention-sany/utf7) [![GoDoc](https://godoc.org/github.com/cention-sany/utf7?status.png)](https://godoc.org/github.com/cention-sany/utf7) [![Exago](https://api.exago.io:443/badge/cov/github.com/cention-sany/utf7)](https://exago.io/project/github.com/cention-sany/utf7) [![Exago](https://api.exago.io:443/badge/rank/github.com/cention-sany/utf7)](https://exago.io/project/github.com/cention-sany/utf7) +RFC 2152 - UTF7 encoding and decoding. diff --git a/vendor/github.com/cention-sany/utf7/utf7.go b/vendor/github.com/cention-sany/utf7/utf7.go new file mode 100644 index 0000000000..36f39bba92 --- /dev/null +++ b/vendor/github.com/cention-sany/utf7/utf7.go @@ -0,0 +1,518 @@ +// Copyright 2013 The Go-IMAP Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +/* +This package modified from: +https://github.com/mxk/go-imap/blob/master/imap/utf7.go +https://github.com/mxk/go-imap/blob/master/imap/utf7_test.go +IMAP specification uses modified UTF-7. Following are the differences: + 1) Printable US-ASCII except & (0x20 to 0x25 and 0x27 to 0x7e) MUST represent by themselves. + 2) '&' is used to shift modified BASE64 instead of '+'. + 3) Can NOT use superfluous null shift (&...-&...- should be just &......-). + 4) ',' is used in BASE64 code instead of '/'. + 5) '&' is represented '&-'. You can have many '&-&-&-&-'. + 6) No implicit shift from BASE64 to US-ASCII. All BASE64 must end with '-'. + +Actual UTF-7 specification: +Rule 1: direct characters: 62 alphanumeric characters and 9 symbols: ' ( ) , - . / : ? +Rule 2: optional direct characters: all other printable characters in the range +U+0020–U+007E except ~ \ + and space. Plus sign (+) may be encoded as +- +(special case). Plus sign (+) mean the start of 'modified Base64 encoded UTF-16'. +The end of this block is indicated by any character not in the modified Base64. +If character after modified Base64 is a '-' then it is consumed. + +Example: + "1 + 1 = 2" is encoded as "1 +- 1 +AD0 2" //+AD0 is the '=' sign. + "£1" is encoded as "+AKM-1" //+AKM- is the '£' sign where '-' is consumed. + +A "+" character followed immediately by any character other than members +of modified Base64 or "-" is an ill-formed sequence. Convert to Unicode code +point then apply modified BASE64 (rfc2045) to it. Modified BASE64 do not use +padding instead add extra bits. 
Lines should never be broken in the middle of +a UTF-7 shifted sequence. Rule 3: Space, tab, carriage return and line feed may +also be represented directly as single ASCII bytes. Further content transfer +encoding may be needed if using in email environment. +*/ +package utf7 + +import ( + "bytes" + "encoding/base64" + "errors" + "io/ioutil" + "unicode/utf16" + "unicode/utf8" + + "golang.org/x/text/encoding" + "golang.org/x/text/transform" +) + +const ( + uRepl = '\uFFFD' // Unicode replacement code point + u7min = 0x20 // Minimum self-representing UTF-7 value + u7max = 0x7E // Maximum self-representing UTF-7 value +) + +// copy from golang.org/x/text/encoding/internal +type simpleEncoding struct { + Decoder transform.Transformer + Encoder transform.Transformer +} + +func (e *simpleEncoding) NewDecoder() *encoding.Decoder { + return &encoding.Decoder{Transformer: e.Decoder} +} + +func (e *simpleEncoding) NewEncoder() *encoding.Encoder { + return &encoding.Encoder{Transformer: e.Encoder} +} + +var ( + UTF7 encoding.Encoding = &simpleEncoding{ + utf7Decoder{}, + utf7Encoder{}, + } +) + +// ErrBadUTF7 is returned to indicate invalid modified UTF-7 encoding. +var ErrBadUTF7 = errors.New("utf7: bad utf-7 encoding") + +// Base64 codec for code points outside of the 0x20-0x7E range. +const modifiedbase64 = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/" + +var u7enc = base64.NewEncoding(modifiedbase64) + +func isModifiedBase64(r byte) bool { + if r >= 'A' && r <= 'Z' { + return true + } else if r >= 'a' && r <= 'z' { + return true + } else if r >= '0' && r <= '9' { + return true + } else if r == '+' || r == '/' { + return true + } + return false + // bs := []byte(modifiedbase64) + // for _, b := range bs { + // if b == r { + // return true + // } + // } + // return false +} + +type utf7Decoder struct { + transform.NopResetter +} + +func (d utf7Decoder) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) { + var implicit bool + var tmp int + + nd, n := len(dst), len(src) + if n == 0 && !atEOF { + return 0, 0, transform.ErrShortSrc + } + for ; nSrc < n; nSrc++ { + if nDst >= nd { + return nDst, nSrc, transform.ErrShortDst + } + if c := src[nSrc]; ((c < u7min || c > u7max) && + c != '\t' && c != '\r' && c != '\n') || + c == '~' || c == '\\' { + return nDst, nSrc, ErrBadUTF7 // Illegal code point in ASCII mode + } else if c != '+' { + dst[nDst] = c // character can self represent + nDst++ + continue + } + // found '+' + start := nSrc + 1 + tmp = nSrc // nSrc remain pointing to '+', tmp point to end of BASE64 + // Find the end of the Base64 or "+-" segment + implicit = false + for tmp++; tmp < n && src[tmp] != '-'; tmp++ { + if !isModifiedBase64(src[tmp]) { + if tmp == start { + return nDst, tmp, ErrBadUTF7 // '+' next char must modified base64 + } + // implicit shift back to ASCII - no need '-' character + implicit = true + break + } + } + if tmp == start { + if tmp == n { + // did not find '-' sign and '+' is last character + // total nSrc no include '+' + if atEOF { + return nDst, nSrc, ErrBadUTF7 // '+' can not at the end + } + // '+' can not at the end, so get more data + return nDst, nSrc, transform.ErrShortSrc + } + dst[nDst] = '+' // Escape sequence "+-" + nDst++ + } else if tmp == n && !atEOF { + // no end of BASE64 marker and still has data + // probably the marker at next block of data + // so go get more data. 
+ return nDst, nSrc, transform.ErrShortSrc + } else if b := utf7dec(src[start:tmp]); len(b) > 0 { + if len(b)+nDst > nd { + // need more space on dst for the decoded modified BASE64 unicode + // total nSrc no include '+' + return nDst, nSrc, transform.ErrShortDst + } + copy(dst[nDst:], b) // Control or non-ASCII code points in Base64 + nDst += len(b) + if implicit { + if nDst >= nd { + return nDst, tmp, transform.ErrShortDst + } + dst[nDst] = src[tmp] // implicit shift + nDst++ + } + if tmp == n { + return nDst, tmp, nil + } + } else { + return nDst, nSrc, ErrBadUTF7 // bad encoding + } + nSrc = tmp + } + return +} + +type utf7Encoder struct { + transform.NopResetter +} + +func calcExpectedSize(runeSize int) (round int) { + numerator := runeSize * 17 + round = numerator / 12 + remain := numerator % 12 + if remain >= 6 { + round++ + } + return +} + +func (e utf7Encoder) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) { + var c byte + var b []byte + var endminus, needMoreSrc, needMoreDst, foundASCII, hasRuneStart bool + var tmp, compare, lastRuneStart int + var currentSize, maxRuneStart int + var rn rune + + nd, n := len(dst), len(src) + if n == 0 { + if !atEOF { + return 0, 0, transform.ErrShortSrc + } else { + return 0, 0, nil + } + } + for nSrc = 0; nSrc < n; { + if nDst >= nd { + return nDst, nSrc, transform.ErrShortDst + } + c = src[nSrc] + if canSelf(c) { + nSrc++ + dst[nDst] = c + nDst++ + continue + } else if c == '+' { + if nDst+2 > nd { + return nDst, nSrc, transform.ErrShortDst + } + nSrc++ + dst[nDst], dst[nDst+1] = '+', '-' + nDst += 2 + continue + } + start := nSrc + tmp = nSrc // nSrc still point to first non-ASCII + currentSize = 0 + maxRuneStart = nSrc + needMoreDst = false + if utf8.RuneStart(src[nSrc]) { + hasRuneStart = true + } else { + hasRuneStart = false + } + foundASCII = true + for tmp++; tmp < n && !canSelf(src[tmp]) && src[tmp] != '+'; tmp++ { + // if next printable ASCII code point found the loop stop + if utf8.RuneStart(src[tmp]) { + hasRuneStart = true + lastRuneStart = tmp + rn, _ = utf8.DecodeRune(src[maxRuneStart:tmp]) + if rn >= 0x10000 { + currentSize += 4 + } else { + currentSize += 2 + } + if calcExpectedSize(currentSize)+2 > nd-nDst { + needMoreDst = true + } else { + maxRuneStart = tmp + } + } + } + + // following to adjust tmp to right pointer as now tmp can not + // find any good ending (searching end with no result). Adjustment + // base on another earlier feasible valid rune position. 
+ needMoreSrc = false + if tmp == n { + foundASCII = false + if !atEOF { + if !hasRuneStart { + return nDst, nSrc, transform.ErrShortSrc + } else { + //re-adjust tmp to good position to encode + if !utf8.Valid(src[maxRuneStart:]) { + if maxRuneStart == start { + return nDst, nSrc, transform.ErrShortSrc + } + needMoreSrc = true + tmp = maxRuneStart + } + } + } + } + + endminus = false + if hasRuneStart && !needMoreSrc { + // need check if dst enough buffer for transform + rn, _ = utf8.DecodeRune(src[lastRuneStart:tmp]) + if rn >= 0x10000 { + currentSize += 4 + } else { + currentSize += 2 + } + if calcExpectedSize(currentSize)+2 > nd-nDst { + // can not use tmp value as transofrmed size too + // big for dst + endminus = true + needMoreDst = true + tmp = maxRuneStart + } + } + + b = utf7enc(src[start:tmp]) + if len(b) < 2 || b[0] != '+' { + return nDst, nSrc, ErrBadUTF7 // Illegal code point in ASCII mode + } + + if foundASCII { + // printable ASCII found - check if BASE64 type + if isModifiedBase64(src[tmp]) || src[tmp] == '-' { + endminus = true + } + } else { + endminus = true + } + compare = nDst + len(b) + if endminus { + compare++ + } + if compare > nd { + return nDst, nSrc, transform.ErrShortDst + } + copy(dst[nDst:], b) + nDst += len(b) + if endminus { + dst[nDst] = '-' + nDst++ + } + nSrc = tmp + + if needMoreDst { + return nDst, nSrc, transform.ErrShortDst + } + + if needMoreSrc { + return nDst, nSrc, transform.ErrShortSrc + } + } + return +} + +// UTF7Encode converts a string from UTF-8 encoding to modified UTF-7. This +// encoding is used by the Mailbox International Naming Convention (RFC 3501 +// section 5.1.3). Invalid UTF-8 byte sequences are replaced by the Unicode +// replacement code point (U+FFFD). +func UTF7Encode(s string) string { + return string(UTF7EncodeBytes([]byte(s))) +} + +const ( + setD = iota + setO + setRule3 + setInvalid +) + +// get the set of characters group. +func getSetType(c byte) int { + if (c >= 44 && c <= ':') || c == '?' { + return setD + } else if c == 39 || c == '(' || c == ')' { + return setD + } else if c >= 'A' && c <= 'Z' { + return setD + } else if c >= 'a' && c <= 'z' { + return setD + } else if c == '+' || c == '\\' { + return setInvalid + } else if c > ' ' && c < '~' { + return setO + } else if c == ' ' || c == '\t' || + c == '\r' || c == '\n' { + return setRule3 + } + return setInvalid +} + +// Check if can represent by themselves. +func canSelf(c byte) bool { + t := getSetType(c) + if t == setInvalid { + return false + } + return true +} + +// UTF7EncodeBytes converts a byte slice from UTF-8 encoding to modified UTF-7. +func UTF7EncodeBytes(s []byte) []byte { + input := bytes.NewReader(s) + reader := transform.NewReader(input, UTF7.NewEncoder()) + output, err := ioutil.ReadAll(reader) + if err != nil { + return nil + } + return output +} + +// utf7enc converts string s from UTF-8 to UTF-16-BE, encodes the result as +// Base64, removes the padding, and adds UTF-7 shifts. +func utf7enc(s []byte) []byte { + // len(s) is sufficient for UTF-8 to UTF-16 conversion if there are no + // control code points (see table below). 
+ b := make([]byte, 0, len(s)+4) + for len(s) > 0 { + r, size := utf8.DecodeRune(s) + if r > utf8.MaxRune { + r, size = utf8.RuneError, 1 // Bug fix (issue 3785) + } + s = s[size:] + if r1, r2 := utf16.EncodeRune(r); r1 != uRepl { + //log.Println("surrogate triggered") + b = append(b, byte(r1>>8), byte(r1)) + r = r2 + } + b = append(b, byte(r>>8), byte(r)) + } + + // Encode as Base64 + //n := u7enc.EncodedLen(len(b)) + 2 // plus 2 for prefix '+' and suffix '-' + n := u7enc.EncodedLen(len(b)) + 1 // plus for prefix '+' + b64 := make([]byte, n) + u7enc.Encode(b64[1:], b) + + // Strip padding + n -= 2 - (len(b)+2)%3 + b64 = b64[:n] + + // Add UTF-7 shifts + b64[0] = '+' + //b64[n-1] = '-' + return b64 +} + +// UTF7Decode converts a string from modified UTF-7 encoding to UTF-8. +func UTF7Decode(u string) (s string, err error) { + b, err := UTF7DecodeBytes([]byte(u)) + s = string(b) + return +} + +// UTF7DecodeBytes converts a byte slice from modified UTF-7 encoding to UTF-8. +func UTF7DecodeBytes(u []byte) ([]byte, error) { + input := bytes.NewReader([]byte(u)) + reader := transform.NewReader(input, UTF7.NewDecoder()) + output, err := ioutil.ReadAll(reader) + if err != nil { + return nil, err + } + return output, nil +} + +// utf7dec extracts UTF-16-BE bytes from Base64 data and converts them to UTF-8. +// A nil slice is returned if the encoding is invalid. +func utf7dec(b64 []byte) []byte { + var b []byte + + // Allocate a single block of memory large enough to store the Base64 data + // (if padding is required), UTF-16-BE bytes, and decoded UTF-8 bytes. + // Since a 2-byte UTF-16 sequence may expand into a 3-byte UTF-8 sequence, + // double the space allocation for UTF-8. + if n := len(b64); b64[n-1] == '=' { + return nil + } else if n&3 == 0 { + b = make([]byte, u7enc.DecodedLen(n)*3) + } else { + n += 4 - n&3 + b = make([]byte, n+u7enc.DecodedLen(n)*3) + copy(b[copy(b, b64):n], []byte("==")) + b64, b = b[:n], b[n:] + } + + // Decode Base64 into the first 1/3rd of b + n, err := u7enc.Decode(b, b64) + if err != nil || n&1 == 1 { + return nil + } + + // Decode UTF-16-BE into the remaining 2/3rds of b + b, s := b[:n], b[n:] + j := 0 + for i := 0; i < n; i += 2 { + r := rune(b[i])<<8 | rune(b[i+1]) + if utf16.IsSurrogate(r) { + if i += 2; i == n { + //log.Println("surrogate error1!") + return nil + } + r2 := rune(b[i])<<8 | rune(b[i+1]) + //log.Printf("surrogate! 0x%04X 0x%04X\n", r, r2) + if r = utf16.DecodeRune(r, r2); r == uRepl { + return nil + } + } + j += utf8.EncodeRune(s[j:], r) + } + return s[:j] +} + +/* +The following table shows the number of bytes required to encode each code point +in the specified range using UTF-8 and UTF-16 representations: + ++-----------------+-------+--------+ +| Code points | UTF-8 | UTF-16 | ++-----------------+-------+--------+ +| 000000 - 00007F | 1 | 2 | +| 000080 - 0007FF | 2 | 2 | +| 000800 - 00FFFF | 3 | 2 | +| 010000 - 10FFFF | 4 | 4 | ++-----------------+-------+--------+ + +Source: http://en.wikipedia.org/wiki/Comparison_of_Unicode_encodings +*/ diff --git a/vendor/github.com/emersion/go-imap/v2/.build.yml b/vendor/github.com/emersion/go-imap/v2/.build.yml new file mode 100644 index 0000000000..7166ba620f --- /dev/null +++ b/vendor/github.com/emersion/go-imap/v2/.build.yml @@ -0,0 +1,19 @@ +image: alpine/latest +packages: + - dovecot + - go +sources: + - https://github.com/emersion/go-imap#v2 +tasks: + - build: | + cd go-imap + go build -race -v ./... + - test: | + cd go-imap + go test -race ./... 
+ - test-dovecot: | + cd go-imap + GOIMAP_TEST_DOVECOT=1 go test -race ./imapclient + - gofmt: | + cd go-imap + test -z $(gofmt -l .) diff --git a/vendor/github.com/emersion/go-imap/v2/LICENSE b/vendor/github.com/emersion/go-imap/v2/LICENSE new file mode 100644 index 0000000000..d6718dcf26 --- /dev/null +++ b/vendor/github.com/emersion/go-imap/v2/LICENSE @@ -0,0 +1,23 @@ +The MIT License (MIT) + +Copyright (c) 2013 The Go-IMAP Authors +Copyright (c) 2016 Proton Technologies AG +Copyright (c) 2023 Simon Ser + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/emersion/go-imap/v2/README.md b/vendor/github.com/emersion/go-imap/v2/README.md new file mode 100644 index 0000000000..c84fdb927d --- /dev/null +++ b/vendor/github.com/emersion/go-imap/v2/README.md @@ -0,0 +1,29 @@ +# go-imap + +[![Go Reference](https://pkg.go.dev/badge/github.com/emersion/go-imap/v2.svg)](https://pkg.go.dev/github.com/emersion/go-imap/v2) + +An [IMAP4rev2] library for Go. + +> **Note** +> This is the README for go-imap v2. This new major version is still in +> development. For go-imap v1, see the [v1 branch]. + +## Usage + +To add go-imap to your project, run: + + go get github.com/emersion/go-imap/v2 + +Documentation and examples for the module are available here: + +- [Client docs] +- [Server docs] + +## License + +MIT + +[IMAP4rev2]: https://www.rfc-editor.org/rfc/rfc9051.html +[v1 branch]: https://github.com/emersion/go-imap/tree/v1 +[Client docs]: https://pkg.go.dev/github.com/emersion/go-imap/v2/imapclient +[Server docs]: https://pkg.go.dev/github.com/emersion/go-imap/v2/imapserver diff --git a/vendor/github.com/emersion/go-imap/v2/acl.go b/vendor/github.com/emersion/go-imap/v2/acl.go new file mode 100644 index 0000000000..4d9431e9db --- /dev/null +++ b/vendor/github.com/emersion/go-imap/v2/acl.go @@ -0,0 +1,104 @@ +package imap + +import ( + "fmt" + "strings" +) + +// IMAP4 ACL extension (RFC 2086) + +// Right describes a set of operations controlled by the IMAP ACL extension. 
+type Right byte + +const ( + // Standard rights + RightLookup = Right('l') // mailbox is visible to LIST/LSUB commands + RightRead = Right('r') // SELECT the mailbox, perform CHECK, FETCH, PARTIAL, SEARCH, COPY from mailbox + RightSeen = Right('s') // keep seen/unseen information across sessions (STORE SEEN flag) + RightWrite = Right('w') // STORE flags other than SEEN and DELETED + RightInsert = Right('i') // perform APPEND, COPY into mailbox + RightPost = Right('p') // send mail to submission address for mailbox, not enforced by IMAP4 itself + RightCreate = Right('c') // CREATE new sub-mailboxes in any implementation-defined hierarchy + RightDelete = Right('d') // STORE DELETED flag, perform EXPUNGE + RightAdminister = Right('a') // perform SETACL +) + +// RightSetAll contains all standard rights. +var RightSetAll = RightSet("lrswipcda") + +// RightsIdentifier is an ACL identifier. +type RightsIdentifier string + +// RightsIdentifierAnyone is the universal identity (matches everyone). +const RightsIdentifierAnyone = RightsIdentifier("anyone") + +// NewRightsIdentifierUsername returns a rights identifier referring to a +// username, checking for reserved values. +func NewRightsIdentifierUsername(username string) (RightsIdentifier, error) { + if username == string(RightsIdentifierAnyone) || strings.HasPrefix(username, "-") { + return "", fmt.Errorf("imap: reserved rights identifier") + } + return RightsIdentifier(username), nil +} + +// RightModification indicates how to mutate a right set. +type RightModification byte + +const ( + RightModificationReplace = RightModification(0) + RightModificationAdd = RightModification('+') + RightModificationRemove = RightModification('-') +) + +// A RightSet is a set of rights. +type RightSet []Right + +// String returns a string representation of the right set. +func (r RightSet) String() string { + return string(r) +} + +// Add returns a new right set containing rights from both sets. +func (r RightSet) Add(rights RightSet) RightSet { + newRights := make(RightSet, len(r), len(r)+len(rights)) + copy(newRights, r) + + for _, right := range rights { + if !strings.ContainsRune(string(r), rune(right)) { + newRights = append(newRights, right) + } + } + + return newRights +} + +// Remove returns a new right set containing all rights in r except these in +// the provided set. +func (r RightSet) Remove(rights RightSet) RightSet { + newRights := make(RightSet, 0, len(r)) + + for _, right := range r { + if !strings.ContainsRune(string(rights), rune(right)) { + newRights = append(newRights, right) + } + } + + return newRights +} + +// Equal returns true if both right sets contain exactly the same rights. +func (rs1 RightSet) Equal(rs2 RightSet) bool { + for _, r := range rs1 { + if !strings.ContainsRune(string(rs2), rune(r)) { + return false + } + } + + for _, r := range rs2 { + if !strings.ContainsRune(string(rs1), rune(r)) { + return false + } + } + + return true +} diff --git a/vendor/github.com/emersion/go-imap/v2/append.go b/vendor/github.com/emersion/go-imap/v2/append.go new file mode 100644 index 0000000000..13d887fba2 --- /dev/null +++ b/vendor/github.com/emersion/go-imap/v2/append.go @@ -0,0 +1,18 @@ +package imap + +import ( + "time" +) + +// AppendOptions contains options for the APPEND command. +type AppendOptions struct { + Flags []Flag + Time time.Time +} + +// AppendData is the data returned by an APPEND command. 
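The ACL helpers above are plain value types; a small illustrative sketch of building and comparing a right set, importing the module shown in this hunk (its package name is imap):

    package main

    import (
        "fmt"

        "github.com/emersion/go-imap/v2"
    )

    func main() {
        rights := imap.RightSet("lrs")
        rights = rights.Add(imap.RightSet{imap.RightWrite, imap.RightInsert}) // "lrswi"
        rights = rights.Remove(imap.RightSet{imap.RightSeen})                 // "lrwi"

        fmt.Println(rights.String())                     // "lrwi"
        fmt.Println(rights.Equal(imap.RightSet("wilr"))) // true: comparison ignores order
    }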
+type AppendData struct { + // requires UIDPLUS or IMAP4rev2 + UID UID + UIDValidity uint32 +} diff --git a/vendor/github.com/emersion/go-imap/v2/capability.go b/vendor/github.com/emersion/go-imap/v2/capability.go new file mode 100644 index 0000000000..0b84c5f24a --- /dev/null +++ b/vendor/github.com/emersion/go-imap/v2/capability.go @@ -0,0 +1,205 @@ +package imap + +import ( + "strconv" + "strings" +) + +// Cap represents an IMAP capability. +type Cap string + +// Registered capabilities. +// +// See: https://www.iana.org/assignments/imap-capabilities/ +const ( + CapIMAP4rev1 Cap = "IMAP4rev1" // RFC 3501 + CapIMAP4rev2 Cap = "IMAP4rev2" // RFC 9051 + + CapAuthPlain Cap = "AUTH=PLAIN" + + CapStartTLS Cap = "STARTTLS" + CapLoginDisabled Cap = "LOGINDISABLED" + + // Folded in IMAP4rev2 + CapNamespace Cap = "NAMESPACE" // RFC 2342 + CapUnselect Cap = "UNSELECT" // RFC 3691 + CapUIDPlus Cap = "UIDPLUS" // RFC 4315 + CapESearch Cap = "ESEARCH" // RFC 4731 + CapSearchRes Cap = "SEARCHRES" // RFC 5182 + CapEnable Cap = "ENABLE" // RFC 5161 + CapIdle Cap = "IDLE" // RFC 2177 + CapSASLIR Cap = "SASL-IR" // RFC 4959 + CapListExtended Cap = "LIST-EXTENDED" // RFC 5258 + CapListStatus Cap = "LIST-STATUS" // RFC 5819 + CapMove Cap = "MOVE" // RFC 6851 + CapLiteralMinus Cap = "LITERAL-" // RFC 7888 + CapStatusSize Cap = "STATUS=SIZE" // RFC 8438 + + CapACL Cap = "ACL" // RFC 4314 + CapAppendLimit Cap = "APPENDLIMIT" // RFC 7889 + CapBinary Cap = "BINARY" // RFC 3516 + CapCatenate Cap = "CATENATE" // RFC 4469 + CapChildren Cap = "CHILDREN" // RFC 3348 + CapCondStore Cap = "CONDSTORE" // RFC 7162 + CapConvert Cap = "CONVERT" // RFC 5259 + CapCreateSpecialUse Cap = "CREATE-SPECIAL-USE" // RFC 6154 + CapESort Cap = "ESORT" // RFC 5267 + CapFilters Cap = "FILTERS" // RFC 5466 + CapID Cap = "ID" // RFC 2971 + CapLanguage Cap = "LANGUAGE" // RFC 5255 + CapListMyRights Cap = "LIST-MYRIGHTS" // RFC 8440 + CapLiteralPlus Cap = "LITERAL+" // RFC 7888 + CapLoginReferrals Cap = "LOGIN-REFERRALS" // RFC 2221 + CapMailboxReferrals Cap = "MAILBOX-REFERRALS" // RFC 2193 + CapMetadata Cap = "METADATA" // RFC 5464 + CapMetadataServer Cap = "METADATA-SERVER" // RFC 5464 + CapMultiAppend Cap = "MULTIAPPEND" // RFC 3502 + CapMultiSearch Cap = "MULTISEARCH" // RFC 7377 + CapNotify Cap = "NOTIFY" // RFC 5465 + CapObjectID Cap = "OBJECTID" // RFC 8474 + CapPreview Cap = "PREVIEW" // RFC 8970 + CapQResync Cap = "QRESYNC" // RFC 7162 + CapQuota Cap = "QUOTA" // RFC 9208 + CapQuotaSet Cap = "QUOTASET" // RFC 9208 + CapReplace Cap = "REPLACE" // RFC 8508 + CapSaveDate Cap = "SAVEDATE" // RFC 8514 + CapSearchFuzzy Cap = "SEARCH=FUZZY" // RFC 6203 + CapSort Cap = "SORT" // RFC 5256 + CapSortDisplay Cap = "SORT=DISPLAY" // RFC 5957 + CapSpecialUse Cap = "SPECIAL-USE" // RFC 6154 + CapUnauthenticate Cap = "UNAUTHENTICATE" // RFC 8437 + CapURLPartial Cap = "URL-PARTIAL" // RFC 5550 + CapURLAuth Cap = "URLAUTH" // RFC 4467 + CapUTF8Accept Cap = "UTF8=ACCEPT" // RFC 6855 + CapUTF8Only Cap = "UTF8=ONLY" // RFC 6855 + CapWithin Cap = "WITHIN" // RFC 5032 + CapUIDOnly Cap = "UIDONLY" // RFC 9586 + CapListMetadata Cap = "LIST-METADATA" // RFC 9590 + CapInProgress Cap = "INPROGRESS" // RFC 9585 +) + +var imap4rev2Caps = CapSet{ + CapNamespace: {}, + CapUnselect: {}, + CapUIDPlus: {}, + CapESearch: {}, + CapSearchRes: {}, + CapEnable: {}, + CapIdle: {}, + CapSASLIR: {}, + CapListExtended: {}, + CapListStatus: {}, + CapMove: {}, + CapLiteralMinus: {}, + CapStatusSize: {}, +} + +// AuthCap returns the capability name for an SASL authentication 
mechanism. +func AuthCap(mechanism string) Cap { + return Cap("AUTH=" + mechanism) +} + +// CapSet is a set of capabilities. +type CapSet map[Cap]struct{} + +func (set CapSet) has(c Cap) bool { + _, ok := set[c] + return ok +} + +// Has checks whether a capability is supported. +// +// Some capabilities are implied by others, as such Has may return true even if +// the capability is not in the map. +func (set CapSet) Has(c Cap) bool { + if set.has(c) { + return true + } + + if set.has(CapIMAP4rev2) && imap4rev2Caps.has(c) { + return true + } + + if c == CapLiteralMinus && set.has(CapLiteralPlus) { + return true + } + if c == CapCondStore && set.has(CapQResync) { + return true + } + if c == CapUTF8Accept && set.has(CapUTF8Only) { + return true + } + if c == CapAppendLimit { + _, ok := set.AppendLimit() + return ok + } + + return false +} + +// AuthMechanisms returns the list of supported SASL mechanisms for +// authentication. +func (set CapSet) AuthMechanisms() []string { + var l []string + for c := range set { + if !strings.HasPrefix(string(c), "AUTH=") { + continue + } + mech := strings.TrimPrefix(string(c), "AUTH=") + l = append(l, mech) + } + return l +} + +// AppendLimit checks the APPENDLIMIT capability. +// +// If the server supports APPENDLIMIT, ok is true. If the server doesn't have +// the same upload limit for all mailboxes, limit is nil and per-mailbox +// limits must be queried via STATUS. +func (set CapSet) AppendLimit() (limit *uint32, ok bool) { + if set.has(CapAppendLimit) { + return nil, true + } + + for c := range set { + if !strings.HasPrefix(string(c), "APPENDLIMIT=") { + continue + } + + limitStr := strings.TrimPrefix(string(c), "APPENDLIMIT=") + limit64, err := strconv.ParseUint(limitStr, 10, 32) + if err == nil && limit64 > 0 { + limit32 := uint32(limit64) + return &limit32, true + } + } + + limit32 := ^uint32(0) + return &limit32, false +} + +// QuotaResourceTypes returns the list of supported QUOTA resource types. +func (set CapSet) QuotaResourceTypes() []QuotaResourceType { + var l []QuotaResourceType + for c := range set { + if !strings.HasPrefix(string(c), "QUOTA=RES-") { + continue + } + t := strings.TrimPrefix(string(c), "QUOTA=RES-") + l = append(l, QuotaResourceType(t)) + } + return l +} + +// ThreadAlgorithms returns the list of supported threading algorithms. +func (set CapSet) ThreadAlgorithms() []ThreadAlgorithm { + var l []ThreadAlgorithm + for c := range set { + if !strings.HasPrefix(string(c), "THREAD=") { + continue + } + alg := strings.TrimPrefix(string(c), "THREAD=") + l = append(l, ThreadAlgorithm(alg)) + } + return l +} diff --git a/vendor/github.com/emersion/go-imap/v2/copy.go b/vendor/github.com/emersion/go-imap/v2/copy.go new file mode 100644 index 0000000000..f685a6092d --- /dev/null +++ b/vendor/github.com/emersion/go-imap/v2/copy.go @@ -0,0 +1,9 @@ +package imap + +// CopyData is the data returned by a COPY command. +type CopyData struct { + // requires UIDPLUS or IMAP4rev2 + UIDValidity uint32 + SourceUIDs UIDSet + DestUIDs UIDSet +} diff --git a/vendor/github.com/emersion/go-imap/v2/create.go b/vendor/github.com/emersion/go-imap/v2/create.go new file mode 100644 index 0000000000..09e8bc4a76 --- /dev/null +++ b/vendor/github.com/emersion/go-imap/v2/create.go @@ -0,0 +1,6 @@ +package imap + +// CreateOptions contains options for the CREATE command. 
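Not part of the vendored patch: a minimal sketch of how the CapSet helpers above behave. The capability strings are illustrative values, not output from a real server.

package main

import (
	"fmt"

	"github.com/emersion/go-imap/v2"
)

func main() {
	caps := imap.CapSet{
		imap.CapIMAP4rev2:                {},
		imap.AuthCap("PLAIN"):            {},
		imap.Cap("APPENDLIMIT=35882577"): {},
	}

	// IMAP4rev2 folds in UIDPLUS (see imap4rev2Caps), so Has reports it even
	// though the server did not advertise it explicitly.
	fmt.Println(caps.Has(imap.CapUIDPlus)) // true
	fmt.Println(caps.AuthMechanisms())     // [PLAIN]

	// APPENDLIMIT=<n> advertises a uniform upload limit for all mailboxes.
	if limit, ok := caps.AppendLimit(); ok && limit != nil {
		fmt.Println(*limit) // 35882577
	}
}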
+type CreateOptions struct { + SpecialUse []MailboxAttr // requires CREATE-SPECIAL-USE +} diff --git a/vendor/github.com/emersion/go-imap/v2/fetch.go b/vendor/github.com/emersion/go-imap/v2/fetch.go new file mode 100644 index 0000000000..f146c89712 --- /dev/null +++ b/vendor/github.com/emersion/go-imap/v2/fetch.go @@ -0,0 +1,284 @@ +package imap + +import ( + "fmt" + "strings" + "time" +) + +// FetchOptions contains options for the FETCH command. +type FetchOptions struct { + // Fields to fetch + BodyStructure *FetchItemBodyStructure + Envelope bool + Flags bool + InternalDate bool + RFC822Size bool + UID bool + BodySection []*FetchItemBodySection + BinarySection []*FetchItemBinarySection // requires IMAP4rev2 or BINARY + BinarySectionSize []*FetchItemBinarySectionSize // requires IMAP4rev2 or BINARY + ModSeq bool // requires CONDSTORE + + ChangedSince uint64 // requires CONDSTORE +} + +// FetchItemBodyStructure contains FETCH options for the body structure. +type FetchItemBodyStructure struct { + Extended bool +} + +// PartSpecifier describes whether to fetch a part's header, body, or both. +type PartSpecifier string + +const ( + PartSpecifierNone PartSpecifier = "" + PartSpecifierHeader PartSpecifier = "HEADER" + PartSpecifierMIME PartSpecifier = "MIME" + PartSpecifierText PartSpecifier = "TEXT" +) + +// SectionPartial describes a byte range when fetching a message's payload. +type SectionPartial struct { + Offset, Size int64 +} + +// FetchItemBodySection is a FETCH BODY[] data item. +// +// To fetch the whole body of a message, use the zero FetchItemBodySection: +// +// imap.FetchItemBodySection{} +// +// To fetch only a specific part, use the Part field: +// +// imap.FetchItemBodySection{Part: []int{1, 2, 3}} +// +// To fetch only the header of the message, use the Specifier field: +// +// imap.FetchItemBodySection{Specifier: imap.PartSpecifierHeader} +type FetchItemBodySection struct { + Specifier PartSpecifier + Part []int + HeaderFields []string + HeaderFieldsNot []string + Partial *SectionPartial + Peek bool +} + +// FetchItemBinarySection is a FETCH BINARY[] data item. +type FetchItemBinarySection struct { + Part []int + Partial *SectionPartial + Peek bool +} + +// FetchItemBinarySectionSize is a FETCH BINARY.SIZE[] data item. +type FetchItemBinarySectionSize struct { + Part []int +} + +// Envelope is the envelope structure of a message. +// +// The subject and addresses are UTF-8 (ie, not in their encoded form). The +// In-Reply-To and Message-ID values contain message identifiers without angle +// brackets. +type Envelope struct { + Date time.Time + Subject string + From []Address + Sender []Address + ReplyTo []Address + To []Address + Cc []Address + Bcc []Address + InReplyTo []string + MessageID string +} + +// Address represents a sender or recipient of a message. +type Address struct { + Name string + Mailbox string + Host string +} + +// Addr returns the e-mail address in the form "foo@example.org". +// +// If the address is a start or end of group, the empty string is returned. +func (addr *Address) Addr() string { + if addr.Mailbox == "" || addr.Host == "" { + return "" + } + return addr.Mailbox + "@" + addr.Host +} + +// IsGroupStart returns true if this address is a start of group marker. +// +// In that case, Mailbox contains the group name phrase. +func (addr *Address) IsGroupStart() bool { + return addr.Host == "" && addr.Mailbox != "" +} + +// IsGroupEnd returns true if this address is a end of group marker. 
+func (addr *Address) IsGroupEnd() bool { + return addr.Host == "" && addr.Mailbox == "" +} + +// BodyStructure describes the body structure of a message. +// +// A BodyStructure value is either a *BodyStructureSinglePart or a +// *BodyStructureMultiPart. +type BodyStructure interface { + // MediaType returns the MIME type of this body structure, e.g. "text/plain". + MediaType() string + // Walk walks the body structure tree, calling f for each part in the tree, + // including bs itself. The parts are visited in DFS pre-order. + Walk(f BodyStructureWalkFunc) + // Disposition returns the body structure disposition, if available. + Disposition() *BodyStructureDisposition + + bodyStructure() +} + +var ( + _ BodyStructure = (*BodyStructureSinglePart)(nil) + _ BodyStructure = (*BodyStructureMultiPart)(nil) +) + +// BodyStructureSinglePart is a body structure with a single part. +type BodyStructureSinglePart struct { + Type, Subtype string + Params map[string]string + ID string + Description string + Encoding string + Size uint32 + + MessageRFC822 *BodyStructureMessageRFC822 // only for "message/rfc822" + Text *BodyStructureText // only for "text/*" + Extended *BodyStructureSinglePartExt +} + +func (bs *BodyStructureSinglePart) MediaType() string { + return strings.ToLower(bs.Type) + "/" + strings.ToLower(bs.Subtype) +} + +func (bs *BodyStructureSinglePart) Walk(f BodyStructureWalkFunc) { + f([]int{1}, bs) +} + +func (bs *BodyStructureSinglePart) Disposition() *BodyStructureDisposition { + if bs.Extended == nil { + return nil + } + return bs.Extended.Disposition +} + +// Filename decodes the body structure's filename, if any. +func (bs *BodyStructureSinglePart) Filename() string { + var filename string + if bs.Extended != nil && bs.Extended.Disposition != nil { + filename = bs.Extended.Disposition.Params["filename"] + } + if filename == "" { + // Note: using "name" in Content-Type is discouraged + filename = bs.Params["name"] + } + return filename +} + +func (*BodyStructureSinglePart) bodyStructure() {} + +// BodyStructureMessageRFC822 contains metadata specific to RFC 822 parts for +// BodyStructureSinglePart. +type BodyStructureMessageRFC822 struct { + Envelope *Envelope + BodyStructure BodyStructure + NumLines int64 +} + +// BodyStructureText contains metadata specific to text parts for +// BodyStructureSinglePart. +type BodyStructureText struct { + NumLines int64 +} + +// BodyStructureSinglePartExt contains extended body structure data for +// BodyStructureSinglePart. +type BodyStructureSinglePartExt struct { + Disposition *BodyStructureDisposition + Language []string + Location string +} + +// BodyStructureMultiPart is a body structure with multiple parts. 
+type BodyStructureMultiPart struct { + Children []BodyStructure + Subtype string + + Extended *BodyStructureMultiPartExt +} + +func (bs *BodyStructureMultiPart) MediaType() string { + return "multipart/" + strings.ToLower(bs.Subtype) +} + +func (bs *BodyStructureMultiPart) Walk(f BodyStructureWalkFunc) { + bs.walk(f, nil) +} + +func (bs *BodyStructureMultiPart) walk(f BodyStructureWalkFunc, path []int) { + if !f(path, bs) { + return + } + + pathBuf := make([]int, len(path)) + copy(pathBuf, path) + for i, part := range bs.Children { + num := i + 1 + partPath := append(pathBuf, num) + + switch part := part.(type) { + case *BodyStructureSinglePart: + f(partPath, part) + case *BodyStructureMultiPart: + part.walk(f, partPath) + default: + panic(fmt.Errorf("unsupported body structure type %T", part)) + } + } +} + +func (bs *BodyStructureMultiPart) Disposition() *BodyStructureDisposition { + if bs.Extended == nil { + return nil + } + return bs.Extended.Disposition +} + +func (*BodyStructureMultiPart) bodyStructure() {} + +// BodyStructureMultiPartExt contains extended body structure data for +// BodyStructureMultiPart. +type BodyStructureMultiPartExt struct { + Params map[string]string + Disposition *BodyStructureDisposition + Language []string + Location string +} + +// BodyStructureDisposition describes the content disposition of a part +// (specified in the Content-Disposition header field). +type BodyStructureDisposition struct { + Value string + Params map[string]string +} + +// BodyStructureWalkFunc is a function called for each body structure visited +// by BodyStructure.Walk. +// +// The path argument contains the IMAP part path. +// +// The function should return true to visit all of the part's children or false +// to skip them. +type BodyStructureWalkFunc func(path []int, part BodyStructure) (walkChildren bool) diff --git a/vendor/github.com/emersion/go-imap/v2/id.go b/vendor/github.com/emersion/go-imap/v2/id.go new file mode 100644 index 0000000000..de7ca0e12c --- /dev/null +++ b/vendor/github.com/emersion/go-imap/v2/id.go @@ -0,0 +1,15 @@ +package imap + +type IDData struct { + Name string + Version string + OS string + OSVersion string + Vendor string + SupportURL string + Address string + Date string + Command string + Arguments string + Environment string +} diff --git a/vendor/github.com/emersion/go-imap/v2/imap.go b/vendor/github.com/emersion/go-imap/v2/imap.go new file mode 100644 index 0000000000..7b4335718d --- /dev/null +++ b/vendor/github.com/emersion/go-imap/v2/imap.go @@ -0,0 +1,105 @@ +// Package imap implements IMAP4rev2. +// +// IMAP4rev2 is defined in RFC 9051. +// +// This package contains types and functions common to both the client and +// server. See the imapclient and imapserver sub-packages. +package imap + +import ( + "fmt" + "io" +) + +// ConnState describes the connection state. +// +// See RFC 9051 section 3. +type ConnState int + +const ( + ConnStateNone ConnState = iota + ConnStateNotAuthenticated + ConnStateAuthenticated + ConnStateSelected + ConnStateLogout +) + +// String implements fmt.Stringer. +func (state ConnState) String() string { + switch state { + case ConnStateNone: + return "none" + case ConnStateNotAuthenticated: + return "not authenticated" + case ConnStateAuthenticated: + return "authenticated" + case ConnStateSelected: + return "selected" + case ConnStateLogout: + return "logout" + default: + panic(fmt.Errorf("imap: unknown connection state %v", int(state))) + } +} + +// MailboxAttr is a mailbox attribute. 
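Editorial sketch, not vendored code: walking a body structure with Walk and Filename from the types above to collect attachment names. It assumes bs was obtained from a FETCH BODYSTRUCTURE response.

package example

import "github.com/emersion/go-imap/v2"

// attachmentNames walks a body structure tree and returns the decoded
// filenames of all leaf parts that carry one.
func attachmentNames(bs imap.BodyStructure) []string {
	var names []string
	bs.Walk(func(path []int, part imap.BodyStructure) bool {
		if single, ok := part.(*imap.BodyStructureSinglePart); ok {
			if name := single.Filename(); name != "" {
				names = append(names, name)
			}
		}
		return true // descend into nested multiparts
	})
	return names
}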
+// +// Mailbox attributes are defined in RFC 9051 section 7.3.1. +type MailboxAttr string + +const ( + // Base attributes + MailboxAttrNonExistent MailboxAttr = "\\NonExistent" + MailboxAttrNoInferiors MailboxAttr = "\\Noinferiors" + MailboxAttrNoSelect MailboxAttr = "\\Noselect" + MailboxAttrHasChildren MailboxAttr = "\\HasChildren" + MailboxAttrHasNoChildren MailboxAttr = "\\HasNoChildren" + MailboxAttrMarked MailboxAttr = "\\Marked" + MailboxAttrUnmarked MailboxAttr = "\\Unmarked" + MailboxAttrSubscribed MailboxAttr = "\\Subscribed" + MailboxAttrRemote MailboxAttr = "\\Remote" + + // Role (aka. "special-use") attributes + MailboxAttrAll MailboxAttr = "\\All" + MailboxAttrArchive MailboxAttr = "\\Archive" + MailboxAttrDrafts MailboxAttr = "\\Drafts" + MailboxAttrFlagged MailboxAttr = "\\Flagged" + MailboxAttrJunk MailboxAttr = "\\Junk" + MailboxAttrSent MailboxAttr = "\\Sent" + MailboxAttrTrash MailboxAttr = "\\Trash" + MailboxAttrImportant MailboxAttr = "\\Important" // RFC 8457 +) + +// Flag is a message flag. +// +// Message flags are defined in RFC 9051 section 2.3.2. +type Flag string + +const ( + // System flags + FlagSeen Flag = "\\Seen" + FlagAnswered Flag = "\\Answered" + FlagFlagged Flag = "\\Flagged" + FlagDeleted Flag = "\\Deleted" + FlagDraft Flag = "\\Draft" + + // Widely used flags + FlagForwarded Flag = "$Forwarded" + FlagMDNSent Flag = "$MDNSent" // Message Disposition Notification sent + FlagJunk Flag = "$Junk" + FlagNotJunk Flag = "$NotJunk" + FlagPhishing Flag = "$Phishing" + FlagImportant Flag = "$Important" // RFC 8457 + + // Permanent flags + FlagWildcard Flag = "\\*" +) + +// LiteralReader is a reader for IMAP literals. +type LiteralReader interface { + io.Reader + Size() int64 +} + +// UID is a message unique identifier. +type UID uint32 diff --git a/vendor/github.com/emersion/go-imap/v2/imapclient/acl.go b/vendor/github.com/emersion/go-imap/v2/imapclient/acl.go new file mode 100644 index 0000000000..b20be3b79d --- /dev/null +++ b/vendor/github.com/emersion/go-imap/v2/imapclient/acl.go @@ -0,0 +1,138 @@ +package imapclient + +import ( + "fmt" + + "github.com/emersion/go-imap/v2" + "github.com/emersion/go-imap/v2/internal" + "github.com/emersion/go-imap/v2/internal/imapwire" +) + +// MyRights sends a MYRIGHTS command. +// +// This command requires support for the ACL extension. +func (c *Client) MyRights(mailbox string) *MyRightsCommand { + cmd := &MyRightsCommand{} + enc := c.beginCommand("MYRIGHTS", cmd) + enc.SP().Mailbox(mailbox) + enc.end() + return cmd +} + +// SetACL sends a SETACL command. +// +// This command requires support for the ACL extension. +func (c *Client) SetACL(mailbox string, ri imap.RightsIdentifier, rm imap.RightModification, rs imap.RightSet) *SetACLCommand { + cmd := &SetACLCommand{} + enc := c.beginCommand("SETACL", cmd) + enc.SP().Mailbox(mailbox).SP().String(string(ri)).SP() + enc.String(internal.FormatRights(rm, rs)) + enc.end() + return cmd +} + +// SetACLCommand is a SETACL command. +type SetACLCommand struct { + commandBase +} + +func (cmd *SetACLCommand) Wait() error { + return cmd.wait() +} + +// GetACL sends a GETACL command. +// +// This command requires support for the ACL extension. +func (c *Client) GetACL(mailbox string) *GetACLCommand { + cmd := &GetACLCommand{} + enc := c.beginCommand("GETACL", cmd) + enc.SP().Mailbox(mailbox) + enc.end() + return cmd +} + +// GetACLCommand is a GETACL command. 
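A usage sketch for the ACL commands above (editorial, not part of the vendored sources): checking the current user's rights on a mailbox via MYRIGHTS. The rights letters follow RFC 4314; "e" is the EXPUNGE right.

package example

import (
	"strings"

	"github.com/emersion/go-imap/v2/imapclient"
)

// canExpunge reports whether the logged-in user may expunge messages in the
// given mailbox, based on a MYRIGHTS round trip. Requires the ACL extension.
func canExpunge(c *imapclient.Client, mailbox string) (bool, error) {
	data, err := c.MyRights(mailbox).Wait()
	if err != nil {
		return false, err
	}
	return strings.ContainsRune(string(data.Rights), 'e'), nil
}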
+type GetACLCommand struct { + commandBase + data GetACLData +} + +func (cmd *GetACLCommand) Wait() (*GetACLData, error) { + return &cmd.data, cmd.wait() +} + +func (c *Client) handleMyRights() error { + data, err := readMyRights(c.dec) + if err != nil { + return fmt.Errorf("in myrights-response: %v", err) + } + if cmd := findPendingCmdByType[*MyRightsCommand](c); cmd != nil { + cmd.data = *data + } + return nil +} + +func (c *Client) handleGetACL() error { + data, err := readGetACL(c.dec) + if err != nil { + return fmt.Errorf("in getacl-response: %v", err) + } + if cmd := findPendingCmdByType[*GetACLCommand](c); cmd != nil { + cmd.data = *data + } + return nil +} + +// MyRightsCommand is a MYRIGHTS command. +type MyRightsCommand struct { + commandBase + data MyRightsData +} + +func (cmd *MyRightsCommand) Wait() (*MyRightsData, error) { + return &cmd.data, cmd.wait() +} + +// MyRightsData is the data returned by the MYRIGHTS command. +type MyRightsData struct { + Mailbox string + Rights imap.RightSet +} + +func readMyRights(dec *imapwire.Decoder) (*MyRightsData, error) { + var ( + rights string + data MyRightsData + ) + if !dec.ExpectMailbox(&data.Mailbox) || !dec.ExpectSP() || !dec.ExpectAString(&rights) { + return nil, dec.Err() + } + + data.Rights = imap.RightSet(rights) + return &data, nil +} + +// GetACLData is the data returned by the GETACL command. +type GetACLData struct { + Mailbox string + Rights map[imap.RightsIdentifier]imap.RightSet +} + +func readGetACL(dec *imapwire.Decoder) (*GetACLData, error) { + data := &GetACLData{Rights: make(map[imap.RightsIdentifier]imap.RightSet)} + + if !dec.ExpectMailbox(&data.Mailbox) { + return nil, dec.Err() + } + + for dec.SP() { + var rsStr, riStr string + if !dec.ExpectAString(&riStr) || !dec.ExpectSP() || !dec.ExpectAString(&rsStr) { + return nil, dec.Err() + } + + data.Rights[imap.RightsIdentifier(riStr)] = imap.RightSet(rsStr) + } + + return data, nil +} diff --git a/vendor/github.com/emersion/go-imap/v2/imapclient/append.go b/vendor/github.com/emersion/go-imap/v2/imapclient/append.go new file mode 100644 index 0000000000..5bfff23572 --- /dev/null +++ b/vendor/github.com/emersion/go-imap/v2/imapclient/append.go @@ -0,0 +1,58 @@ +package imapclient + +import ( + "io" + + "github.com/emersion/go-imap/v2" + "github.com/emersion/go-imap/v2/internal" +) + +// Append sends an APPEND command. +// +// The caller must call AppendCommand.Close. +// +// The options are optional. +func (c *Client) Append(mailbox string, size int64, options *imap.AppendOptions) *AppendCommand { + cmd := &AppendCommand{} + cmd.enc = c.beginCommand("APPEND", cmd) + cmd.enc.SP().Mailbox(mailbox).SP() + if options != nil && len(options.Flags) > 0 { + cmd.enc.List(len(options.Flags), func(i int) { + cmd.enc.Flag(options.Flags[i]) + }).SP() + } + if options != nil && !options.Time.IsZero() { + cmd.enc.String(options.Time.Format(internal.DateTimeLayout)).SP() + } + // TODO: literal8 for BINARY + // TODO: UTF8 data ext for UTF8=ACCEPT, with literal8 + cmd.wc = cmd.enc.Literal(size) + return cmd +} + +// AppendCommand is an APPEND command. +// +// Callers must write the message contents, then call Close. 
+type AppendCommand struct { + commandBase + enc *commandEncoder + wc io.WriteCloser + data imap.AppendData +} + +func (cmd *AppendCommand) Write(b []byte) (int, error) { + return cmd.wc.Write(b) +} + +func (cmd *AppendCommand) Close() error { + err := cmd.wc.Close() + if cmd.enc != nil { + cmd.enc.end() + cmd.enc = nil + } + return err +} + +func (cmd *AppendCommand) Wait() (*imap.AppendData, error) { + return &cmd.data, cmd.wait() +} diff --git a/vendor/github.com/emersion/go-imap/v2/imapclient/authenticate.go b/vendor/github.com/emersion/go-imap/v2/imapclient/authenticate.go new file mode 100644 index 0000000000..e0f67d0dc0 --- /dev/null +++ b/vendor/github.com/emersion/go-imap/v2/imapclient/authenticate.go @@ -0,0 +1,100 @@ +package imapclient + +import ( + "fmt" + + "github.com/emersion/go-sasl" + + "github.com/emersion/go-imap/v2" + "github.com/emersion/go-imap/v2/internal" +) + +// Authenticate sends an AUTHENTICATE command. +// +// Unlike other commands, this method blocks until the SASL exchange completes. +func (c *Client) Authenticate(saslClient sasl.Client) error { + mech, initialResp, err := saslClient.Start() + if err != nil { + return err + } + + // c.Caps may send a CAPABILITY command, so check it before c.beginCommand + var hasSASLIR bool + if initialResp != nil { + hasSASLIR = c.Caps().Has(imap.CapSASLIR) + } + + cmd := &authenticateCommand{} + contReq := c.registerContReq(cmd) + enc := c.beginCommand("AUTHENTICATE", cmd) + enc.SP().Atom(mech) + if initialResp != nil && hasSASLIR { + enc.SP().Atom(internal.EncodeSASL(initialResp)) + initialResp = nil + } + enc.flush() + defer enc.end() + + for { + challengeStr, err := contReq.Wait() + if err != nil { + return cmd.wait() + } + + if challengeStr == "" { + if initialResp == nil { + return fmt.Errorf("imapclient: server requested SASL initial response, but we don't have one") + } + + contReq = c.registerContReq(cmd) + if err := c.writeSASLResp(initialResp); err != nil { + return err + } + initialResp = nil + continue + } + + challenge, err := internal.DecodeSASL(challengeStr) + if err != nil { + return err + } + + resp, err := saslClient.Next(challenge) + if err != nil { + return err + } + + contReq = c.registerContReq(cmd) + if err := c.writeSASLResp(resp); err != nil { + return err + } + } +} + +type authenticateCommand struct { + commandBase +} + +func (c *Client) writeSASLResp(resp []byte) error { + respStr := internal.EncodeSASL(resp) + if _, err := c.bw.WriteString(respStr + "\r\n"); err != nil { + return err + } + if err := c.bw.Flush(); err != nil { + return err + } + return nil +} + +// Unauthenticate sends an UNAUTHENTICATE command. +// +// This command requires support for the UNAUTHENTICATE extension. +func (c *Client) Unauthenticate() *Command { + cmd := &unauthenticateCommand{} + c.beginCommand("UNAUTHENTICATE", cmd).end() + return &cmd.Command +} + +type unauthenticateCommand struct { + Command +} diff --git a/vendor/github.com/emersion/go-imap/v2/imapclient/capability.go b/vendor/github.com/emersion/go-imap/v2/imapclient/capability.go new file mode 100644 index 0000000000..5e028f1b83 --- /dev/null +++ b/vendor/github.com/emersion/go-imap/v2/imapclient/capability.go @@ -0,0 +1,55 @@ +package imapclient + +import ( + "fmt" + + "github.com/emersion/go-imap/v2" + "github.com/emersion/go-imap/v2/internal/imapwire" +) + +// Capability sends a CAPABILITY command. 
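A sketch (not vendored code) of driving the APPEND command above: write the raw RFC 5322 message into the AppendCommand, close it, then wait for the APPENDUID result. The mailbox name and flags are illustrative; AppendOptions is defined elsewhere in the imap package.

package example

import (
	"github.com/emersion/go-imap/v2"
	"github.com/emersion/go-imap/v2/imapclient"
)

// appendDraft uploads a raw message to the Drafts mailbox and returns the
// UID data reported by the server (requires UIDPLUS or IMAP4rev2).
func appendDraft(c *imapclient.Client, raw []byte) (*imap.AppendData, error) {
	cmd := c.Append("Drafts", int64(len(raw)), &imap.AppendOptions{
		Flags: []imap.Flag{imap.FlagDraft},
	})
	if _, err := cmd.Write(raw); err != nil {
		cmd.Close()
		return nil, err
	}
	if err := cmd.Close(); err != nil {
		return nil, err
	}
	return cmd.Wait()
}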
+func (c *Client) Capability() *CapabilityCommand { + cmd := &CapabilityCommand{} + c.beginCommand("CAPABILITY", cmd).end() + return cmd +} + +func (c *Client) handleCapability() error { + caps, err := readCapabilities(c.dec) + if err != nil { + return err + } + c.setCaps(caps) + if cmd := findPendingCmdByType[*CapabilityCommand](c); cmd != nil { + cmd.caps = caps + } + return nil +} + +// CapabilityCommand is a CAPABILITY command. +type CapabilityCommand struct { + commandBase + caps imap.CapSet +} + +func (cmd *CapabilityCommand) Wait() (imap.CapSet, error) { + err := cmd.wait() + return cmd.caps, err +} + +func readCapabilities(dec *imapwire.Decoder) (imap.CapSet, error) { + caps := make(imap.CapSet) + for dec.SP() { + // Some IMAP servers send multiple SP between caps: + // https://github.com/emersion/go-imap/pull/652 + for dec.SP() { + } + + var name string + if !dec.ExpectAtom(&name) { + return caps, fmt.Errorf("in capability-data: %v", dec.Err()) + } + caps[imap.Cap(name)] = struct{}{} + } + return caps, nil +} diff --git a/vendor/github.com/emersion/go-imap/v2/imapclient/client.go b/vendor/github.com/emersion/go-imap/v2/imapclient/client.go new file mode 100644 index 0000000000..4bdd5c3ffd --- /dev/null +++ b/vendor/github.com/emersion/go-imap/v2/imapclient/client.go @@ -0,0 +1,1215 @@ +// Package imapclient implements an IMAP client. +// +// # Charset decoding +// +// By default, only basic charset decoding is performed. For non-UTF-8 decoding +// of message subjects and e-mail address names, users can set +// Options.WordDecoder. For instance, to use go-message's collection of +// charsets: +// +// import ( +// "mime" +// +// "github.com/emersion/go-message/charset" +// ) +// +// options := &imapclient.Options{ +// WordDecoder: &mime.WordDecoder{CharsetReader: charset.Reader}, +// } +// client, err := imapclient.DialTLS("imap.example.org:993", options) +package imapclient + +import ( + "bufio" + "crypto/tls" + "errors" + "fmt" + "io" + "mime" + "net" + "runtime/debug" + "strconv" + "strings" + "sync" + "time" + + "github.com/emersion/go-imap/v2" + "github.com/emersion/go-imap/v2/internal" + "github.com/emersion/go-imap/v2/internal/imapwire" +) + +const ( + idleReadTimeout = time.Duration(0) + respReadTimeout = 30 * time.Second + literalReadTimeout = 5 * time.Minute + + cmdWriteTimeout = 30 * time.Second + literalWriteTimeout = 5 * time.Minute +) + +var dialer = &net.Dialer{ + Timeout: 30 * time.Second, +} + +// SelectedMailbox contains metadata for the currently selected mailbox. +type SelectedMailbox struct { + Name string + NumMessages uint32 + Flags []imap.Flag + PermanentFlags []imap.Flag +} + +func (mbox *SelectedMailbox) copy() *SelectedMailbox { + copy := *mbox + return &copy +} + +// Options contains options for Client. +type Options struct { + // TLS configuration for use by DialTLS and DialStartTLS. If nil, the + // default configuration is used. + TLSConfig *tls.Config + // Raw ingress and egress data will be written to this writer, if any. + // Note, this may include sensitive information such as credentials used + // during authentication. + DebugWriter io.Writer + // Unilateral data handler. + UnilateralDataHandler *UnilateralDataHandler + // Decoder for RFC 2047 words.
+ WordDecoder *mime.WordDecoder +} + +func (options *Options) wrapReadWriter(rw io.ReadWriter) io.ReadWriter { + if options.DebugWriter == nil { + return rw + } + return struct { + io.Reader + io.Writer + }{ + Reader: io.TeeReader(rw, options.DebugWriter), + Writer: io.MultiWriter(rw, options.DebugWriter), + } +} + +func (options *Options) decodeText(s string) (string, error) { + wordDecoder := options.WordDecoder + if wordDecoder == nil { + wordDecoder = &mime.WordDecoder{} + } + out, err := wordDecoder.DecodeHeader(s) + if err != nil { + return s, err + } + return out, nil +} + +func (options *Options) unilateralDataHandler() *UnilateralDataHandler { + if options.UnilateralDataHandler == nil { + return &UnilateralDataHandler{} + } + return options.UnilateralDataHandler +} + +func (options *Options) tlsConfig() *tls.Config { + if options != nil && options.TLSConfig != nil { + return options.TLSConfig.Clone() + } else { + return new(tls.Config) + } +} + +// Client is an IMAP client. +// +// IMAP commands are exposed as methods. These methods will block until the +// command has been sent to the server, but won't block until the server sends +// a response. They return a command struct which can be used to wait for the +// server response. This can be used to execute multiple commands concurrently, +// however care must be taken to avoid ambiguities. See RFC 9051 section 5.5. +// +// A client can be safely used from multiple goroutines, however this doesn't +// guarantee any command ordering and is subject to the same caveats as command +// pipelining (see above). Additionally, some commands (e.g. StartTLS, +// Authenticate, Idle) block the client during their execution. +type Client struct { + conn net.Conn + options Options + br *bufio.Reader + bw *bufio.Writer + dec *imapwire.Decoder + encMutex sync.Mutex + + greetingCh chan struct{} + greetingRecv bool + greetingErr error + + decCh chan struct{} + decErr error + + mutex sync.Mutex + state imap.ConnState + caps imap.CapSet + enabled imap.CapSet + pendingCapCh chan struct{} + mailbox *SelectedMailbox + cmdTag uint64 + pendingCmds []command + contReqs []continuationRequest + closed bool +} + +// New creates a new IMAP client. +// +// This function doesn't perform I/O. +// +// A nil options pointer is equivalent to a zero options value. +func New(conn net.Conn, options *Options) *Client { + if options == nil { + options = &Options{} + } + + rw := options.wrapReadWriter(conn) + br := bufio.NewReader(rw) + bw := bufio.NewWriter(rw) + + client := &Client{ + conn: conn, + options: *options, + br: br, + bw: bw, + dec: imapwire.NewDecoder(br, imapwire.ConnSideClient), + greetingCh: make(chan struct{}), + decCh: make(chan struct{}), + state: imap.ConnStateNone, + enabled: make(imap.CapSet), + } + go client.read() + return client +} + +// NewStartTLS creates a new IMAP client with STARTTLS. +// +// A nil options pointer is equivalent to a zero options value. +func NewStartTLS(conn net.Conn, options *Options) (*Client, error) { + if options == nil { + options = &Options{} + } + + client := New(conn, options) + if err := client.startTLS(options.TLSConfig); err != nil { + conn.Close() + return nil, err + } + + // Per section 7.1.4, refuse PREAUTH when using STARTTLS + if client.State() != imap.ConnStateNotAuthenticated { + client.Close() + return nil, fmt.Errorf("imapclient: server sent PREAUTH on unencrypted connection") + } + + return client, nil +} + +// DialInsecure connects to an IMAP server without any encryption at all. 
+func DialInsecure(address string, options *Options) (*Client, error) { + conn, err := net.Dial("tcp", address) + if err != nil { + return nil, err + } + return New(conn, options), nil +} + +// DialTLS connects to an IMAP server with implicit TLS. +func DialTLS(address string, options *Options) (*Client, error) { + tlsConfig := options.tlsConfig() + if tlsConfig.NextProtos == nil { + tlsConfig.NextProtos = []string{"imap"} + } + + conn, err := tls.DialWithDialer(dialer, "tcp", address, tlsConfig) + if err != nil { + return nil, err + } + return New(conn, options), nil +} + +// DialStartTLS connects to an IMAP server with STARTTLS. +func DialStartTLS(address string, options *Options) (*Client, error) { + if options == nil { + options = &Options{} + } + + host, _, err := net.SplitHostPort(address) + if err != nil { + return nil, err + } + + conn, err := dialer.Dial("tcp", address) + if err != nil { + return nil, err + } + + tlsConfig := options.tlsConfig() + if tlsConfig.ServerName == "" { + tlsConfig.ServerName = host + } + newOptions := *options + newOptions.TLSConfig = tlsConfig + return NewStartTLS(conn, &newOptions) +} + +func (c *Client) setReadTimeout(dur time.Duration) { + if dur > 0 { + c.conn.SetReadDeadline(time.Now().Add(dur)) + } else { + c.conn.SetReadDeadline(time.Time{}) + } +} + +func (c *Client) setWriteTimeout(dur time.Duration) { + if dur > 0 { + c.conn.SetWriteDeadline(time.Now().Add(dur)) + } else { + c.conn.SetWriteDeadline(time.Time{}) + } +} + +// State returns the current connection state of the client. +func (c *Client) State() imap.ConnState { + c.mutex.Lock() + defer c.mutex.Unlock() + return c.state +} + +func (c *Client) setState(state imap.ConnState) { + c.mutex.Lock() + c.state = state + if c.state != imap.ConnStateSelected { + c.mailbox = nil + } + c.mutex.Unlock() +} + +// Caps returns the capabilities advertised by the server. +// +// When the server hasn't sent the capability list, this method will request it +// and block until it's received. If the capabilities cannot be fetched, nil is +// returned. +func (c *Client) Caps() imap.CapSet { + if err := c.WaitGreeting(); err != nil { + return nil + } + + c.mutex.Lock() + caps := c.caps + capCh := c.pendingCapCh + c.mutex.Unlock() + + if caps != nil { + return caps + } + + if capCh == nil { + capCmd := c.Capability() + capCh := make(chan struct{}) + go func() { + capCmd.Wait() + close(capCh) + }() + c.mutex.Lock() + c.pendingCapCh = capCh + c.mutex.Unlock() + } + + timer := time.NewTimer(respReadTimeout) + defer timer.Stop() + select { + case <-timer.C: + return nil + case <-capCh: + // ok + } + + // TODO: this is racy if caps are reset before we get the reply + c.mutex.Lock() + defer c.mutex.Unlock() + return c.caps +} + +func (c *Client) setCaps(caps imap.CapSet) { + // If the capabilities are being reset, request the updated capabilities + // from the server + var capCh chan struct{} + if caps == nil { + capCh = make(chan struct{}) + + // We need to send the CAPABILITY command in a separate goroutine: + // setCaps might be called with Client.encMutex locked + go func() { + c.Capability().Wait() + close(capCh) + }() + } + + c.mutex.Lock() + c.caps = caps + c.pendingCapCh = capCh + c.mutex.Unlock() +} + +// Mailbox returns the state of the currently selected mailbox. +// +// If there is no currently selected mailbox, nil is returned. +// +// The returned struct must not be mutated. 
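A minimal end-to-end sketch (editorial, not vendored) using the dial helpers above; Login and Logout are defined further down in client.go. The host and credentials are placeholders.

package example

import (
	"fmt"

	"github.com/emersion/go-imap/v2"
	"github.com/emersion/go-imap/v2/imapclient"
)

func session() error {
	c, err := imapclient.DialTLS("imap.example.org:993", nil)
	if err != nil {
		return err
	}
	defer c.Close()

	if err := c.Login("user@example.org", "password").Wait(); err != nil {
		return err
	}
	// Caps blocks until the capability list is known, requesting it if needed.
	if !c.Caps().Has(imap.CapIMAP4rev2) {
		return fmt.Errorf("server does not support IMAP4rev2")
	}
	return c.Logout().Wait()
}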
+func (c *Client) Mailbox() *SelectedMailbox { + c.mutex.Lock() + defer c.mutex.Unlock() + return c.mailbox +} + +// Close immediately closes the connection. +func (c *Client) Close() error { + c.mutex.Lock() + alreadyClosed := c.closed + c.closed = true + c.mutex.Unlock() + + // Ignore net.ErrClosed here, because we also call conn.Close in c.read + if err := c.conn.Close(); err != nil && !errors.Is(err, net.ErrClosed) && !errors.Is(err, io.ErrClosedPipe) { + return err + } + + <-c.decCh + if err := c.decErr; err != nil { + return err + } + + if alreadyClosed { + return net.ErrClosed + } + return nil +} + +// beginCommand starts sending a command to the server. +// +// The command name and a space are written. +// +// The caller must call commandEncoder.end. +func (c *Client) beginCommand(name string, cmd command) *commandEncoder { + c.encMutex.Lock() // unlocked by commandEncoder.end + + c.mutex.Lock() + + c.cmdTag++ + tag := fmt.Sprintf("T%v", c.cmdTag) + + baseCmd := cmd.base() + *baseCmd = commandBase{ + tag: tag, + done: make(chan error, 1), + } + + c.pendingCmds = append(c.pendingCmds, cmd) + quotedUTF8 := c.caps.Has(imap.CapIMAP4rev2) || c.enabled.Has(imap.CapUTF8Accept) + literalMinus := c.caps.Has(imap.CapLiteralMinus) + literalPlus := c.caps.Has(imap.CapLiteralPlus) + + c.mutex.Unlock() + + c.setWriteTimeout(cmdWriteTimeout) + + wireEnc := imapwire.NewEncoder(c.bw, imapwire.ConnSideClient) + wireEnc.QuotedUTF8 = quotedUTF8 + wireEnc.LiteralMinus = literalMinus + wireEnc.LiteralPlus = literalPlus + wireEnc.NewContinuationRequest = func() *imapwire.ContinuationRequest { + return c.registerContReq(cmd) + } + + enc := &commandEncoder{ + Encoder: wireEnc, + client: c, + cmd: baseCmd, + } + enc.Atom(tag).SP().Atom(name) + return enc +} + +func (c *Client) deletePendingCmdByTag(tag string) command { + c.mutex.Lock() + defer c.mutex.Unlock() + + for i, cmd := range c.pendingCmds { + if cmd.base().tag == tag { + c.pendingCmds = append(c.pendingCmds[:i], c.pendingCmds[i+1:]...) 
+ return cmd + } + } + return nil +} + +func (c *Client) findPendingCmdFunc(f func(cmd command) bool) command { + c.mutex.Lock() + defer c.mutex.Unlock() + + for _, cmd := range c.pendingCmds { + if f(cmd) { + return cmd + } + } + return nil +} + +func findPendingCmdByType[T command](c *Client) T { + c.mutex.Lock() + defer c.mutex.Unlock() + + for _, cmd := range c.pendingCmds { + if cmd, ok := cmd.(T); ok { + return cmd + } + } + + var cmd T + return cmd +} + +func (c *Client) completeCommand(cmd command, err error) { + done := cmd.base().done + done <- err + close(done) + + // Ensure the command is not blocked waiting on continuation requests + c.mutex.Lock() + var filtered []continuationRequest + for _, contReq := range c.contReqs { + if contReq.cmd != cmd.base() { + filtered = append(filtered, contReq) + } else { + contReq.Cancel(err) + } + } + c.contReqs = filtered + c.mutex.Unlock() + + switch cmd := cmd.(type) { + case *authenticateCommand, *loginCommand: + if err == nil { + c.setState(imap.ConnStateAuthenticated) + } + case *unauthenticateCommand: + if err == nil { + c.mutex.Lock() + c.state = imap.ConnStateNotAuthenticated + c.mailbox = nil + c.enabled = make(imap.CapSet) + c.mutex.Unlock() + } + case *SelectCommand: + if err == nil { + c.mutex.Lock() + c.state = imap.ConnStateSelected + c.mailbox = &SelectedMailbox{ + Name: cmd.mailbox, + NumMessages: cmd.data.NumMessages, + Flags: cmd.data.Flags, + PermanentFlags: cmd.data.PermanentFlags, + } + c.mutex.Unlock() + } + case *unselectCommand: + if err == nil { + c.setState(imap.ConnStateAuthenticated) + } + case *logoutCommand: + if err == nil { + c.setState(imap.ConnStateLogout) + } + case *ListCommand: + if cmd.pendingData != nil { + cmd.mailboxes <- cmd.pendingData + } + close(cmd.mailboxes) + case *FetchCommand: + close(cmd.msgs) + case *ExpungeCommand: + close(cmd.seqNums) + } +} + +func (c *Client) registerContReq(cmd command) *imapwire.ContinuationRequest { + contReq := imapwire.NewContinuationRequest() + + c.mutex.Lock() + c.contReqs = append(c.contReqs, continuationRequest{ + ContinuationRequest: contReq, + cmd: cmd.base(), + }) + c.mutex.Unlock() + + return contReq +} + +func (c *Client) closeWithError(err error) { + c.conn.Close() + + c.mutex.Lock() + c.state = imap.ConnStateLogout + pendingCmds := c.pendingCmds + c.pendingCmds = nil + c.mutex.Unlock() + + for _, cmd := range pendingCmds { + c.completeCommand(cmd, err) + } +} + +// read continuously reads data coming from the server. +// +// All the data is decoded in the read goroutine, then dispatched via channels +// to pending commands. 
+func (c *Client) read() { + defer close(c.decCh) + defer func() { + if v := recover(); v != nil { + c.decErr = fmt.Errorf("imapclient: panic reading response: %v\n%s", v, debug.Stack()) + } + + cmdErr := c.decErr + if cmdErr == nil { + cmdErr = io.ErrUnexpectedEOF + } + c.closeWithError(cmdErr) + }() + + c.setReadTimeout(respReadTimeout) // We're waiting for the greeting + for { + // Ignore net.ErrClosed here, because we also call conn.Close in c.Close + if c.dec.EOF() || errors.Is(c.dec.Err(), net.ErrClosed) || errors.Is(c.dec.Err(), io.ErrClosedPipe) { + break + } + if err := c.readResponse(); err != nil { + c.decErr = err + break + } + if c.greetingErr != nil { + break + } + } +} + +func (c *Client) readResponse() error { + c.setReadTimeout(respReadTimeout) + defer c.setReadTimeout(idleReadTimeout) + + if c.dec.Special('+') { + if err := c.readContinueReq(); err != nil { + return fmt.Errorf("in continue-req: %v", err) + } + return nil + } + + var tag, typ string + if !c.dec.Expect(c.dec.Special('*') || c.dec.Atom(&tag), "'*' or atom") { + return fmt.Errorf("in response: cannot read tag: %v", c.dec.Err()) + } + if !c.dec.ExpectSP() { + return fmt.Errorf("in response: %v", c.dec.Err()) + } + if !c.dec.ExpectAtom(&typ) { + return fmt.Errorf("in response: cannot read type: %v", c.dec.Err()) + } + + // Change typ to uppercase, as it's case-insensitive + typ = strings.ToUpper(typ) + + var ( + token string + err error + startTLS *startTLSCommand + ) + if tag != "" { + token = "response-tagged" + startTLS, err = c.readResponseTagged(tag, typ) + } else { + token = "response-data" + err = c.readResponseData(typ) + } + if err != nil { + return fmt.Errorf("in %v: %v", token, err) + } + + if !c.dec.ExpectCRLF() { + return fmt.Errorf("in response: %v", c.dec.Err()) + } + + if startTLS != nil { + c.upgradeStartTLS(startTLS) + } + + return nil +} + +func (c *Client) readContinueReq() error { + var text string + if c.dec.SP() { + c.dec.Text(&text) + } + if !c.dec.ExpectCRLF() { + return c.dec.Err() + } + + var contReq *imapwire.ContinuationRequest + c.mutex.Lock() + if len(c.contReqs) > 0 { + contReq = c.contReqs[0].ContinuationRequest + c.contReqs = append(c.contReqs[:0], c.contReqs[1:]...) + } + c.mutex.Unlock() + + if contReq == nil { + return fmt.Errorf("received unmatched continuation request") + } + + contReq.Done(text) + return nil +} + +func (c *Client) readResponseTagged(tag, typ string) (startTLS *startTLSCommand, err error) { + cmd := c.deletePendingCmdByTag(tag) + if cmd == nil { + return nil, fmt.Errorf("received tagged response with unknown tag %q", tag) + } + + // We've removed the command from the pending queue above. Make sure we + // don't stall it on error. 
+ defer func() { + if err != nil { + c.completeCommand(cmd, err) + } + }() + + // Some servers don't provide a text even if the RFC requires it, + // see #500 and #502 + hasSP := c.dec.SP() + + var code string + if hasSP && c.dec.Special('[') { // resp-text-code + if !c.dec.ExpectAtom(&code) { + return nil, fmt.Errorf("in resp-text-code: %v", c.dec.Err()) + } + // TODO: LONGENTRIES and MAXSIZE from METADATA + switch code { + case "CAPABILITY": // capability-data + caps, err := readCapabilities(c.dec) + if err != nil { + return nil, fmt.Errorf("in capability-data: %v", err) + } + c.setCaps(caps) + case "APPENDUID": + var ( + uidValidity uint32 + uid imap.UID + ) + if !c.dec.ExpectSP() || !c.dec.ExpectNumber(&uidValidity) || !c.dec.ExpectSP() || !c.dec.ExpectUID(&uid) { + return nil, fmt.Errorf("in resp-code-apnd: %v", c.dec.Err()) + } + if cmd, ok := cmd.(*AppendCommand); ok { + cmd.data.UID = uid + cmd.data.UIDValidity = uidValidity + } + case "COPYUID": + if !c.dec.ExpectSP() { + return nil, c.dec.Err() + } + uidValidity, srcUIDs, dstUIDs, err := readRespCodeCopyUID(c.dec) + if err != nil { + return nil, fmt.Errorf("in resp-code-copy: %v", err) + } + switch cmd := cmd.(type) { + case *CopyCommand: + cmd.data.UIDValidity = uidValidity + cmd.data.SourceUIDs = srcUIDs + cmd.data.DestUIDs = dstUIDs + case *MoveCommand: + // This can happen when Client.Move falls back to COPY + + // STORE + EXPUNGE + cmd.data.UIDValidity = uidValidity + cmd.data.SourceUIDs = srcUIDs + cmd.data.DestUIDs = dstUIDs + } + default: // [SP 1*] + if c.dec.SP() { + c.dec.DiscardUntilByte(']') + } + } + if !c.dec.ExpectSpecial(']') { + return nil, fmt.Errorf("in resp-text: %v", c.dec.Err()) + } + hasSP = c.dec.SP() + } + var text string + if hasSP && !c.dec.ExpectText(&text) { + return nil, fmt.Errorf("in resp-text: %v", c.dec.Err()) + } + + var cmdErr error + switch typ { + case "OK": + // nothing to do + case "NO", "BAD": + cmdErr = &imap.Error{ + Type: imap.StatusResponseType(typ), + Code: imap.ResponseCode(code), + Text: text, + } + default: + return nil, fmt.Errorf("in resp-cond-state: expected OK, NO or BAD status condition, but got %v", typ) + } + + c.completeCommand(cmd, cmdErr) + + if cmd, ok := cmd.(*startTLSCommand); ok && cmdErr == nil { + startTLS = cmd + } + + if cmdErr == nil && code != "CAPABILITY" { + switch cmd.(type) { + case *startTLSCommand, *loginCommand, *authenticateCommand, *unauthenticateCommand: + // These commands invalidate the capabilities + c.setCaps(nil) + } + } + + return startTLS, nil +} + +func (c *Client) readResponseData(typ string) error { + // number SP ("EXISTS" / "RECENT" / "FETCH" / "EXPUNGE") + var num uint32 + if typ[0] >= '0' && typ[0] <= '9' { + v, err := strconv.ParseUint(typ, 10, 32) + if err != nil { + return err + } + + num = uint32(v) + if !c.dec.ExpectSP() || !c.dec.ExpectAtom(&typ) { + return c.dec.Err() + } + } + + // All response type are case insensitive + switch strings.ToUpper(typ) { + case "OK", "PREAUTH", "NO", "BAD", "BYE": // resp-cond-state / resp-cond-bye / resp-cond-auth + // Some servers don't provide a text even if the RFC requires it, + // see #500 and #502 + hasSP := c.dec.SP() + + var code string + if hasSP && c.dec.Special('[') { // resp-text-code + if !c.dec.ExpectAtom(&code) { + return fmt.Errorf("in resp-text-code: %v", c.dec.Err()) + } + switch code { + case "CAPABILITY": // capability-data + caps, err := readCapabilities(c.dec) + if err != nil { + return fmt.Errorf("in capability-data: %v", err) + } + c.setCaps(caps) + case "PERMANENTFLAGS": + 
if !c.dec.ExpectSP() { + return c.dec.Err() + } + flags, err := internal.ExpectFlagList(c.dec) + if err != nil { + return err + } + + c.mutex.Lock() + if c.state == imap.ConnStateSelected { + c.mailbox = c.mailbox.copy() + c.mailbox.PermanentFlags = flags + } + c.mutex.Unlock() + + if cmd := findPendingCmdByType[*SelectCommand](c); cmd != nil { + cmd.data.PermanentFlags = flags + } else if handler := c.options.unilateralDataHandler().Mailbox; handler != nil { + handler(&UnilateralDataMailbox{PermanentFlags: flags}) + } + case "UIDNEXT": + var uidNext imap.UID + if !c.dec.ExpectSP() || !c.dec.ExpectUID(&uidNext) { + return c.dec.Err() + } + if cmd := findPendingCmdByType[*SelectCommand](c); cmd != nil { + cmd.data.UIDNext = uidNext + } + case "UIDVALIDITY": + var uidValidity uint32 + if !c.dec.ExpectSP() || !c.dec.ExpectNumber(&uidValidity) { + return c.dec.Err() + } + if cmd := findPendingCmdByType[*SelectCommand](c); cmd != nil { + cmd.data.UIDValidity = uidValidity + } + case "COPYUID": + if !c.dec.ExpectSP() { + return c.dec.Err() + } + uidValidity, srcUIDs, dstUIDs, err := readRespCodeCopyUID(c.dec) + if err != nil { + return fmt.Errorf("in resp-code-copy: %v", err) + } + if cmd := findPendingCmdByType[*MoveCommand](c); cmd != nil { + cmd.data.UIDValidity = uidValidity + cmd.data.SourceUIDs = srcUIDs + cmd.data.DestUIDs = dstUIDs + } + case "HIGHESTMODSEQ": + var modSeq uint64 + if !c.dec.ExpectSP() || !c.dec.ExpectModSeq(&modSeq) { + return c.dec.Err() + } + if cmd := findPendingCmdByType[*SelectCommand](c); cmd != nil { + cmd.data.HighestModSeq = modSeq + } + case "NOMODSEQ": + // ignore + default: // [SP 1*] + if c.dec.SP() { + c.dec.DiscardUntilByte(']') + } + } + if !c.dec.ExpectSpecial(']') { + return fmt.Errorf("in resp-text: %v", c.dec.Err()) + } + hasSP = c.dec.SP() + } + + var text string + if hasSP && !c.dec.ExpectText(&text) { + return fmt.Errorf("in resp-text: %v", c.dec.Err()) + } + + if code == "CLOSED" { + c.setState(imap.ConnStateAuthenticated) + } + + if !c.greetingRecv { + switch typ { + case "OK": + c.setState(imap.ConnStateNotAuthenticated) + case "PREAUTH": + c.setState(imap.ConnStateAuthenticated) + default: + c.setState(imap.ConnStateLogout) + c.greetingErr = &imap.Error{ + Type: imap.StatusResponseType(typ), + Code: imap.ResponseCode(code), + Text: text, + } + } + c.greetingRecv = true + if c.greetingErr == nil && code != "CAPABILITY" { + c.setCaps(nil) // request initial capabilities + } + close(c.greetingCh) + } + case "ID": + return c.handleID() + case "CAPABILITY": + return c.handleCapability() + case "ENABLED": + return c.handleEnabled() + case "NAMESPACE": + if !c.dec.ExpectSP() { + return c.dec.Err() + } + return c.handleNamespace() + case "FLAGS": + if !c.dec.ExpectSP() { + return c.dec.Err() + } + return c.handleFlags() + case "EXISTS": + return c.handleExists(num) + case "RECENT": + // ignore + case "LIST": + if !c.dec.ExpectSP() { + return c.dec.Err() + } + return c.handleList() + case "STATUS": + if !c.dec.ExpectSP() { + return c.dec.Err() + } + return c.handleStatus() + case "FETCH": + if !c.dec.ExpectSP() { + return c.dec.Err() + } + return c.handleFetch(num) + case "EXPUNGE": + return c.handleExpunge(num) + case "SEARCH": + return c.handleSearch() + case "ESEARCH": + return c.handleESearch() + case "SORT": + return c.handleSort() + case "THREAD": + return c.handleThread() + case "METADATA": + if !c.dec.ExpectSP() { + return c.dec.Err() + } + return c.handleMetadata() + case "QUOTA": + if !c.dec.ExpectSP() { + return c.dec.Err() + } + return 
c.handleQuota() + case "QUOTAROOT": + if !c.dec.ExpectSP() { + return c.dec.Err() + } + return c.handleQuotaRoot() + case "MYRIGHTS": + if !c.dec.ExpectSP() { + return c.dec.Err() + } + return c.handleMyRights() + case "ACL": + if !c.dec.ExpectSP() { + return c.dec.Err() + } + return c.handleGetACL() + default: + return fmt.Errorf("unsupported response type %q", typ) + } + + return nil +} + +// WaitGreeting waits for the server's initial greeting. +func (c *Client) WaitGreeting() error { + select { + case <-c.greetingCh: + return c.greetingErr + case <-c.decCh: + if c.decErr != nil { + return fmt.Errorf("got error before greeting: %v", c.decErr) + } + return fmt.Errorf("connection closed before greeting") + } +} + +// Noop sends a NOOP command. +func (c *Client) Noop() *Command { + cmd := &Command{} + c.beginCommand("NOOP", cmd).end() + return cmd +} + +// Logout sends a LOGOUT command. +// +// This command informs the server that the client is done with the connection. +func (c *Client) Logout() *Command { + cmd := &logoutCommand{} + c.beginCommand("LOGOUT", cmd).end() + return &cmd.Command +} + +// Login sends a LOGIN command. +func (c *Client) Login(username, password string) *Command { + cmd := &loginCommand{} + enc := c.beginCommand("LOGIN", cmd) + enc.SP().String(username).SP().String(password) + enc.end() + return &cmd.Command +} + +// Delete sends a DELETE command. +func (c *Client) Delete(mailbox string) *Command { + cmd := &Command{} + enc := c.beginCommand("DELETE", cmd) + enc.SP().Mailbox(mailbox) + enc.end() + return cmd +} + +// Rename sends a RENAME command. +func (c *Client) Rename(mailbox, newName string) *Command { + cmd := &Command{} + enc := c.beginCommand("RENAME", cmd) + enc.SP().Mailbox(mailbox).SP().Mailbox(newName) + enc.end() + return cmd +} + +// Subscribe sends a SUBSCRIBE command. +func (c *Client) Subscribe(mailbox string) *Command { + cmd := &Command{} + enc := c.beginCommand("SUBSCRIBE", cmd) + enc.SP().Mailbox(mailbox) + enc.end() + return cmd +} + +// Subscribe sends an UNSUBSCRIBE command. +func (c *Client) Unsubscribe(mailbox string) *Command { + cmd := &Command{} + enc := c.beginCommand("UNSUBSCRIBE", cmd) + enc.SP().Mailbox(mailbox) + enc.end() + return cmd +} + +func uidCmdName(name string, kind imapwire.NumKind) string { + switch kind { + case imapwire.NumKindSeq: + return name + case imapwire.NumKindUID: + return "UID " + name + default: + panic("imapclient: invalid imapwire.NumKind") + } +} + +type commandEncoder struct { + *imapwire.Encoder + client *Client + cmd *commandBase +} + +// end ends an outgoing command. +// +// A CRLF is written, the encoder is flushed and its lock is released. +func (ce *commandEncoder) end() { + if ce.Encoder != nil { + ce.flush() + } + ce.client.setWriteTimeout(0) + ce.client.encMutex.Unlock() +} + +// flush sends an outgoing command, but keeps the encoder lock. +// +// A CRLF is written and the encoder is flushed. Callers must call +// commandEncoder.end to release the lock. +func (ce *commandEncoder) flush() { + if err := ce.Encoder.CRLF(); err != nil { + // TODO: consider stashing the error in Client to return it in future + // calls + ce.client.closeWithError(err) + } + ce.Encoder = nil +} + +// Literal encodes a literal. 
+func (ce *commandEncoder) Literal(size int64) io.WriteCloser { + var contReq *imapwire.ContinuationRequest + ce.client.mutex.Lock() + hasCapLiteralMinus := ce.client.caps.Has(imap.CapLiteralMinus) + ce.client.mutex.Unlock() + if size > 4096 || !hasCapLiteralMinus { + contReq = ce.client.registerContReq(ce.cmd) + } + ce.client.setWriteTimeout(literalWriteTimeout) + return literalWriter{ + WriteCloser: ce.Encoder.Literal(size, contReq), + client: ce.client, + } +} + +type literalWriter struct { + io.WriteCloser + client *Client +} + +func (lw literalWriter) Close() error { + lw.client.setWriteTimeout(cmdWriteTimeout) + return lw.WriteCloser.Close() +} + +// continuationRequest is a pending continuation request. +type continuationRequest struct { + *imapwire.ContinuationRequest + cmd *commandBase +} + +// UnilateralDataMailbox describes a mailbox status update. +// +// If a field is nil, it hasn't changed. +type UnilateralDataMailbox struct { + NumMessages *uint32 + Flags []imap.Flag + PermanentFlags []imap.Flag +} + +// UnilateralDataHandler handles unilateral data. +// +// The handler will block the client while running. If the caller intends to +// perform slow operations, a buffered channel and a separate goroutine should +// be used. +// +// The handler will be invoked in an arbitrary goroutine. +// +// See Options.UnilateralDataHandler. +type UnilateralDataHandler struct { + Expunge func(seqNum uint32) + Mailbox func(data *UnilateralDataMailbox) + Fetch func(msg *FetchMessageData) + + // requires ENABLE METADATA or ENABLE SERVER-METADATA + Metadata func(mailbox string, entries []string) +} + +// command is an interface for IMAP commands. +// +// Commands are represented by the Command type, but can be extended by other +// types (e.g. CapabilityCommand). +type command interface { + base() *commandBase +} + +type commandBase struct { + tag string + done chan error + err error +} + +func (cmd *commandBase) base() *commandBase { + return cmd +} + +func (cmd *commandBase) wait() error { + if cmd.err == nil { + cmd.err = <-cmd.done + } + return cmd.err +} + +// Command is a basic IMAP command. +type Command struct { + commandBase +} + +// Wait blocks until the command has completed. +func (cmd *Command) Wait() error { + return cmd.wait() +} + +type loginCommand struct { + Command +} + +// logoutCommand is a LOGOUT command. +type logoutCommand struct { + Command +} diff --git a/vendor/github.com/emersion/go-imap/v2/imapclient/copy.go b/vendor/github.com/emersion/go-imap/v2/imapclient/copy.go new file mode 100644 index 0000000000..c1081d8259 --- /dev/null +++ b/vendor/github.com/emersion/go-imap/v2/imapclient/copy.go @@ -0,0 +1,37 @@ +package imapclient + +import ( + "fmt" + + "github.com/emersion/go-imap/v2" + "github.com/emersion/go-imap/v2/internal/imapwire" +) + +// Copy sends a COPY command. +func (c *Client) Copy(numSet imap.NumSet, mailbox string) *CopyCommand { + cmd := &CopyCommand{} + enc := c.beginCommand(uidCmdName("COPY", imapwire.NumSetKind(numSet)), cmd) + enc.SP().NumSet(numSet).SP().Mailbox(mailbox) + enc.end() + return cmd +} + +// CopyCommand is a COPY command. 
+type CopyCommand struct { + commandBase + data imap.CopyData +} + +func (cmd *CopyCommand) Wait() (*imap.CopyData, error) { + return &cmd.data, cmd.wait() +} + +func readRespCodeCopyUID(dec *imapwire.Decoder) (uidValidity uint32, srcUIDs, dstUIDs imap.UIDSet, err error) { + if !dec.ExpectNumber(&uidValidity) || !dec.ExpectSP() || !dec.ExpectUIDSet(&srcUIDs) || !dec.ExpectSP() || !dec.ExpectUIDSet(&dstUIDs) { + return 0, nil, nil, dec.Err() + } + if srcUIDs.Dynamic() || dstUIDs.Dynamic() { + return 0, nil, nil, fmt.Errorf("imapclient: server returned dynamic number set in COPYUID response") + } + return uidValidity, srcUIDs, dstUIDs, nil +} diff --git a/vendor/github.com/emersion/go-imap/v2/imapclient/create.go b/vendor/github.com/emersion/go-imap/v2/imapclient/create.go new file mode 100644 index 0000000000..827ecce99d --- /dev/null +++ b/vendor/github.com/emersion/go-imap/v2/imapclient/create.go @@ -0,0 +1,21 @@ +package imapclient + +import ( + "github.com/emersion/go-imap/v2" +) + +// Create sends a CREATE command. +// +// A nil options pointer is equivalent to a zero options value. +func (c *Client) Create(mailbox string, options *imap.CreateOptions) *Command { + cmd := &Command{} + enc := c.beginCommand("CREATE", cmd) + enc.SP().Mailbox(mailbox) + if options != nil && len(options.SpecialUse) > 0 { + enc.SP().Special('(').Atom("USE").SP().List(len(options.SpecialUse), func(i int) { + enc.MailboxAttr(options.SpecialUse[i]) + }).Special(')') + } + enc.end() + return cmd +} diff --git a/vendor/github.com/emersion/go-imap/v2/imapclient/enable.go b/vendor/github.com/emersion/go-imap/v2/imapclient/enable.go new file mode 100644 index 0000000000..895766643d --- /dev/null +++ b/vendor/github.com/emersion/go-imap/v2/imapclient/enable.go @@ -0,0 +1,69 @@ +package imapclient + +import ( + "fmt" + + "github.com/emersion/go-imap/v2" +) + +// Enable sends an ENABLE command. +// +// This command requires support for IMAP4rev2 or the ENABLE extension. +func (c *Client) Enable(caps ...imap.Cap) *EnableCommand { + // Enabling an extension may change the IMAP syntax, so only allow the + // extensions we support here + for _, name := range caps { + switch name { + case imap.CapIMAP4rev2, imap.CapUTF8Accept, imap.CapMetadata, imap.CapMetadataServer: + // ok + default: + done := make(chan error) + close(done) + err := fmt.Errorf("imapclient: cannot enable %q: not supported", name) + return &EnableCommand{commandBase: commandBase{done: done, err: err}} + } + } + + cmd := &EnableCommand{} + enc := c.beginCommand("ENABLE", cmd) + for _, c := range caps { + enc.SP().Atom(string(c)) + } + enc.end() + return cmd +} + +func (c *Client) handleEnabled() error { + caps, err := readCapabilities(c.dec) + if err != nil { + return err + } + + c.mutex.Lock() + for name := range caps { + c.enabled[name] = struct{}{} + } + c.mutex.Unlock() + + if cmd := findPendingCmdByType[*EnableCommand](c); cmd != nil { + cmd.data.Caps = caps + } + + return nil +} + +// EnableCommand is an ENABLE command. +type EnableCommand struct { + commandBase + data EnableData +} + +func (cmd *EnableCommand) Wait() (*EnableData, error) { + return &cmd.data, cmd.wait() +} + +// EnableData is the data returned by the ENABLE command. 
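A sketch (not vendored) of the COPY round trip above; with UIDPLUS or IMAP4rev2 the COPYUID response code fills in CopyData. The "Archive" mailbox name is illustrative.

package example

import (
	"github.com/emersion/go-imap/v2"
	"github.com/emersion/go-imap/v2/imapclient"
)

// archiveMessages copies the given UIDs into the Archive mailbox and returns
// the UIDs assigned in the destination (empty without UIDPLUS or IMAP4rev2).
func archiveMessages(c *imapclient.Client, uids imap.UIDSet) (imap.UIDSet, error) {
	data, err := c.Copy(uids, "Archive").Wait()
	if err != nil {
		return nil, err
	}
	return data.DestUIDs, nil
}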
+type EnableData struct { + // Capabilities that were successfully enabled + Caps imap.CapSet +} diff --git a/vendor/github.com/emersion/go-imap/v2/imapclient/expunge.go b/vendor/github.com/emersion/go-imap/v2/imapclient/expunge.go new file mode 100644 index 0000000000..11e477c1fd --- /dev/null +++ b/vendor/github.com/emersion/go-imap/v2/imapclient/expunge.go @@ -0,0 +1,84 @@ +package imapclient + +import ( + "github.com/emersion/go-imap/v2" +) + +// Expunge sends an EXPUNGE command. +func (c *Client) Expunge() *ExpungeCommand { + cmd := &ExpungeCommand{seqNums: make(chan uint32, 128)} + c.beginCommand("EXPUNGE", cmd).end() + return cmd +} + +// UIDExpunge sends a UID EXPUNGE command. +// +// This command requires support for IMAP4rev2 or the UIDPLUS extension. +func (c *Client) UIDExpunge(uids imap.UIDSet) *ExpungeCommand { + cmd := &ExpungeCommand{seqNums: make(chan uint32, 128)} + enc := c.beginCommand("UID EXPUNGE", cmd) + enc.SP().NumSet(uids) + enc.end() + return cmd +} + +func (c *Client) handleExpunge(seqNum uint32) error { + c.mutex.Lock() + if c.state == imap.ConnStateSelected && c.mailbox.NumMessages > 0 { + c.mailbox = c.mailbox.copy() + c.mailbox.NumMessages-- + } + c.mutex.Unlock() + + cmd := findPendingCmdByType[*ExpungeCommand](c) + if cmd != nil { + cmd.seqNums <- seqNum + } else if handler := c.options.unilateralDataHandler().Expunge; handler != nil { + handler(seqNum) + } + + return nil +} + +// ExpungeCommand is an EXPUNGE command. +// +// The caller must fully consume the ExpungeCommand. A simple way to do so is +// to defer a call to FetchCommand.Close. +type ExpungeCommand struct { + commandBase + seqNums chan uint32 +} + +// Next advances to the next expunged message sequence number. +// +// On success, the message sequence number is returned. On error or if there +// are no more messages, 0 is returned. To check the error value, use Close. +func (cmd *ExpungeCommand) Next() uint32 { + return <-cmd.seqNums +} + +// Close releases the command. +// +// Calling Close unblocks the IMAP client decoder and lets it read the next +// responses. Next will always return nil after Close. +func (cmd *ExpungeCommand) Close() error { + for cmd.Next() != 0 { + // ignore + } + return cmd.wait() +} + +// Collect accumulates expunged sequence numbers into a list. +// +// This is equivalent to calling Next repeatedly and then Close. +func (cmd *ExpungeCommand) Collect() ([]uint32, error) { + var l []uint32 + for { + seqNum := cmd.Next() + if seqNum == 0 { + break + } + l = append(l, seqNum) + } + return l, cmd.Close() +} diff --git a/vendor/github.com/emersion/go-imap/v2/imapclient/fetch.go b/vendor/github.com/emersion/go-imap/v2/imapclient/fetch.go new file mode 100644 index 0000000000..74d95f1336 --- /dev/null +++ b/vendor/github.com/emersion/go-imap/v2/imapclient/fetch.go @@ -0,0 +1,1326 @@ +package imapclient + +import ( + "fmt" + "io" + netmail "net/mail" + "strings" + "time" + + "github.com/emersion/go-imap/v2" + "github.com/emersion/go-imap/v2/internal" + "github.com/emersion/go-imap/v2/internal/imapwire" + "github.com/emersion/go-message/mail" +) + +// Fetch sends a FETCH command. +// +// The caller must fully consume the FetchCommand. A simple way to do so is to +// defer a call to FetchCommand.Close. +// +// A nil options pointer is equivalent to a zero options value. 
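The Fetch doc comment above can be illustrated with a minimal sketch, assuming a connected client with a mailbox already selected; the exampleFetchEnvelopes name and the sequence numbers are illustrative.

// exampleFetchEnvelopes fetches flags and envelopes for two messages of the
// currently selected mailbox and buffers them in memory via Collect.
func exampleFetchEnvelopes(c *imapclient.Client) error {
	var seqSet imap.SeqSet
	seqSet.AddNum(1)
	seqSet.AddNum(2)
	msgs, err := c.Fetch(seqSet, &imap.FetchOptions{Envelope: true, Flags: true}).Collect()
	if err != nil {
		return err
	}
	for _, msg := range msgs {
		fmt.Println(msg.SeqNum, msg.Envelope.Subject, msg.Flags)
	}
	return nil
}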
+func (c *Client) Fetch(numSet imap.NumSet, options *imap.FetchOptions) *FetchCommand { + if options == nil { + options = new(imap.FetchOptions) + } + + numKind := imapwire.NumSetKind(numSet) + + cmd := &FetchCommand{ + numSet: numSet, + msgs: make(chan *FetchMessageData, 128), + } + enc := c.beginCommand(uidCmdName("FETCH", numKind), cmd) + enc.SP().NumSet(numSet).SP() + writeFetchItems(enc.Encoder, numKind, options) + if options.ChangedSince != 0 { + enc.SP().Special('(').Atom("CHANGEDSINCE").SP().ModSeq(options.ChangedSince).Special(')') + } + enc.end() + return cmd +} + +func writeFetchItems(enc *imapwire.Encoder, numKind imapwire.NumKind, options *imap.FetchOptions) { + listEnc := enc.BeginList() + + // Ensure we request UID as the first data item for UID FETCH, to be safer. + // We want to get it before any literal. + if options.UID || numKind == imapwire.NumKindUID { + listEnc.Item().Atom("UID") + } + + m := map[string]bool{ + "BODY": options.BodyStructure != nil && !options.BodyStructure.Extended, + "BODYSTRUCTURE": options.BodyStructure != nil && options.BodyStructure.Extended, + "ENVELOPE": options.Envelope, + "FLAGS": options.Flags, + "INTERNALDATE": options.InternalDate, + "RFC822.SIZE": options.RFC822Size, + "MODSEQ": options.ModSeq, + } + for k, req := range m { + if req { + listEnc.Item().Atom(k) + } + } + + for _, bs := range options.BodySection { + writeFetchItemBodySection(listEnc.Item(), bs) + } + for _, bs := range options.BinarySection { + writeFetchItemBinarySection(listEnc.Item(), bs) + } + for _, bss := range options.BinarySectionSize { + writeFetchItemBinarySectionSize(listEnc.Item(), bss) + } + + listEnc.End() +} + +func writeFetchItemBodySection(enc *imapwire.Encoder, item *imap.FetchItemBodySection) { + enc.Atom("BODY") + if item.Peek { + enc.Atom(".PEEK") + } + enc.Special('[') + writeSectionPart(enc, item.Part) + if len(item.Part) > 0 && item.Specifier != imap.PartSpecifierNone { + enc.Special('.') + } + if item.Specifier != imap.PartSpecifierNone { + enc.Atom(string(item.Specifier)) + + var headerList []string + if len(item.HeaderFields) > 0 { + headerList = item.HeaderFields + enc.Atom(".FIELDS") + } else if len(item.HeaderFieldsNot) > 0 { + headerList = item.HeaderFieldsNot + enc.Atom(".FIELDS.NOT") + } + + if len(headerList) > 0 { + enc.SP().List(len(headerList), func(i int) { + enc.String(headerList[i]) + }) + } + } + enc.Special(']') + writeSectionPartial(enc, item.Partial) +} + +func writeFetchItemBinarySection(enc *imapwire.Encoder, item *imap.FetchItemBinarySection) { + enc.Atom("BINARY") + if item.Peek { + enc.Atom(".PEEK") + } + enc.Special('[') + writeSectionPart(enc, item.Part) + enc.Special(']') + writeSectionPartial(enc, item.Partial) +} + +func writeFetchItemBinarySectionSize(enc *imapwire.Encoder, item *imap.FetchItemBinarySectionSize) { + enc.Atom("BINARY.SIZE") + enc.Special('[') + writeSectionPart(enc, item.Part) + enc.Special(']') +} + +func writeSectionPart(enc *imapwire.Encoder, part []int) { + if len(part) == 0 { + return + } + + var l []string + for _, num := range part { + l = append(l, fmt.Sprintf("%v", num)) + } + enc.Atom(strings.Join(l, ".")) +} + +func writeSectionPartial(enc *imapwire.Encoder, partial *imap.SectionPartial) { + if partial == nil { + return + } + enc.Special('<').Number64(partial.Offset).Special('.').Number64(partial.Size).Special('>') +} + +// FetchCommand is a FETCH command. 
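A hedged sketch of requesting a single body section with the options serialized by writeFetchItemBodySection above: the header of message 1 is fetched with Peek (so \Seen is not set) and looked up in the collected buffer; the exampleFetchHeader helper is illustrative.

// exampleFetchHeader downloads only the message header and returns its raw bytes.
func exampleFetchHeader(c *imapclient.Client) ([]byte, error) {
	section := &imap.FetchItemBodySection{Specifier: imap.PartSpecifierHeader, Peek: true}
	var seqSet imap.SeqSet
	seqSet.AddNum(1)
	options := &imap.FetchOptions{BodySection: []*imap.FetchItemBodySection{section}}
	msgs, err := c.Fetch(seqSet, options).Collect()
	if err != nil || len(msgs) == 0 {
		return nil, err
	}
	return msgs[0].FindBodySection(section), nil
}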
+type FetchCommand struct { + commandBase + + numSet imap.NumSet + recvSeqSet imap.SeqSet + recvUIDSet imap.UIDSet + + msgs chan *FetchMessageData + prev *FetchMessageData +} + +func (cmd *FetchCommand) recvSeqNum(seqNum uint32) bool { + set, ok := cmd.numSet.(imap.SeqSet) + if !ok || !set.Contains(seqNum) { + return false + } + + if cmd.recvSeqSet.Contains(seqNum) { + return false + } + + cmd.recvSeqSet.AddNum(seqNum) + return true +} + +func (cmd *FetchCommand) recvUID(uid imap.UID) bool { + set, ok := cmd.numSet.(imap.UIDSet) + if !ok || !set.Contains(uid) { + return false + } + + if cmd.recvUIDSet.Contains(uid) { + return false + } + + cmd.recvUIDSet.AddNum(uid) + return true +} + +// Next advances to the next message. +// +// On success, the message is returned. On error or if there are no more +// messages, nil is returned. To check the error value, use Close. +func (cmd *FetchCommand) Next() *FetchMessageData { + if cmd.prev != nil { + cmd.prev.discard() + } + cmd.prev = <-cmd.msgs + return cmd.prev +} + +// Close releases the command. +// +// Calling Close unblocks the IMAP client decoder and lets it read the next +// responses. Next will always return nil after Close. +func (cmd *FetchCommand) Close() error { + for cmd.Next() != nil { + // ignore + } + return cmd.wait() +} + +// Collect accumulates message data into a list. +// +// This method will read and store message contents in memory. This is +// acceptable when the message contents have a reasonable size, but may not be +// suitable when fetching e.g. attachments. +// +// This is equivalent to calling Next repeatedly and then Close. +func (cmd *FetchCommand) Collect() ([]*FetchMessageBuffer, error) { + defer cmd.Close() + + var l []*FetchMessageBuffer + for { + msg := cmd.Next() + if msg == nil { + break + } + + buf, err := msg.Collect() + if err != nil { + return l, err + } + + l = append(l, buf) + } + return l, cmd.Close() +} + +func matchFetchItemBodySection(cmd, resp *imap.FetchItemBodySection) bool { + if cmd.Specifier != resp.Specifier { + return false + } + + if !intSliceEqual(cmd.Part, resp.Part) { + return false + } + if !stringSliceEqualFold(cmd.HeaderFields, resp.HeaderFields) { + return false + } + if !stringSliceEqualFold(cmd.HeaderFieldsNot, resp.HeaderFieldsNot) { + return false + } + + if (cmd.Partial == nil) != (resp.Partial == nil) { + return false + } + if cmd.Partial != nil && cmd.Partial.Offset != resp.Partial.Offset { + return false + } + + // Ignore Partial.Size and Peek: these are not echoed back by the server + return true +} + +func matchFetchItemBinarySection(cmd, resp *imap.FetchItemBinarySection) bool { + // Ignore Partial and Peek: these are not echoed back by the server + return intSliceEqual(cmd.Part, resp.Part) +} + +func intSliceEqual(a, b []int) bool { + if len(a) != len(b) { + return false + } + for i := range a { + if a[i] != b[i] { + return false + } + } + return true +} + +func stringSliceEqualFold(a, b []string) bool { + if len(a) != len(b) { + return false + } + for i := range a { + if !strings.EqualFold(a[i], b[i]) { + return false + } + } + return true +} + +// FetchMessageData contains a message's FETCH data. +type FetchMessageData struct { + SeqNum uint32 + + items chan FetchItemData + prev FetchItemData +} + +// Next advances to the next data item for this message. +// +// If there is one or more data items left, the next item is returned. +// Otherwise nil is returned. 
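As an alternative to Collect, the streaming interface described above (Next until nil, then Close) avoids buffering every message; this sketch assumes a connected client and is illustrative only.

// exampleStreamFetch walks the FETCH responses one message at a time.
func exampleStreamFetch(c *imapclient.Client, numSet imap.NumSet) error {
	cmd := c.Fetch(numSet, &imap.FetchOptions{Envelope: true})
	for {
		msg := cmd.Next()
		if msg == nil {
			break
		}
		buf, err := msg.Collect()
		if err != nil {
			cmd.Close()
			return err
		}
		fmt.Println(buf.SeqNum)
	}
	return cmd.Close()
}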
+func (data *FetchMessageData) Next() FetchItemData { + if d, ok := data.prev.(discarder); ok { + d.discard() + } + + item := <-data.items + data.prev = item + return item +} + +func (data *FetchMessageData) discard() { + for { + if item := data.Next(); item == nil { + break + } + } +} + +// Collect accumulates message data into a struct. +// +// This method will read and store message contents in memory. This is +// acceptable when the message contents have a reasonable size, but may not be +// suitable when fetching e.g. attachments. +func (data *FetchMessageData) Collect() (*FetchMessageBuffer, error) { + defer data.discard() + + buf := &FetchMessageBuffer{SeqNum: data.SeqNum} + for { + item := data.Next() + if item == nil { + break + } + if err := buf.populateItemData(item); err != nil { + return buf, err + } + } + return buf, nil +} + +// FetchItemData contains a message's FETCH item data. +type FetchItemData interface { + fetchItemData() +} + +var ( + _ FetchItemData = FetchItemDataBodySection{} + _ FetchItemData = FetchItemDataBinarySection{} + _ FetchItemData = FetchItemDataFlags{} + _ FetchItemData = FetchItemDataEnvelope{} + _ FetchItemData = FetchItemDataInternalDate{} + _ FetchItemData = FetchItemDataRFC822Size{} + _ FetchItemData = FetchItemDataUID{} + _ FetchItemData = FetchItemDataBodyStructure{} +) + +type discarder interface { + discard() +} + +var ( + _ discarder = FetchItemDataBodySection{} + _ discarder = FetchItemDataBinarySection{} +) + +// FetchItemDataBodySection holds data returned by FETCH BODY[]. +// +// Literal might be nil. +type FetchItemDataBodySection struct { + Section *imap.FetchItemBodySection + Literal imap.LiteralReader +} + +func (FetchItemDataBodySection) fetchItemData() {} + +func (item FetchItemDataBodySection) discard() { + if item.Literal != nil { + io.Copy(io.Discard, item.Literal) + } +} + +// MatchCommand checks whether a section returned by the server in a response +// is compatible with a section requested by the client in a command. +func (dataItem *FetchItemDataBodySection) MatchCommand(item *imap.FetchItemBodySection) bool { + return matchFetchItemBodySection(item, dataItem.Section) +} + +// FetchItemDataBinarySection holds data returned by FETCH BINARY[]. +// +// Literal might be nil. +type FetchItemDataBinarySection struct { + Section *imap.FetchItemBinarySection + Literal imap.LiteralReader +} + +func (FetchItemDataBinarySection) fetchItemData() {} + +func (item FetchItemDataBinarySection) discard() { + if item.Literal != nil { + io.Copy(io.Discard, item.Literal) + } +} + +// MatchCommand checks whether a section returned by the server in a response +// is compatible with a section requested by the client in a command. +func (dataItem *FetchItemDataBinarySection) MatchCommand(item *imap.FetchItemBinarySection) bool { + return matchFetchItemBinarySection(item, dataItem.Section) +} + +// FetchItemDataFlags holds data returned by FETCH FLAGS. +type FetchItemDataFlags struct { + Flags []imap.Flag +} + +func (FetchItemDataFlags) fetchItemData() {} + +// FetchItemDataEnvelope holds data returned by FETCH ENVELOPE. +type FetchItemDataEnvelope struct { + Envelope *imap.Envelope +} + +func (FetchItemDataEnvelope) fetchItemData() {} + +// FetchItemDataInternalDate holds data returned by FETCH INTERNALDATE. +type FetchItemDataInternalDate struct { + Time time.Time +} + +func (FetchItemDataInternalDate) fetchItemData() {} + +// FetchItemDataRFC822Size holds data returned by FETCH RFC822.SIZE. 
+type FetchItemDataRFC822Size struct { + Size int64 +} + +func (FetchItemDataRFC822Size) fetchItemData() {} + +// FetchItemDataUID holds data returned by FETCH UID. +type FetchItemDataUID struct { + UID imap.UID +} + +func (FetchItemDataUID) fetchItemData() {} + +// FetchItemDataBodyStructure holds data returned by FETCH BODYSTRUCTURE or +// FETCH BODY. +type FetchItemDataBodyStructure struct { + BodyStructure imap.BodyStructure + IsExtended bool // True if BODYSTRUCTURE, false if BODY +} + +func (FetchItemDataBodyStructure) fetchItemData() {} + +// FetchItemDataBinarySectionSize holds data returned by FETCH BINARY.SIZE[]. +type FetchItemDataBinarySectionSize struct { + Part []int + Size uint32 +} + +func (FetchItemDataBinarySectionSize) fetchItemData() {} + +// MatchCommand checks whether a section size returned by the server in a +// response is compatible with a section size requested by the client in a +// command. +func (data *FetchItemDataBinarySectionSize) MatchCommand(item *imap.FetchItemBinarySectionSize) bool { + return intSliceEqual(item.Part, data.Part) +} + +// FetchItemDataModSeq holds data returned by FETCH MODSEQ. +// +// This requires the CONDSTORE extension. +type FetchItemDataModSeq struct { + ModSeq uint64 +} + +func (FetchItemDataModSeq) fetchItemData() {} + +// FetchBodySectionBuffer is a buffer for the data returned by +// FetchItemBodySection. +type FetchBodySectionBuffer struct { + Section *imap.FetchItemBodySection + Bytes []byte +} + +// FetchBinarySectionBuffer is a buffer for the data returned by +// FetchItemBinarySection. +type FetchBinarySectionBuffer struct { + Section *imap.FetchItemBinarySection + Bytes []byte +} + +// FetchMessageBuffer is a buffer for the data returned by FetchMessageData. +// +// The SeqNum field is always populated. All remaining fields are optional. +type FetchMessageBuffer struct { + SeqNum uint32 + Flags []imap.Flag + Envelope *imap.Envelope + InternalDate time.Time + RFC822Size int64 + UID imap.UID + BodyStructure imap.BodyStructure + BodySection []FetchBodySectionBuffer + BinarySection []FetchBinarySectionBuffer + BinarySectionSize []FetchItemDataBinarySectionSize + ModSeq uint64 // requires CONDSTORE +} + +func (buf *FetchMessageBuffer) populateItemData(item FetchItemData) error { + switch item := item.(type) { + case FetchItemDataBodySection: + var b []byte + if item.Literal != nil { + var err error + b, err = io.ReadAll(item.Literal) + if err != nil { + return err + } + } + buf.BodySection = append(buf.BodySection, FetchBodySectionBuffer{ + Section: item.Section, + Bytes: b, + }) + case FetchItemDataBinarySection: + var b []byte + if item.Literal != nil { + var err error + b, err = io.ReadAll(item.Literal) + if err != nil { + return err + } + } + buf.BinarySection = append(buf.BinarySection, FetchBinarySectionBuffer{ + Section: item.Section, + Bytes: b, + }) + case FetchItemDataFlags: + buf.Flags = item.Flags + case FetchItemDataEnvelope: + buf.Envelope = item.Envelope + case FetchItemDataInternalDate: + buf.InternalDate = item.Time + case FetchItemDataRFC822Size: + buf.RFC822Size = item.Size + case FetchItemDataUID: + buf.UID = item.UID + case FetchItemDataBodyStructure: + buf.BodyStructure = item.BodyStructure + case FetchItemDataBinarySectionSize: + buf.BinarySectionSize = append(buf.BinarySectionSize, item) + case FetchItemDataModSeq: + buf.ModSeq = item.ModSeq + default: + panic(fmt.Errorf("unsupported fetch item data %T", item)) + } + return nil +} + +// FindBodySection returns the contents of a requested body section. 
+// +// If the body section is not found, nil is returned. +func (buf *FetchMessageBuffer) FindBodySection(section *imap.FetchItemBodySection) []byte { + for _, s := range buf.BodySection { + if matchFetchItemBodySection(section, s.Section) { + return s.Bytes + } + } + return nil +} + +// FindBinarySection returns the contents of a requested binary section. +// +// If the binary section is not found, nil is returned. +func (buf *FetchMessageBuffer) FindBinarySection(section *imap.FetchItemBinarySection) []byte { + for _, s := range buf.BinarySection { + if matchFetchItemBinarySection(section, s.Section) { + return s.Bytes + } + } + return nil +} + +// FindBinarySectionSize returns a requested binary section size. +// +// If the binary section size is not found, false is returned. +func (buf *FetchMessageBuffer) FindBinarySectionSize(part []int) (uint32, bool) { + for _, s := range buf.BinarySectionSize { + if intSliceEqual(part, s.Part) { + return s.Size, true + } + } + return 0, false +} + +func (c *Client) handleFetch(seqNum uint32) error { + dec := c.dec + + items := make(chan FetchItemData, 32) + defer close(items) + + msg := &FetchMessageData{SeqNum: seqNum, items: items} + + // We're in a tricky situation: to know whether this FETCH response needs + // to be handled by a pending command, we may need to look at the UID in + // the response data. But the response data comes in in a streaming + // fashion: it can contain literals. Assume that the UID will be returned + // before any literal. + var uid imap.UID + handled := false + handleMsg := func() { + if handled { + return + } + + cmd := c.findPendingCmdFunc(func(anyCmd command) bool { + cmd, ok := anyCmd.(*FetchCommand) + if !ok { + return false + } + + // Skip if we haven't requested or already handled this message + if _, ok := cmd.numSet.(imap.UIDSet); ok { + return uid != 0 && cmd.recvUID(uid) + } else { + return seqNum != 0 && cmd.recvSeqNum(seqNum) + } + }) + if cmd != nil { + cmd := cmd.(*FetchCommand) + cmd.msgs <- msg + } else if handler := c.options.unilateralDataHandler().Fetch; handler != nil { + go handler(msg) + } else { + go msg.discard() + } + + handled = true + } + defer handleMsg() + + numAtts := 0 + return dec.ExpectList(func() error { + var attName string + if !dec.Expect(dec.Func(&attName, isMsgAttNameChar), "msg-att name") { + return dec.Err() + } + attName = strings.ToUpper(attName) + + var ( + item FetchItemData + done chan struct{} + ) + switch attName { + case "FLAGS": + if !dec.ExpectSP() { + return dec.Err() + } + + flags, err := internal.ExpectFlagList(dec) + if err != nil { + return err + } + + item = FetchItemDataFlags{Flags: flags} + case "ENVELOPE": + if !dec.ExpectSP() { + return dec.Err() + } + + envelope, err := readEnvelope(dec, &c.options) + if err != nil { + return fmt.Errorf("in envelope: %v", err) + } + + item = FetchItemDataEnvelope{Envelope: envelope} + case "INTERNALDATE": + if !dec.ExpectSP() { + return dec.Err() + } + + t, err := internal.ExpectDateTime(dec) + if err != nil { + return err + } + + item = FetchItemDataInternalDate{Time: t} + case "RFC822.SIZE": + var size int64 + if !dec.ExpectSP() || !dec.ExpectNumber64(&size) { + return dec.Err() + } + + item = FetchItemDataRFC822Size{Size: size} + case "UID": + if !dec.ExpectSP() || !dec.ExpectUID(&uid) { + return dec.Err() + } + + item = FetchItemDataUID{UID: uid} + case "BODY", "BINARY": + if dec.Special('[') { + var section interface{} + switch attName { + case "BODY": + var err error + section, err = readSectionSpec(dec) + if err != 
nil { + return fmt.Errorf("in section-spec: %v", err) + } + case "BINARY": + part, dot := readSectionPart(dec) + if dot { + return fmt.Errorf("in section-binary: expected number after dot") + } + if !dec.ExpectSpecial(']') { + return dec.Err() + } + section = &imap.FetchItemBinarySection{Part: part} + } + + if !dec.ExpectSP() { + return dec.Err() + } + + // Ignore literal8 marker, if any + if attName == "BINARY" { + dec.Special('~') + } + + lit, _, ok := dec.ExpectNStringReader() + if !ok { + return dec.Err() + } + + var fetchLit imap.LiteralReader + if lit != nil { + done = make(chan struct{}) + fetchLit = &fetchLiteralReader{ + LiteralReader: lit, + ch: done, + } + } + + switch section := section.(type) { + case *imap.FetchItemBodySection: + item = FetchItemDataBodySection{ + Section: section, + Literal: fetchLit, + } + case *imap.FetchItemBinarySection: + item = FetchItemDataBinarySection{ + Section: section, + Literal: fetchLit, + } + } + break + } + if !dec.Expect(attName == "BODY", "'['") { + return dec.Err() + } + fallthrough + case "BODYSTRUCTURE": + if !dec.ExpectSP() { + return dec.Err() + } + + bodyStruct, err := readBody(dec, &c.options) + if err != nil { + return err + } + + item = FetchItemDataBodyStructure{ + BodyStructure: bodyStruct, + IsExtended: attName == "BODYSTRUCTURE", + } + case "BINARY.SIZE": + if !dec.ExpectSpecial('[') { + return dec.Err() + } + part, dot := readSectionPart(dec) + if dot { + return fmt.Errorf("in section-binary: expected number after dot") + } + + var size uint32 + if !dec.ExpectSpecial(']') || !dec.ExpectSP() || !dec.ExpectNumber(&size) { + return dec.Err() + } + + item = FetchItemDataBinarySectionSize{ + Part: part, + Size: size, + } + case "MODSEQ": + var modSeq uint64 + if !dec.ExpectSP() || !dec.ExpectSpecial('(') || !dec.ExpectModSeq(&modSeq) || !dec.ExpectSpecial(')') { + return dec.Err() + } + item = FetchItemDataModSeq{ModSeq: modSeq} + default: + return fmt.Errorf("unsupported msg-att name: %q", attName) + } + + numAtts++ + if numAtts > cap(items) || done != nil { + // To avoid deadlocking we need to ask the message handler to + // consume the data + handleMsg() + } + + if done != nil { + c.setReadTimeout(literalReadTimeout) + } + items <- item + if done != nil { + <-done + c.setReadTimeout(respReadTimeout) + } + return nil + }) +} + +func isMsgAttNameChar(ch byte) bool { + return ch != '[' && imapwire.IsAtomChar(ch) +} + +func readEnvelope(dec *imapwire.Decoder, options *Options) (*imap.Envelope, error) { + var envelope imap.Envelope + + if !dec.ExpectSpecial('(') { + return nil, dec.Err() + } + + var date, subject string + if !dec.ExpectNString(&date) || !dec.ExpectSP() || !dec.ExpectNString(&subject) || !dec.ExpectSP() { + return nil, dec.Err() + } + // TODO: handle error + envelope.Date, _ = netmail.ParseDate(date) + envelope.Subject, _ = options.decodeText(subject) + + addrLists := []struct { + name string + out *[]imap.Address + }{ + {"env-from", &envelope.From}, + {"env-sender", &envelope.Sender}, + {"env-reply-to", &envelope.ReplyTo}, + {"env-to", &envelope.To}, + {"env-cc", &envelope.Cc}, + {"env-bcc", &envelope.Bcc}, + } + for _, addrList := range addrLists { + l, err := readAddressList(dec, options) + if err != nil { + return nil, fmt.Errorf("in %v: %v", addrList.name, err) + } else if !dec.ExpectSP() { + return nil, dec.Err() + } + *addrList.out = l + } + + var inReplyTo, messageID string + if !dec.ExpectNString(&inReplyTo) || !dec.ExpectSP() || !dec.ExpectNString(&messageID) { + return nil, dec.Err() + } + // TODO: handle 
errors + envelope.InReplyTo, _ = parseMsgIDList(inReplyTo) + envelope.MessageID, _ = parseMsgID(messageID) + + if !dec.ExpectSpecial(')') { + return nil, dec.Err() + } + return &envelope, nil +} + +func readAddressList(dec *imapwire.Decoder, options *Options) ([]imap.Address, error) { + var l []imap.Address + err := dec.ExpectNList(func() error { + addr, err := readAddress(dec, options) + if err != nil { + return err + } + l = append(l, *addr) + return nil + }) + return l, err +} + +func readAddress(dec *imapwire.Decoder, options *Options) (*imap.Address, error) { + var ( + addr imap.Address + name string + obsRoute string + ) + ok := dec.ExpectSpecial('(') && + dec.ExpectNString(&name) && dec.ExpectSP() && + dec.ExpectNString(&obsRoute) && dec.ExpectSP() && + dec.ExpectNString(&addr.Mailbox) && dec.ExpectSP() && + dec.ExpectNString(&addr.Host) && dec.ExpectSpecial(')') + if !ok { + return nil, fmt.Errorf("in address: %v", dec.Err()) + } + // TODO: handle error + addr.Name, _ = options.decodeText(name) + return &addr, nil +} + +func parseMsgID(s string) (string, error) { + var h mail.Header + h.Set("Message-Id", s) + return h.MessageID() +} + +func parseMsgIDList(s string) ([]string, error) { + var h mail.Header + h.Set("In-Reply-To", s) + return h.MsgIDList("In-Reply-To") +} + +func readBody(dec *imapwire.Decoder, options *Options) (imap.BodyStructure, error) { + if !dec.ExpectSpecial('(') { + return nil, dec.Err() + } + + var ( + mediaType string + token string + bs imap.BodyStructure + err error + ) + if dec.String(&mediaType) { + token = "body-type-1part" + bs, err = readBodyType1part(dec, mediaType, options) + } else { + token = "body-type-mpart" + bs, err = readBodyTypeMpart(dec, options) + } + if err != nil { + return nil, fmt.Errorf("in %v: %v", token, err) + } + + for dec.SP() { + if !dec.DiscardValue() { + return nil, dec.Err() + } + } + + if !dec.ExpectSpecial(')') { + return nil, dec.Err() + } + + return bs, nil +} + +func readBodyType1part(dec *imapwire.Decoder, typ string, options *Options) (*imap.BodyStructureSinglePart, error) { + bs := imap.BodyStructureSinglePart{Type: typ} + + if !dec.ExpectSP() || !dec.ExpectString(&bs.Subtype) || !dec.ExpectSP() { + return nil, dec.Err() + } + var err error + bs.Params, err = readBodyFldParam(dec, options) + if err != nil { + return nil, err + } + + var description string + if !dec.ExpectSP() || !dec.ExpectNString(&bs.ID) || !dec.ExpectSP() || !dec.ExpectNString(&description) || !dec.ExpectSP() || !dec.ExpectNString(&bs.Encoding) || !dec.ExpectSP() || !dec.ExpectBodyFldOctets(&bs.Size) { + return nil, dec.Err() + } + + // Content-Transfer-Encoding should always be set, but some non-standard + // servers leave it NIL. Default to 7BIT. 
+ if bs.Encoding == "" { + bs.Encoding = "7BIT" + } + + // TODO: handle errors + bs.Description, _ = options.decodeText(description) + + // Some servers don't include the extra fields for message and text + // (see https://github.com/emersion/go-imap/issues/557) + hasSP := dec.SP() + if !hasSP { + return &bs, nil + } + + if strings.EqualFold(bs.Type, "message") && (strings.EqualFold(bs.Subtype, "rfc822") || strings.EqualFold(bs.Subtype, "global")) { + var msg imap.BodyStructureMessageRFC822 + + msg.Envelope, err = readEnvelope(dec, options) + if err != nil { + return nil, err + } + + if !dec.ExpectSP() { + return nil, dec.Err() + } + + msg.BodyStructure, err = readBody(dec, options) + if err != nil { + return nil, err + } + + if !dec.ExpectSP() || !dec.ExpectNumber64(&msg.NumLines) { + return nil, dec.Err() + } + + bs.MessageRFC822 = &msg + hasSP = false + } else if strings.EqualFold(bs.Type, "text") { + var text imap.BodyStructureText + + if !dec.ExpectNumber64(&text.NumLines) { + return nil, dec.Err() + } + + bs.Text = &text + hasSP = false + } + + if !hasSP { + hasSP = dec.SP() + } + if hasSP { + bs.Extended, err = readBodyExt1part(dec, options) + if err != nil { + return nil, fmt.Errorf("in body-ext-1part: %v", err) + } + } + + return &bs, nil +} + +func readBodyExt1part(dec *imapwire.Decoder, options *Options) (*imap.BodyStructureSinglePartExt, error) { + var ext imap.BodyStructureSinglePartExt + + var md5 string + if !dec.ExpectNString(&md5) { + return nil, dec.Err() + } + + if !dec.SP() { + return &ext, nil + } + + var err error + ext.Disposition, err = readBodyFldDsp(dec, options) + if err != nil { + return nil, fmt.Errorf("in body-fld-dsp: %v", err) + } + + if !dec.SP() { + return &ext, nil + } + + ext.Language, err = readBodyFldLang(dec) + if err != nil { + return nil, fmt.Errorf("in body-fld-lang: %v", err) + } + + if !dec.SP() { + return &ext, nil + } + + if !dec.ExpectNString(&ext.Location) { + return nil, dec.Err() + } + + return &ext, nil +} + +func readBodyTypeMpart(dec *imapwire.Decoder, options *Options) (*imap.BodyStructureMultiPart, error) { + var bs imap.BodyStructureMultiPart + + for { + child, err := readBody(dec, options) + if err != nil { + return nil, err + } + bs.Children = append(bs.Children, child) + + if dec.SP() && dec.String(&bs.Subtype) { + break + } + } + + if dec.SP() { + var err error + bs.Extended, err = readBodyExtMpart(dec, options) + if err != nil { + return nil, fmt.Errorf("in body-ext-mpart: %v", err) + } + } + + return &bs, nil +} + +func readBodyExtMpart(dec *imapwire.Decoder, options *Options) (*imap.BodyStructureMultiPartExt, error) { + var ext imap.BodyStructureMultiPartExt + + var err error + ext.Params, err = readBodyFldParam(dec, options) + if err != nil { + return nil, fmt.Errorf("in body-fld-param: %v", err) + } + + if !dec.SP() { + return &ext, nil + } + + ext.Disposition, err = readBodyFldDsp(dec, options) + if err != nil { + return nil, fmt.Errorf("in body-fld-dsp: %v", err) + } + + if !dec.SP() { + return &ext, nil + } + + ext.Language, err = readBodyFldLang(dec) + if err != nil { + return nil, fmt.Errorf("in body-fld-lang: %v", err) + } + + if !dec.SP() { + return &ext, nil + } + + if !dec.ExpectNString(&ext.Location) { + return nil, dec.Err() + } + + return &ext, nil +} + +func readBodyFldDsp(dec *imapwire.Decoder, options *Options) (*imap.BodyStructureDisposition, error) { + if !dec.Special('(') { + if !dec.ExpectNIL() { + return nil, dec.Err() + } + return nil, nil + } + + var disp imap.BodyStructureDisposition + if 
!dec.ExpectString(&disp.Value) || !dec.ExpectSP() { + return nil, dec.Err() + } + + var err error + disp.Params, err = readBodyFldParam(dec, options) + if err != nil { + return nil, err + } + if !dec.ExpectSpecial(')') { + return nil, dec.Err() + } + return &disp, nil +} + +func readBodyFldParam(dec *imapwire.Decoder, options *Options) (map[string]string, error) { + var ( + params map[string]string + k string + ) + err := dec.ExpectNList(func() error { + var s string + if !dec.ExpectString(&s) { + return dec.Err() + } + + if k == "" { + k = s + } else { + if params == nil { + params = make(map[string]string) + } + decoded, _ := options.decodeText(s) + // TODO: handle error + + params[strings.ToLower(k)] = decoded + k = "" + } + + return nil + }) + if err != nil { + return nil, err + } else if k != "" { + return nil, fmt.Errorf("in body-fld-param: key without value") + } + return params, nil +} + +func readBodyFldLang(dec *imapwire.Decoder) ([]string, error) { + var l []string + isList, err := dec.List(func() error { + var s string + if !dec.ExpectString(&s) { + return dec.Err() + } + l = append(l, s) + return nil + }) + if err != nil || isList { + return l, err + } + + var s string + if !dec.ExpectNString(&s) { + return nil, dec.Err() + } + if s != "" { + return []string{s}, nil + } else { + return nil, nil + } +} + +func readSectionSpec(dec *imapwire.Decoder) (*imap.FetchItemBodySection, error) { + var section imap.FetchItemBodySection + + var dot bool + section.Part, dot = readSectionPart(dec) + if dot || len(section.Part) == 0 { + var specifier string + if dot { + if !dec.ExpectAtom(&specifier) { + return nil, dec.Err() + } + } else { + dec.Atom(&specifier) + } + specifier = strings.ToUpper(specifier) + section.Specifier = imap.PartSpecifier(specifier) + + if specifier == "HEADER.FIELDS" || specifier == "HEADER.FIELDS.NOT" { + if !dec.ExpectSP() { + return nil, dec.Err() + } + var err error + headerList, err := readHeaderList(dec) + if err != nil { + return nil, err + } + section.Specifier = imap.PartSpecifierHeader + if specifier == "HEADER.FIELDS" { + section.HeaderFields = headerList + } else { + section.HeaderFieldsNot = headerList + } + } + } + + if !dec.ExpectSpecial(']') { + return nil, dec.Err() + } + + offset, err := readPartialOffset(dec) + if err != nil { + return nil, err + } + if offset != nil { + section.Partial = &imap.SectionPartial{Offset: int64(*offset)} + } + + return &section, nil +} + +func readPartialOffset(dec *imapwire.Decoder) (*uint32, error) { + if !dec.Special('<') { + return nil, nil + } + var offset uint32 + if !dec.ExpectNumber(&offset) || !dec.ExpectSpecial('>') { + return nil, dec.Err() + } + return &offset, nil +} + +func readHeaderList(dec *imapwire.Decoder) ([]string, error) { + var l []string + err := dec.ExpectList(func() error { + var s string + if !dec.ExpectAString(&s) { + return dec.Err() + } + l = append(l, s) + return nil + }) + return l, err +} + +func readSectionPart(dec *imapwire.Decoder) (part []int, dot bool) { + for { + dot = len(part) > 0 + if dot && !dec.Special('.') { + return part, false + } + + var num uint32 + if !dec.Number(&num) { + return part, dot + } + part = append(part, int(num)) + } +} + +type fetchLiteralReader struct { + *imapwire.LiteralReader + ch chan<- struct{} +} + +func (lit *fetchLiteralReader) Read(b []byte) (int, error) { + n, err := lit.LiteralReader.Read(b) + if err == io.EOF && lit.ch != nil { + close(lit.ch) + lit.ch = nil + } + return n, err +} diff --git a/vendor/github.com/emersion/go-imap/v2/imapclient/id.go 
b/vendor/github.com/emersion/go-imap/v2/imapclient/id.go new file mode 100644 index 0000000000..0c10d6057c --- /dev/null +++ b/vendor/github.com/emersion/go-imap/v2/imapclient/id.go @@ -0,0 +1,163 @@ +package imapclient + +import ( + "fmt" + + "github.com/emersion/go-imap/v2" + "github.com/emersion/go-imap/v2/internal/imapwire" +) + +// ID sends an ID command. +// +// The ID command is introduced in RFC 2971. It requires support for the ID +// extension. +// +// An example ID command: +// +// ID ("name" "go-imap" "version" "1.0" "os" "Linux" "os-version" "7.9.4" "vendor" "Yahoo") +func (c *Client) ID(idData *imap.IDData) *IDCommand { + cmd := &IDCommand{} + enc := c.beginCommand("ID", cmd) + + if idData == nil { + enc.SP().NIL() + enc.end() + return cmd + } + + enc.SP().Special('(') + isFirstKey := true + if idData.Name != "" { + addIDKeyValue(enc, &isFirstKey, "name", idData.Name) + } + if idData.Version != "" { + addIDKeyValue(enc, &isFirstKey, "version", idData.Version) + } + if idData.OS != "" { + addIDKeyValue(enc, &isFirstKey, "os", idData.OS) + } + if idData.OSVersion != "" { + addIDKeyValue(enc, &isFirstKey, "os-version", idData.OSVersion) + } + if idData.Vendor != "" { + addIDKeyValue(enc, &isFirstKey, "vendor", idData.Vendor) + } + if idData.SupportURL != "" { + addIDKeyValue(enc, &isFirstKey, "support-url", idData.SupportURL) + } + if idData.Address != "" { + addIDKeyValue(enc, &isFirstKey, "address", idData.Address) + } + if idData.Date != "" { + addIDKeyValue(enc, &isFirstKey, "date", idData.Date) + } + if idData.Command != "" { + addIDKeyValue(enc, &isFirstKey, "command", idData.Command) + } + if idData.Arguments != "" { + addIDKeyValue(enc, &isFirstKey, "arguments", idData.Arguments) + } + if idData.Environment != "" { + addIDKeyValue(enc, &isFirstKey, "environment", idData.Environment) + } + + enc.Special(')') + enc.end() + return cmd +} + +func addIDKeyValue(enc *commandEncoder, isFirstKey *bool, key, value string) { + if isFirstKey == nil { + panic("isFirstKey cannot be nil") + } else if !*isFirstKey { + enc.SP().Quoted(key).SP().Quoted(value) + } else { + enc.Quoted(key).SP().Quoted(value) + } + *isFirstKey = false +} + +func (c *Client) handleID() error { + data, err := c.readID(c.dec) + if err != nil { + return fmt.Errorf("in id: %v", err) + } + + if cmd := findPendingCmdByType[*IDCommand](c); cmd != nil { + cmd.data = *data + } + + return nil +} + +func (c *Client) readID(dec *imapwire.Decoder) (*imap.IDData, error) { + var data = imap.IDData{} + + if !dec.ExpectSP() { + return nil, dec.Err() + } + + if dec.ExpectNIL() { + return &data, nil + } + + currKey := "" + err := dec.ExpectList(func() error { + var keyOrValue string + if !dec.String(&keyOrValue) { + return fmt.Errorf("in id key-val list: %v", dec.Err()) + } + + if currKey == "" { + currKey = keyOrValue + return nil + } + + switch currKey { + case "name": + data.Name = keyOrValue + case "version": + data.Version = keyOrValue + case "os": + data.OS = keyOrValue + case "os-version": + data.OSVersion = keyOrValue + case "vendor": + data.Vendor = keyOrValue + case "support-url": + data.SupportURL = keyOrValue + case "address": + data.Address = keyOrValue + case "date": + data.Date = keyOrValue + case "command": + data.Command = keyOrValue + case "arguments": + data.Arguments = keyOrValue + case "environment": + data.Environment = keyOrValue + default: + // Ignore unknown key + // Yahoo server sends "host" and "remote-host" keys + // which are not defined in RFC 2971 + } + currKey = "" + + return nil + }) + + if err 
!= nil { + return nil, err + } + + return &data, nil +} + +type IDCommand struct { + commandBase + data imap.IDData +} + +func (r *IDCommand) Wait() (*imap.IDData, error) { + return &r.data, r.wait() +} diff --git a/vendor/github.com/emersion/go-imap/v2/imapclient/idle.go b/vendor/github.com/emersion/go-imap/v2/imapclient/idle.go new file mode 100644 index 0000000000..1613bff33a --- /dev/null +++ b/vendor/github.com/emersion/go-imap/v2/imapclient/idle.go @@ -0,0 +1,157 @@ +package imapclient + +import ( + "fmt" + "sync/atomic" + "time" +) + +const idleRestartInterval = 28 * time.Minute + +// Idle sends an IDLE command. +// +// Unlike other commands, this method blocks until the server acknowledges it. +// On success, the IDLE command is running and other commands cannot be sent. +// The caller must invoke IdleCommand.Close to stop IDLE and unblock the +// client. +// +// This command requires support for IMAP4rev2 or the IDLE extension. The IDLE +// command is restarted automatically to avoid getting disconnected due to +// inactivity timeouts. +func (c *Client) Idle() (*IdleCommand, error) { + child, err := c.idle() + if err != nil { + return nil, err + } + + cmd := &IdleCommand{ + stop: make(chan struct{}), + done: make(chan struct{}), + } + go cmd.run(c, child) + return cmd, nil +} + +// IdleCommand is an IDLE command. +// +// Initially, the IDLE command is running. The server may send unilateral +// data. The client cannot send any command while IDLE is running. +// +// Close must be called to stop the IDLE command. +type IdleCommand struct { + stopped atomic.Bool + stop chan struct{} + done chan struct{} + + err error + lastChild *idleCommand +} + +func (cmd *IdleCommand) run(c *Client, child *idleCommand) { + defer close(cmd.done) + + timer := time.NewTimer(idleRestartInterval) + defer timer.Stop() + + defer func() { + if child != nil { + if err := child.Close(); err != nil && cmd.err == nil { + cmd.err = err + } + } + }() + + for { + select { + case <-timer.C: + timer.Reset(idleRestartInterval) + + if cmd.err = child.Close(); cmd.err != nil { + return + } + if child, cmd.err = c.idle(); cmd.err != nil { + return + } + case <-c.decCh: + cmd.lastChild = child + return + case <-cmd.stop: + cmd.lastChild = child + return + } + } +} + +// Close stops the IDLE command. +// +// This method blocks until the command to stop IDLE is written, but doesn't +// wait for the server to respond. Callers can use Wait for this purpose. +func (cmd *IdleCommand) Close() error { + if cmd.stopped.Swap(true) { + return fmt.Errorf("imapclient: IDLE already closed") + } + close(cmd.stop) + <-cmd.done + return cmd.err +} + +// Wait blocks until the IDLE command has completed. +func (cmd *IdleCommand) Wait() error { + <-cmd.done + if cmd.err != nil { + return cmd.err + } + return cmd.lastChild.Wait() +} + +func (c *Client) idle() (*idleCommand, error) { + cmd := &idleCommand{} + contReq := c.registerContReq(cmd) + cmd.enc = c.beginCommand("IDLE", cmd) + cmd.enc.flush() + + _, err := contReq.Wait() + if err != nil { + cmd.enc.end() + return nil, err + } + + return cmd, nil +} + +// idleCommand represents a singular IDLE command, without the restart logic. +type idleCommand struct { + commandBase + enc *commandEncoder +} + +// Close stops the IDLE command. +// +// This method blocks until the command to stop IDLE is written, but doesn't +// wait for the server to respond. Callers can use Wait for this purpose. 
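A hedged sketch of the IDLE API above combined with the UnilateralDataHandler from Options: it assumes the client was constructed elsewhere with such a handler and uses the standard time package; the exampleIdle helper and the 30-second window are illustrative.

// exampleIdle keeps the connection idling so the configured
// Options.UnilateralDataHandler can observe expunge, fetch and mailbox updates.
func exampleIdle(c *imapclient.Client) error {
	idleCmd, err := c.Idle()
	if err != nil {
		return err
	}
	// No other commands may be sent while IDLE is running.
	time.Sleep(30 * time.Second)
	if err := idleCmd.Close(); err != nil {
		return err
	}
	return idleCmd.Wait()
}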
+func (cmd *idleCommand) Close() error { + if cmd.err != nil { + return cmd.err + } + if cmd.enc == nil { + return fmt.Errorf("imapclient: IDLE command closed twice") + } + cmd.enc.client.setWriteTimeout(cmdWriteTimeout) + _, err := cmd.enc.client.bw.WriteString("DONE\r\n") + if err == nil { + err = cmd.enc.client.bw.Flush() + } + cmd.enc.end() + cmd.enc = nil + return err +} + +// Wait blocks until the IDLE command has completed. +// +// Wait can only be called after Close. +func (cmd *idleCommand) Wait() error { + if cmd.enc != nil { + panic("imapclient: idleCommand.Close must be called before Wait") + } + return cmd.wait() +} diff --git a/vendor/github.com/emersion/go-imap/v2/imapclient/list.go b/vendor/github.com/emersion/go-imap/v2/imapclient/list.go new file mode 100644 index 0000000000..2c0ce16803 --- /dev/null +++ b/vendor/github.com/emersion/go-imap/v2/imapclient/list.go @@ -0,0 +1,259 @@ +package imapclient + +import ( + "fmt" + "strings" + "unicode/utf8" + + "github.com/emersion/go-imap/v2" + "github.com/emersion/go-imap/v2/internal" + "github.com/emersion/go-imap/v2/internal/imapwire" +) + +func getSelectOpts(options *imap.ListOptions) []string { + if options == nil { + return nil + } + + var l []string + if options.SelectSubscribed { + l = append(l, "SUBSCRIBED") + } + if options.SelectRemote { + l = append(l, "REMOTE") + } + if options.SelectRecursiveMatch { + l = append(l, "RECURSIVEMATCH") + } + if options.SelectSpecialUse { + l = append(l, "SPECIAL-USE") + } + return l +} + +func getReturnOpts(options *imap.ListOptions) []string { + if options == nil { + return nil + } + + var l []string + if options.ReturnSubscribed { + l = append(l, "SUBSCRIBED") + } + if options.ReturnChildren { + l = append(l, "CHILDREN") + } + if options.ReturnStatus != nil { + l = append(l, "STATUS") + } + if options.ReturnSpecialUse { + l = append(l, "SPECIAL-USE") + } + return l +} + +// List sends a LIST command. +// +// The caller must fully consume the ListCommand. A simple way to do so is to +// defer a call to ListCommand.Close. +// +// A nil options pointer is equivalent to a zero options value. +// +// A non-zero options value requires support for IMAP4rev2 or the LIST-EXTENDED +// extension. 
+func (c *Client) List(ref, pattern string, options *imap.ListOptions) *ListCommand { + cmd := &ListCommand{ + mailboxes: make(chan *imap.ListData, 64), + returnStatus: options != nil && options.ReturnStatus != nil, + } + enc := c.beginCommand("LIST", cmd) + if selectOpts := getSelectOpts(options); len(selectOpts) > 0 { + enc.SP().List(len(selectOpts), func(i int) { + enc.Atom(selectOpts[i]) + }) + } + enc.SP().Mailbox(ref).SP().Mailbox(pattern) + if returnOpts := getReturnOpts(options); len(returnOpts) > 0 { + enc.SP().Atom("RETURN").SP().List(len(returnOpts), func(i int) { + opt := returnOpts[i] + enc.Atom(opt) + if opt == "STATUS" { + returnStatus := statusItems(options.ReturnStatus) + enc.SP().List(len(returnStatus), func(j int) { + enc.Atom(returnStatus[j]) + }) + } + }) + } + enc.end() + return cmd +} + +func (c *Client) handleList() error { + data, err := readList(c.dec) + if err != nil { + return fmt.Errorf("in LIST: %v", err) + } + + cmd := c.findPendingCmdFunc(func(cmd command) bool { + switch cmd := cmd.(type) { + case *ListCommand: + return true // TODO: match pattern, check if already handled + case *SelectCommand: + return cmd.mailbox == data.Mailbox && cmd.data.List == nil + default: + return false + } + }) + switch cmd := cmd.(type) { + case *ListCommand: + if cmd.returnStatus { + if cmd.pendingData != nil { + cmd.mailboxes <- cmd.pendingData + } + cmd.pendingData = data + } else { + cmd.mailboxes <- data + } + case *SelectCommand: + cmd.data.List = data + } + + return nil +} + +// ListCommand is a LIST command. +type ListCommand struct { + commandBase + mailboxes chan *imap.ListData + + returnStatus bool + pendingData *imap.ListData +} + +// Next advances to the next mailbox. +// +// On success, the mailbox LIST data is returned. On error or if there are no +// more mailboxes, nil is returned. +func (cmd *ListCommand) Next() *imap.ListData { + return <-cmd.mailboxes +} + +// Close releases the command. +// +// Calling Close unblocks the IMAP client decoder and lets it read the next +// responses. Next will always return nil after Close. +func (cmd *ListCommand) Close() error { + for cmd.Next() != nil { + // ignore + } + return cmd.wait() +} + +// Collect accumulates mailboxes into a list. +// +// This is equivalent to calling Next repeatedly and then Close. 
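A minimal, hedged sketch for the LIST API above, assuming an authenticated *imapclient.Client c; passing nil options keeps the command compatible with servers that lack LIST-EXTENDED, and the exampleList helper is illustrative.

// exampleList prints the name and attributes of every visible mailbox.
func exampleList(c *imapclient.Client) error {
	mailboxes, err := c.List("", "*", nil).Collect()
	if err != nil {
		return err
	}
	for _, mbox := range mailboxes {
		fmt.Println(mbox.Mailbox, mbox.Attrs)
	}
	return nil
}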
+func (cmd *ListCommand) Collect() ([]*imap.ListData, error) { + var l []*imap.ListData + for { + data := cmd.Next() + if data == nil { + break + } + l = append(l, data) + } + return l, cmd.Close() +} + +func readList(dec *imapwire.Decoder) (*imap.ListData, error) { + var data imap.ListData + + var err error + data.Attrs, err = internal.ExpectMailboxAttrList(dec) + if err != nil { + return nil, fmt.Errorf("in mbx-list-flags: %w", err) + } + + if !dec.ExpectSP() { + return nil, dec.Err() + } + + data.Delim, err = readDelim(dec) + if err != nil { + return nil, err + } + + if !dec.ExpectSP() || !dec.ExpectMailbox(&data.Mailbox) { + return nil, dec.Err() + } + + if dec.SP() { + err := dec.ExpectList(func() error { + var tag string + if !dec.ExpectAString(&tag) || !dec.ExpectSP() { + return dec.Err() + } + var err error + switch strings.ToUpper(tag) { + case "CHILDINFO": + data.ChildInfo, err = readChildInfoExtendedItem(dec) + if err != nil { + return fmt.Errorf("in childinfo-extended-item: %v", err) + } + case "OLDNAME": + data.OldName, err = readOldNameExtendedItem(dec) + if err != nil { + return fmt.Errorf("in oldname-extended-item: %v", err) + } + default: + if !dec.DiscardValue() { + return fmt.Errorf("in tagged-ext-val: %v", err) + } + } + return nil + }) + if err != nil { + return nil, fmt.Errorf("in mbox-list-extended: %v", err) + } + } + + return &data, nil +} + +func readChildInfoExtendedItem(dec *imapwire.Decoder) (*imap.ListDataChildInfo, error) { + var childInfo imap.ListDataChildInfo + err := dec.ExpectList(func() error { + var opt string + if !dec.ExpectAString(&opt) { + return dec.Err() + } + if strings.ToUpper(opt) == "SUBSCRIBED" { + childInfo.Subscribed = true + } + return nil + }) + return &childInfo, err +} + +func readOldNameExtendedItem(dec *imapwire.Decoder) (string, error) { + var name string + if !dec.ExpectSpecial('(') || !dec.ExpectMailbox(&name) || !dec.ExpectSpecial(')') { + return "", dec.Err() + } + return name, nil +} + +func readDelim(dec *imapwire.Decoder) (rune, error) { + var delimStr string + if dec.Quoted(&delimStr) { + delim, size := utf8.DecodeRuneInString(delimStr) + if delim == utf8.RuneError || size != len(delimStr) { + return 0, fmt.Errorf("mailbox delimiter must be a single rune") + } + return delim, nil + } else if !dec.ExpectNIL() { + return 0, dec.Err() + } else { + return 0, nil + } +} diff --git a/vendor/github.com/emersion/go-imap/v2/imapclient/metadata.go b/vendor/github.com/emersion/go-imap/v2/imapclient/metadata.go new file mode 100644 index 0000000000..c8a0e72827 --- /dev/null +++ b/vendor/github.com/emersion/go-imap/v2/imapclient/metadata.go @@ -0,0 +1,205 @@ +package imapclient + +import ( + "fmt" + + "github.com/emersion/go-imap/v2/internal/imapwire" +) + +type GetMetadataDepth int + +const ( + GetMetadataDepthZero GetMetadataDepth = 0 + GetMetadataDepthOne GetMetadataDepth = 1 + GetMetadataDepthInfinity GetMetadataDepth = -1 +) + +func (depth GetMetadataDepth) String() string { + switch depth { + case GetMetadataDepthZero: + return "0" + case GetMetadataDepthOne: + return "1" + case GetMetadataDepthInfinity: + return "infinity" + default: + panic(fmt.Errorf("imapclient: unknown GETMETADATA depth %d", depth)) + } +} + +// GetMetadataOptions contains options for the GETMETADATA command. 
+type GetMetadataOptions struct { + MaxSize *uint32 + Depth GetMetadataDepth +} + +func (options *GetMetadataOptions) names() []string { + if options == nil { + return nil + } + var l []string + if options.MaxSize != nil { + l = append(l, "MAXSIZE") + } + if options.Depth != GetMetadataDepthZero { + l = append(l, "DEPTH") + } + return l +} + +// GetMetadata sends a GETMETADATA command. +// +// This command requires support for the METADATA or METADATA-SERVER extension. +func (c *Client) GetMetadata(mailbox string, entries []string, options *GetMetadataOptions) *GetMetadataCommand { + cmd := &GetMetadataCommand{mailbox: mailbox} + enc := c.beginCommand("GETMETADATA", cmd) + enc.SP().Mailbox(mailbox) + if opts := options.names(); len(opts) > 0 { + enc.SP().List(len(opts), func(i int) { + opt := opts[i] + enc.Atom(opt).SP() + switch opt { + case "MAXSIZE": + enc.Number(*options.MaxSize) + case "DEPTH": + enc.Atom(options.Depth.String()) + default: + panic(fmt.Errorf("imapclient: unknown GETMETADATA option %q", opt)) + } + }) + } + enc.SP().List(len(entries), func(i int) { + enc.String(entries[i]) + }) + enc.end() + return cmd +} + +// SetMetadata sends a SETMETADATA command. +// +// To remove an entry, set it to nil. +// +// This command requires support for the METADATA or METADATA-SERVER extension. +func (c *Client) SetMetadata(mailbox string, entries map[string]*[]byte) *Command { + cmd := &Command{} + enc := c.beginCommand("SETMETADATA", cmd) + enc.SP().Mailbox(mailbox).SP().Special('(') + i := 0 + for k, v := range entries { + if i > 0 { + enc.SP() + } + enc.String(k).SP() + if v == nil { + enc.NIL() + } else { + enc.String(string(*v)) // TODO: use literals if required + } + i++ + } + enc.Special(')') + enc.end() + return cmd +} + +func (c *Client) handleMetadata() error { + data, err := readMetadataResp(c.dec) + if err != nil { + return fmt.Errorf("in metadata-resp: %v", err) + } + + cmd := c.findPendingCmdFunc(func(anyCmd command) bool { + cmd, ok := anyCmd.(*GetMetadataCommand) + return ok && cmd.mailbox == data.Mailbox + }) + if cmd != nil && len(data.EntryValues) > 0 { + cmd := cmd.(*GetMetadataCommand) + cmd.data.Mailbox = data.Mailbox + if cmd.data.Entries == nil { + cmd.data.Entries = make(map[string]*[]byte) + } + // The server might send multiple METADATA responses for a single + // METADATA command + for k, v := range data.EntryValues { + cmd.data.Entries[k] = v + } + } else if handler := c.options.unilateralDataHandler().Metadata; handler != nil && len(data.EntryList) > 0 { + handler(data.Mailbox, data.EntryList) + } + + return nil +} + +// GetMetadataCommand is a GETMETADATA command. +type GetMetadataCommand struct { + commandBase + mailbox string + data GetMetadataData +} + +func (cmd *GetMetadataCommand) Wait() (*GetMetadataData, error) { + return &cmd.data, cmd.wait() +} + +// GetMetadataData is the data returned by the GETMETADATA command. 
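A hedged sketch for the GETMETADATA API above; the "/shared/comment" entry is just an illustrative RFC 5464 entry name and the exampleGetMetadata helper is not part of the vendored code.

// exampleGetMetadata reads one metadata entry from INBOX, if the server has it.
func exampleGetMetadata(c *imapclient.Client) error {
	data, err := c.GetMetadata("INBOX", []string{"/shared/comment"}, nil).Wait()
	if err != nil {
		return err
	}
	if v := data.Entries["/shared/comment"]; v != nil {
		fmt.Println(string(*v))
	}
	return nil
}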
+type GetMetadataData struct { + Mailbox string + Entries map[string]*[]byte +} + +type metadataResp struct { + Mailbox string + EntryList []string + EntryValues map[string]*[]byte +} + +func readMetadataResp(dec *imapwire.Decoder) (*metadataResp, error) { + var data metadataResp + + if !dec.ExpectMailbox(&data.Mailbox) || !dec.ExpectSP() { + return nil, dec.Err() + } + + isList, err := dec.List(func() error { + var name string + if !dec.ExpectAString(&name) || !dec.ExpectSP() { + return dec.Err() + } + + // TODO: decode as []byte + var ( + value *[]byte + s string + ) + if dec.String(&s) || dec.Literal(&s) { + b := []byte(s) + value = &b + } else if !dec.ExpectNIL() { + return dec.Err() + } + + if data.EntryValues == nil { + data.EntryValues = make(map[string]*[]byte) + } + data.EntryValues[name] = value + return nil + }) + if err != nil { + return nil, err + } else if !isList { + var name string + if !dec.ExpectAString(&name) { + return nil, dec.Err() + } + data.EntryList = append(data.EntryList, name) + + for dec.SP() { + if !dec.ExpectAString(&name) { + return nil, dec.Err() + } + data.EntryList = append(data.EntryList, name) + } + } + + return &data, nil +} diff --git a/vendor/github.com/emersion/go-imap/v2/imapclient/move.go b/vendor/github.com/emersion/go-imap/v2/imapclient/move.go new file mode 100644 index 0000000000..6fa0b62e2d --- /dev/null +++ b/vendor/github.com/emersion/go-imap/v2/imapclient/move.go @@ -0,0 +1,74 @@ +package imapclient + +import ( + "github.com/emersion/go-imap/v2" + "github.com/emersion/go-imap/v2/internal/imapwire" +) + +// Move sends a MOVE command. +// +// If the server doesn't support IMAP4rev2 nor the MOVE extension, a fallback +// with COPY + STORE + EXPUNGE commands is used. +func (c *Client) Move(numSet imap.NumSet, mailbox string) *MoveCommand { + // If the server doesn't support MOVE, fallback to [UID] COPY, + // [UID] STORE +FLAGS.SILENT \Deleted and [UID] EXPUNGE + cmdName := "MOVE" + if !c.Caps().Has(imap.CapMove) { + cmdName = "COPY" + } + + cmd := &MoveCommand{} + enc := c.beginCommand(uidCmdName(cmdName, imapwire.NumSetKind(numSet)), cmd) + enc.SP().NumSet(numSet).SP().Mailbox(mailbox) + enc.end() + + if cmdName == "COPY" { + cmd.store = c.Store(numSet, &imap.StoreFlags{ + Op: imap.StoreFlagsAdd, + Silent: true, + Flags: []imap.Flag{imap.FlagDeleted}, + }, nil) + if uidSet, ok := numSet.(imap.UIDSet); ok && c.Caps().Has(imap.CapUIDPlus) { + cmd.expunge = c.UIDExpunge(uidSet) + } else { + cmd.expunge = c.Expunge() + } + } + + return cmd +} + +// MoveCommand is a MOVE command. +type MoveCommand struct { + commandBase + data MoveData + + // Fallback + store *FetchCommand + expunge *ExpungeCommand +} + +func (cmd *MoveCommand) Wait() (*MoveData, error) { + if err := cmd.wait(); err != nil { + return nil, err + } + if cmd.store != nil { + if err := cmd.store.Close(); err != nil { + return nil, err + } + } + if cmd.expunge != nil { + if err := cmd.expunge.Close(); err != nil { + return nil, err + } + } + return &cmd.data, nil +} + +// MoveData contains the data returned by a MOVE command. 
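A short, hedged sketch for the MOVE API above; the "Archive" mailbox and the exampleMove helper are illustrative. On servers without MOVE the client transparently falls back to COPY, STORE \Deleted and EXPUNGE, as the code above explains.

// exampleMove moves the given UIDs into "Archive" and waits for completion,
// including the fallback commands when they are used.
func exampleMove(c *imapclient.Client, uids imap.UIDSet) error {
	_, err := c.Move(uids, "Archive").Wait()
	return err
}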
+type MoveData struct { + // requires UIDPLUS or IMAP4rev2 + UIDValidity uint32 + SourceUIDs imap.NumSet + DestUIDs imap.NumSet +} diff --git a/vendor/github.com/emersion/go-imap/v2/imapclient/namespace.go b/vendor/github.com/emersion/go-imap/v2/imapclient/namespace.go new file mode 100644 index 0000000000..8c4738ea50 --- /dev/null +++ b/vendor/github.com/emersion/go-imap/v2/imapclient/namespace.go @@ -0,0 +1,110 @@ +package imapclient + +import ( + "fmt" + + "github.com/emersion/go-imap/v2" + "github.com/emersion/go-imap/v2/internal/imapwire" +) + +// Namespace sends a NAMESPACE command. +// +// This command requires support for IMAP4rev2 or the NAMESPACE extension. +func (c *Client) Namespace() *NamespaceCommand { + cmd := &NamespaceCommand{} + c.beginCommand("NAMESPACE", cmd).end() + return cmd +} + +func (c *Client) handleNamespace() error { + data, err := readNamespaceResponse(c.dec) + if err != nil { + return fmt.Errorf("in namespace-response: %v", err) + } + if cmd := findPendingCmdByType[*NamespaceCommand](c); cmd != nil { + cmd.data = *data + } + return nil +} + +// NamespaceCommand is a NAMESPACE command. +type NamespaceCommand struct { + commandBase + data imap.NamespaceData +} + +func (cmd *NamespaceCommand) Wait() (*imap.NamespaceData, error) { + return &cmd.data, cmd.wait() +} + +func readNamespaceResponse(dec *imapwire.Decoder) (*imap.NamespaceData, error) { + var ( + data imap.NamespaceData + err error + ) + + data.Personal, err = readNamespace(dec) + if err != nil { + return nil, err + } + + if !dec.ExpectSP() { + return nil, dec.Err() + } + + data.Other, err = readNamespace(dec) + if err != nil { + return nil, err + } + + if !dec.ExpectSP() { + return nil, dec.Err() + } + + data.Shared, err = readNamespace(dec) + if err != nil { + return nil, err + } + + return &data, nil +} + +func readNamespace(dec *imapwire.Decoder) ([]imap.NamespaceDescriptor, error) { + var l []imap.NamespaceDescriptor + err := dec.ExpectNList(func() error { + descr, err := readNamespaceDescr(dec) + if err != nil { + return fmt.Errorf("in namespace-descr: %v", err) + } + l = append(l, *descr) + return nil + }) + return l, err +} + +func readNamespaceDescr(dec *imapwire.Decoder) (*imap.NamespaceDescriptor, error) { + var descr imap.NamespaceDescriptor + + if !dec.ExpectSpecial('(') || !dec.ExpectString(&descr.Prefix) || !dec.ExpectSP() { + return nil, dec.Err() + } + + var err error + descr.Delim, err = readDelim(dec) + if err != nil { + return nil, err + } + + // Skip namespace-response-extensions + for dec.SP() { + if !dec.DiscardValue() { + return nil, dec.Err() + } + } + + if !dec.ExpectSpecial(')') { + return nil, dec.Err() + } + + return &descr, nil +} diff --git a/vendor/github.com/emersion/go-imap/v2/imapclient/quota.go b/vendor/github.com/emersion/go-imap/v2/imapclient/quota.go new file mode 100644 index 0000000000..6775b9f646 --- /dev/null +++ b/vendor/github.com/emersion/go-imap/v2/imapclient/quota.go @@ -0,0 +1,176 @@ +package imapclient + +import ( + "fmt" + + "github.com/emersion/go-imap/v2" + "github.com/emersion/go-imap/v2/internal/imapwire" +) + +// GetQuota sends a GETQUOTA command. +// +// This command requires support for the QUOTA extension. +func (c *Client) GetQuota(root string) *GetQuotaCommand { + cmd := &GetQuotaCommand{root: root} + enc := c.beginCommand("GETQUOTA", cmd) + enc.SP().String(root) + enc.end() + return cmd +} + +// GetQuotaRoot sends a GETQUOTAROOT command. +// +// This command requires support for the QUOTA extension. 
+func (c *Client) GetQuotaRoot(mailbox string) *GetQuotaRootCommand { + cmd := &GetQuotaRootCommand{mailbox: mailbox} + enc := c.beginCommand("GETQUOTAROOT", cmd) + enc.SP().Mailbox(mailbox) + enc.end() + return cmd +} + +// SetQuota sends a SETQUOTA command. +// +// This command requires support for the SETQUOTA extension. +func (c *Client) SetQuota(root string, limits map[imap.QuotaResourceType]int64) *Command { + // TODO: consider returning the QUOTA response data? + cmd := &Command{} + enc := c.beginCommand("SETQUOTA", cmd) + enc.SP().String(root).SP().Special('(') + i := 0 + for typ, limit := range limits { + if i > 0 { + enc.SP() + } + enc.Atom(string(typ)).SP().Number64(limit) + i++ + } + enc.Special(')') + enc.end() + return cmd +} + +func (c *Client) handleQuota() error { + data, err := readQuotaResponse(c.dec) + if err != nil { + return fmt.Errorf("in quota-response: %v", err) + } + + cmd := c.findPendingCmdFunc(func(cmd command) bool { + switch cmd := cmd.(type) { + case *GetQuotaCommand: + return cmd.root == data.Root + case *GetQuotaRootCommand: + for _, root := range cmd.roots { + if root == data.Root { + return true + } + } + return false + default: + return false + } + }) + switch cmd := cmd.(type) { + case *GetQuotaCommand: + cmd.data = data + case *GetQuotaRootCommand: + cmd.data = append(cmd.data, *data) + } + return nil +} + +func (c *Client) handleQuotaRoot() error { + mailbox, roots, err := readQuotaRoot(c.dec) + if err != nil { + return fmt.Errorf("in quotaroot-response: %v", err) + } + + cmd := c.findPendingCmdFunc(func(anyCmd command) bool { + cmd, ok := anyCmd.(*GetQuotaRootCommand) + if !ok { + return false + } + return cmd.mailbox == mailbox + }) + if cmd != nil { + cmd := cmd.(*GetQuotaRootCommand) + cmd.roots = roots + } + return nil +} + +// GetQuotaCommand is a GETQUOTA command. +type GetQuotaCommand struct { + commandBase + root string + data *QuotaData +} + +func (cmd *GetQuotaCommand) Wait() (*QuotaData, error) { + if err := cmd.wait(); err != nil { + return nil, err + } + return cmd.data, nil +} + +// GetQuotaRootCommand is a GETQUOTAROOT command. +type GetQuotaRootCommand struct { + commandBase + mailbox string + roots []string + data []QuotaData +} + +func (cmd *GetQuotaRootCommand) Wait() ([]QuotaData, error) { + if err := cmd.wait(); err != nil { + return nil, err + } + return cmd.data, nil +} + +// QuotaData is the data returned by a QUOTA response. +type QuotaData struct { + Root string + Resources map[imap.QuotaResourceType]QuotaResourceData +} + +// QuotaResourceData contains the usage and limit for a quota resource. 
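+//
+// Usage and Limit are expressed in the unit of the resource type, e.g. units
+// of 1024 octets for the STORAGE resource and a message count for MESSAGE.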
+type QuotaResourceData struct { + Usage int64 + Limit int64 +} + +func readQuotaResponse(dec *imapwire.Decoder) (*QuotaData, error) { + var data QuotaData + if !dec.ExpectAString(&data.Root) || !dec.ExpectSP() { + return nil, dec.Err() + } + data.Resources = make(map[imap.QuotaResourceType]QuotaResourceData) + err := dec.ExpectList(func() error { + var ( + name string + resData QuotaResourceData + ) + if !dec.ExpectAtom(&name) || !dec.ExpectSP() || !dec.ExpectNumber64(&resData.Usage) || !dec.ExpectSP() || !dec.ExpectNumber64(&resData.Limit) { + return fmt.Errorf("in quota-resource: %v", dec.Err()) + } + data.Resources[imap.QuotaResourceType(name)] = resData + return nil + }) + return &data, err +} + +func readQuotaRoot(dec *imapwire.Decoder) (mailbox string, roots []string, err error) { + if !dec.ExpectMailbox(&mailbox) { + return "", nil, dec.Err() + } + for dec.SP() { + var root string + if !dec.ExpectAString(&root) { + return "", nil, dec.Err() + } + roots = append(roots, root) + } + return mailbox, roots, nil +} diff --git a/vendor/github.com/emersion/go-imap/v2/imapclient/search.go b/vendor/github.com/emersion/go-imap/v2/imapclient/search.go new file mode 100644 index 0000000000..ee2b2b9bb8 --- /dev/null +++ b/vendor/github.com/emersion/go-imap/v2/imapclient/search.go @@ -0,0 +1,401 @@ +package imapclient + +import ( + "fmt" + "strings" + "time" + "unicode" + + "github.com/emersion/go-imap/v2" + "github.com/emersion/go-imap/v2/internal" + "github.com/emersion/go-imap/v2/internal/imapwire" +) + +func returnSearchOptions(options *imap.SearchOptions) []string { + if options == nil { + return nil + } + + m := map[string]bool{ + "MIN": options.ReturnMin, + "MAX": options.ReturnMax, + "ALL": options.ReturnAll, + "COUNT": options.ReturnCount, + } + + var l []string + for k, ret := range m { + if ret { + l = append(l, k) + } + } + return l +} + +func (c *Client) search(numKind imapwire.NumKind, criteria *imap.SearchCriteria, options *imap.SearchOptions) *SearchCommand { + // The IMAP4rev2 SEARCH charset defaults to UTF-8. When UTF8=ACCEPT is + // enabled, specifying any CHARSET is invalid. For IMAP4rev1 the default is + // undefined and only US-ASCII support is required. What's more, some + // servers completely reject the CHARSET keyword. So, let's check if we + // actually have UTF-8 strings in the search criteria before using that. + // TODO: there might be a benefit in specifying CHARSET UTF-8 for IMAP4rev1 + // servers even if we only send ASCII characters: the server then must + // decode encoded headers and Content-Transfer-Encoding before matching the + // criteria. + var charset string + if !c.Caps().Has(imap.CapIMAP4rev2) && !c.enabled.Has(imap.CapUTF8Accept) && !searchCriteriaIsASCII(criteria) { + charset = "UTF-8" + } + + var all imap.NumSet + switch numKind { + case imapwire.NumKindSeq: + all = imap.SeqSet(nil) + case imapwire.NumKindUID: + all = imap.UIDSet(nil) + } + + cmd := &SearchCommand{} + cmd.data.All = all + enc := c.beginCommand(uidCmdName("SEARCH", numKind), cmd) + if returnOpts := returnSearchOptions(options); len(returnOpts) > 0 { + enc.SP().Atom("RETURN").SP().List(len(returnOpts), func(i int) { + enc.Atom(returnOpts[i]) + }) + } + enc.SP() + if charset != "" { + enc.Atom("CHARSET").SP().Atom(charset).SP() + } + writeSearchKey(enc.Encoder, criteria) + enc.end() + return cmd +} + +// Search sends a SEARCH command. 
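+//
+// A minimal usage sketch, assuming an authenticated Client c with a selected
+// mailbox (criteria fields as encoded by writeSearchKey below):
+//
+//	data, err := c.Search(&imap.SearchCriteria{
+//		NotFlag: []imap.Flag{imap.FlagSeen},
+//		Larger:  1024 * 1024,
+//	}, nil).Wait()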
+func (c *Client) Search(criteria *imap.SearchCriteria, options *imap.SearchOptions) *SearchCommand { + return c.search(imapwire.NumKindSeq, criteria, options) +} + +// UIDSearch sends a UID SEARCH command. +func (c *Client) UIDSearch(criteria *imap.SearchCriteria, options *imap.SearchOptions) *SearchCommand { + return c.search(imapwire.NumKindUID, criteria, options) +} + +func (c *Client) handleSearch() error { + cmd := findPendingCmdByType[*SearchCommand](c) + for c.dec.SP() { + if c.dec.Special('(') { + var name string + if !c.dec.ExpectAtom(&name) || !c.dec.ExpectSP() { + return c.dec.Err() + } else if strings.ToUpper(name) != "MODSEQ" { + return fmt.Errorf("in search-sort-mod-seq: expected %q, got %q", "MODSEQ", name) + } + var modSeq uint64 + if !c.dec.ExpectModSeq(&modSeq) || !c.dec.ExpectSpecial(')') { + return c.dec.Err() + } + if cmd != nil { + cmd.data.ModSeq = modSeq + } + break + } + + var num uint32 + if !c.dec.ExpectNumber(&num) { + return c.dec.Err() + } + if cmd != nil { + switch all := cmd.data.All.(type) { + case imap.SeqSet: + all.AddNum(num) + cmd.data.All = all + case imap.UIDSet: + all.AddNum(imap.UID(num)) + cmd.data.All = all + } + } + } + return nil +} + +func (c *Client) handleESearch() error { + if !c.dec.ExpectSP() { + return c.dec.Err() + } + tag, data, err := readESearchResponse(c.dec) + if err != nil { + return err + } + cmd := c.findPendingCmdFunc(func(anyCmd command) bool { + cmd, ok := anyCmd.(*SearchCommand) + if !ok { + return false + } + if tag != "" { + return cmd.tag == tag + } else { + return true + } + }) + if cmd != nil { + cmd := cmd.(*SearchCommand) + cmd.data = *data + } + return nil +} + +// SearchCommand is a SEARCH command. +type SearchCommand struct { + commandBase + data imap.SearchData +} + +func (cmd *SearchCommand) Wait() (*imap.SearchData, error) { + return &cmd.data, cmd.wait() +} + +func writeSearchKey(enc *imapwire.Encoder, criteria *imap.SearchCriteria) { + firstItem := true + encodeItem := func() *imapwire.Encoder { + if !firstItem { + enc.SP() + } + firstItem = false + return enc + } + + for _, seqSet := range criteria.SeqNum { + encodeItem().NumSet(seqSet) + } + for _, uidSet := range criteria.UID { + encodeItem().Atom("UID").SP().NumSet(uidSet) + } + + if !criteria.Since.IsZero() && !criteria.Before.IsZero() && criteria.Before.Sub(criteria.Since) == 24*time.Hour { + encodeItem().Atom("ON").SP().String(criteria.Since.Format(internal.DateLayout)) + } else { + if !criteria.Since.IsZero() { + encodeItem().Atom("SINCE").SP().String(criteria.Since.Format(internal.DateLayout)) + } + if !criteria.Before.IsZero() { + encodeItem().Atom("BEFORE").SP().String(criteria.Before.Format(internal.DateLayout)) + } + } + if !criteria.SentSince.IsZero() && !criteria.SentBefore.IsZero() && criteria.SentBefore.Sub(criteria.SentSince) == 24*time.Hour { + encodeItem().Atom("SENTON").SP().String(criteria.SentSince.Format(internal.DateLayout)) + } else { + if !criteria.SentSince.IsZero() { + encodeItem().Atom("SENTSINCE").SP().String(criteria.SentSince.Format(internal.DateLayout)) + } + if !criteria.SentBefore.IsZero() { + encodeItem().Atom("SENTBEFORE").SP().String(criteria.SentBefore.Format(internal.DateLayout)) + } + } + + for _, kv := range criteria.Header { + switch k := strings.ToUpper(kv.Key); k { + case "BCC", "CC", "FROM", "SUBJECT", "TO": + encodeItem().Atom(k) + default: + encodeItem().Atom("HEADER").SP().String(kv.Key) + } + enc.SP().String(kv.Value) + } + + for _, s := range criteria.Body { + encodeItem().Atom("BODY").SP().String(s) + } + 
for _, s := range criteria.Text { + encodeItem().Atom("TEXT").SP().String(s) + } + + for _, flag := range criteria.Flag { + if k := flagSearchKey(flag); k != "" { + encodeItem().Atom(k) + } else { + encodeItem().Atom("KEYWORD").SP().Flag(flag) + } + } + for _, flag := range criteria.NotFlag { + if k := flagSearchKey(flag); k != "" { + encodeItem().Atom("UN" + k) + } else { + encodeItem().Atom("UNKEYWORD").SP().Flag(flag) + } + } + + if criteria.Larger > 0 { + encodeItem().Atom("LARGER").SP().Number64(criteria.Larger) + } + if criteria.Smaller > 0 { + encodeItem().Atom("SMALLER").SP().Number64(criteria.Smaller) + } + + if modSeq := criteria.ModSeq; modSeq != nil { + encodeItem().Atom("MODSEQ") + if modSeq.MetadataName != "" && modSeq.MetadataType != "" { + enc.SP().Quoted(modSeq.MetadataName).SP().Atom(string(modSeq.MetadataType)) + } + enc.SP() + if modSeq.ModSeq != 0 { + enc.ModSeq(modSeq.ModSeq) + } else { + enc.Atom("0") + } + } + + for _, not := range criteria.Not { + encodeItem().Atom("NOT").SP() + enc.Special('(') + writeSearchKey(enc, ¬) + enc.Special(')') + } + for _, or := range criteria.Or { + encodeItem().Atom("OR").SP() + enc.Special('(') + writeSearchKey(enc, &or[0]) + enc.Special(')') + enc.SP() + enc.Special('(') + writeSearchKey(enc, &or[1]) + enc.Special(')') + } + + if firstItem { + enc.Atom("ALL") + } +} + +func flagSearchKey(flag imap.Flag) string { + switch flag { + case imap.FlagAnswered, imap.FlagDeleted, imap.FlagDraft, imap.FlagFlagged, imap.FlagSeen: + return strings.ToUpper(strings.TrimPrefix(string(flag), "\\")) + default: + return "" + } +} + +func readESearchResponse(dec *imapwire.Decoder) (tag string, data *imap.SearchData, err error) { + data = &imap.SearchData{} + if dec.Special('(') { // search-correlator + var correlator string + if !dec.ExpectAtom(&correlator) || !dec.ExpectSP() || !dec.ExpectAString(&tag) || !dec.ExpectSpecial(')') { + return "", nil, dec.Err() + } + if correlator != "TAG" { + return "", nil, fmt.Errorf("in search-correlator: name must be TAG, but got %q", correlator) + } + } + + var name string + if !dec.SP() { + return tag, data, nil + } else if !dec.ExpectAtom(&name) { + return "", nil, dec.Err() + } + data.UID = name == "UID" + + if data.UID { + if !dec.SP() { + return tag, data, nil + } else if !dec.ExpectAtom(&name) { + return "", nil, dec.Err() + } + } + + for { + if !dec.ExpectSP() { + return "", nil, dec.Err() + } + + switch strings.ToUpper(name) { + case "MIN": + var num uint32 + if !dec.ExpectNumber(&num) { + return "", nil, dec.Err() + } + data.Min = num + case "MAX": + var num uint32 + if !dec.ExpectNumber(&num) { + return "", nil, dec.Err() + } + data.Max = num + case "ALL": + numKind := imapwire.NumKindSeq + if data.UID { + numKind = imapwire.NumKindUID + } + if !dec.ExpectNumSet(numKind, &data.All) { + return "", nil, dec.Err() + } + if data.All.Dynamic() { + return "", nil, fmt.Errorf("imapclient: server returned a dynamic ALL number set in SEARCH response") + } + case "COUNT": + var num uint32 + if !dec.ExpectNumber(&num) { + return "", nil, dec.Err() + } + data.Count = num + case "MODSEQ": + var modSeq uint64 + if !dec.ExpectModSeq(&modSeq) { + return "", nil, dec.Err() + } + data.ModSeq = modSeq + default: + if !dec.DiscardValue() { + return "", nil, dec.Err() + } + } + + if !dec.SP() { + break + } else if !dec.ExpectAtom(&name) { + return "", nil, dec.Err() + } + } + + return tag, data, nil +} + +func searchCriteriaIsASCII(criteria *imap.SearchCriteria) bool { + for _, kv := range criteria.Header { + if 
!isASCII(kv.Key) || !isASCII(kv.Value) { + return false + } + } + for _, s := range criteria.Body { + if !isASCII(s) { + return false + } + } + for _, s := range criteria.Text { + if !isASCII(s) { + return false + } + } + for _, not := range criteria.Not { + if !searchCriteriaIsASCII(¬) { + return false + } + } + for _, or := range criteria.Or { + if !searchCriteriaIsASCII(&or[0]) || !searchCriteriaIsASCII(&or[1]) { + return false + } + } + return true +} + +func isASCII(s string) bool { + for i := 0; i < len(s); i++ { + if s[i] > unicode.MaxASCII { + return false + } + } + return true +} diff --git a/vendor/github.com/emersion/go-imap/v2/imapclient/select.go b/vendor/github.com/emersion/go-imap/v2/imapclient/select.go new file mode 100644 index 0000000000..90cd11545b --- /dev/null +++ b/vendor/github.com/emersion/go-imap/v2/imapclient/select.go @@ -0,0 +1,100 @@ +package imapclient + +import ( + "github.com/emersion/go-imap/v2" + "github.com/emersion/go-imap/v2/internal" +) + +// Select sends a SELECT or EXAMINE command. +// +// A nil options pointer is equivalent to a zero options value. +func (c *Client) Select(mailbox string, options *imap.SelectOptions) *SelectCommand { + cmdName := "SELECT" + if options != nil && options.ReadOnly { + cmdName = "EXAMINE" + } + + cmd := &SelectCommand{mailbox: mailbox} + enc := c.beginCommand(cmdName, cmd) + enc.SP().Mailbox(mailbox) + if options != nil && options.CondStore { + enc.SP().Special('(').Atom("CONDSTORE").Special(')') + } + enc.end() + return cmd +} + +// Unselect sends an UNSELECT command. +// +// This command requires support for IMAP4rev2 or the UNSELECT extension. +func (c *Client) Unselect() *Command { + cmd := &unselectCommand{} + c.beginCommand("UNSELECT", cmd).end() + return &cmd.Command +} + +// UnselectAndExpunge sends a CLOSE command. +// +// CLOSE implicitly performs a silent EXPUNGE command. +func (c *Client) UnselectAndExpunge() *Command { + cmd := &unselectCommand{} + c.beginCommand("CLOSE", cmd).end() + return &cmd.Command +} + +func (c *Client) handleFlags() error { + flags, err := internal.ExpectFlagList(c.dec) + if err != nil { + return err + } + + c.mutex.Lock() + if c.state == imap.ConnStateSelected { + c.mailbox = c.mailbox.copy() + c.mailbox.PermanentFlags = flags + } + c.mutex.Unlock() + + cmd := findPendingCmdByType[*SelectCommand](c) + if cmd != nil { + cmd.data.Flags = flags + } else if handler := c.options.unilateralDataHandler().Mailbox; handler != nil { + handler(&UnilateralDataMailbox{Flags: flags}) + } + + return nil +} + +func (c *Client) handleExists(num uint32) error { + cmd := findPendingCmdByType[*SelectCommand](c) + if cmd != nil { + cmd.data.NumMessages = num + } else { + c.mutex.Lock() + if c.state == imap.ConnStateSelected { + c.mailbox = c.mailbox.copy() + c.mailbox.NumMessages = num + } + c.mutex.Unlock() + + if handler := c.options.unilateralDataHandler().Mailbox; handler != nil { + handler(&UnilateralDataMailbox{NumMessages: &num}) + } + } + return nil +} + +// SelectCommand is a SELECT command. 
+type SelectCommand struct { + commandBase + mailbox string + data imap.SelectData +} + +func (cmd *SelectCommand) Wait() (*imap.SelectData, error) { + return &cmd.data, cmd.wait() +} + +type unselectCommand struct { + Command +} diff --git a/vendor/github.com/emersion/go-imap/v2/imapclient/sort.go b/vendor/github.com/emersion/go-imap/v2/imapclient/sort.go new file mode 100644 index 0000000000..260706d391 --- /dev/null +++ b/vendor/github.com/emersion/go-imap/v2/imapclient/sort.go @@ -0,0 +1,84 @@ +package imapclient + +import ( + "github.com/emersion/go-imap/v2" + "github.com/emersion/go-imap/v2/internal/imapwire" +) + +type SortKey string + +const ( + SortKeyArrival SortKey = "ARRIVAL" + SortKeyCc SortKey = "CC" + SortKeyDate SortKey = "DATE" + SortKeyFrom SortKey = "FROM" + SortKeySize SortKey = "SIZE" + SortKeySubject SortKey = "SUBJECT" + SortKeyTo SortKey = "TO" +) + +type SortCriterion struct { + Key SortKey + Reverse bool +} + +// SortOptions contains options for the SORT command. +type SortOptions struct { + SearchCriteria *imap.SearchCriteria + SortCriteria []SortCriterion +} + +func (c *Client) sort(numKind imapwire.NumKind, options *SortOptions) *SortCommand { + cmd := &SortCommand{} + enc := c.beginCommand(uidCmdName("SORT", numKind), cmd) + enc.SP().List(len(options.SortCriteria), func(i int) { + criterion := options.SortCriteria[i] + if criterion.Reverse { + enc.Atom("REVERSE").SP() + } + enc.Atom(string(criterion.Key)) + }) + enc.SP().Atom("UTF-8").SP() + writeSearchKey(enc.Encoder, options.SearchCriteria) + enc.end() + return cmd +} + +func (c *Client) handleSort() error { + cmd := findPendingCmdByType[*SortCommand](c) + for c.dec.SP() { + var num uint32 + if !c.dec.ExpectNumber(&num) { + return c.dec.Err() + } + if cmd != nil { + cmd.nums = append(cmd.nums, num) + } + } + return nil +} + +// Sort sends a SORT command. +// +// This command requires support for the SORT extension. +func (c *Client) Sort(options *SortOptions) *SortCommand { + return c.sort(imapwire.NumKindSeq, options) +} + +// UIDSort sends a UID SORT command. +// +// See Sort. +func (c *Client) UIDSort(options *SortOptions) *SortCommand { + return c.sort(imapwire.NumKindUID, options) +} + +// SortCommand is a SORT command. +type SortCommand struct { + commandBase + nums []uint32 +} + +func (cmd *SortCommand) Wait() ([]uint32, error) { + err := cmd.wait() + return cmd.nums, err +} diff --git a/vendor/github.com/emersion/go-imap/v2/imapclient/starttls.go b/vendor/github.com/emersion/go-imap/v2/imapclient/starttls.go new file mode 100644 index 0000000000..8b63cca14a --- /dev/null +++ b/vendor/github.com/emersion/go-imap/v2/imapclient/starttls.go @@ -0,0 +1,83 @@ +package imapclient + +import ( + "bufio" + "bytes" + "crypto/tls" + "io" + "net" +) + +// startTLS sends a STARTTLS command. +// +// Unlike other commands, this method blocks until the command completes. 
+func (c *Client) startTLS(config *tls.Config) error { + upgradeDone := make(chan struct{}) + cmd := &startTLSCommand{ + tlsConfig: config, + upgradeDone: upgradeDone, + } + enc := c.beginCommand("STARTTLS", cmd) + enc.flush() + defer enc.end() + + // Once a client issues a STARTTLS command, it MUST NOT issue further + // commands until a server response is seen and the TLS negotiation is + // complete + + if err := cmd.wait(); err != nil { + return err + } + + // The decoder goroutine will invoke Client.upgradeStartTLS + <-upgradeDone + + return cmd.tlsConn.Handshake() +} + +// upgradeStartTLS finishes the STARTTLS upgrade after the server has sent an +// OK response. It runs in the decoder goroutine. +func (c *Client) upgradeStartTLS(startTLS *startTLSCommand) { + defer close(startTLS.upgradeDone) + + // Drain buffered data from our bufio.Reader + var buf bytes.Buffer + if _, err := io.CopyN(&buf, c.br, int64(c.br.Buffered())); err != nil { + panic(err) // unreachable + } + + var cleartextConn net.Conn + if buf.Len() > 0 { + r := io.MultiReader(&buf, c.conn) + cleartextConn = startTLSConn{c.conn, r} + } else { + cleartextConn = c.conn + } + + tlsConn := tls.Client(cleartextConn, startTLS.tlsConfig) + rw := c.options.wrapReadWriter(tlsConn) + + c.br.Reset(rw) + // Unfortunately we can't re-use the bufio.Writer here, it races with + // Client.StartTLS + c.bw = bufio.NewWriter(rw) + + startTLS.tlsConn = tlsConn +} + +type startTLSCommand struct { + commandBase + tlsConfig *tls.Config + + upgradeDone chan<- struct{} + tlsConn *tls.Conn +} + +type startTLSConn struct { + net.Conn + r io.Reader +} + +func (conn startTLSConn) Read(b []byte) (int, error) { + return conn.r.Read(b) +} diff --git a/vendor/github.com/emersion/go-imap/v2/imapclient/status.go b/vendor/github.com/emersion/go-imap/v2/imapclient/status.go new file mode 100644 index 0000000000..86cf0c3310 --- /dev/null +++ b/vendor/github.com/emersion/go-imap/v2/imapclient/status.go @@ -0,0 +1,161 @@ +package imapclient + +import ( + "fmt" + "strings" + + "github.com/emersion/go-imap/v2" + "github.com/emersion/go-imap/v2/internal/imapwire" +) + +func statusItems(options *imap.StatusOptions) []string { + m := map[string]bool{ + "MESSAGES": options.NumMessages, + "UIDNEXT": options.UIDNext, + "UIDVALIDITY": options.UIDValidity, + "UNSEEN": options.NumUnseen, + "DELETED": options.NumDeleted, + "SIZE": options.Size, + "APPENDLIMIT": options.AppendLimit, + "DELETED-STORAGE": options.DeletedStorage, + "HIGHESTMODSEQ": options.HighestModSeq, + } + + var l []string + for k, req := range m { + if req { + l = append(l, k) + } + } + return l +} + +// Status sends a STATUS command. +// +// A nil options pointer is equivalent to a zero options value. 
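+//
+// A minimal usage sketch, assuming an authenticated Client c:
+//
+//	data, err := c.Status("INBOX", &imap.StatusOptions{
+//		NumMessages: true,
+//		UIDNext:     true,
+//	}).Wait()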
+func (c *Client) Status(mailbox string, options *imap.StatusOptions) *StatusCommand { + if options == nil { + options = new(imap.StatusOptions) + } + + cmd := &StatusCommand{mailbox: mailbox} + enc := c.beginCommand("STATUS", cmd) + enc.SP().Mailbox(mailbox).SP() + items := statusItems(options) + enc.List(len(items), func(i int) { + enc.Atom(items[i]) + }) + enc.end() + return cmd +} + +func (c *Client) handleStatus() error { + data, err := readStatus(c.dec) + if err != nil { + return fmt.Errorf("in status: %v", err) + } + + cmd := c.findPendingCmdFunc(func(cmd command) bool { + switch cmd := cmd.(type) { + case *StatusCommand: + return cmd.mailbox == data.Mailbox + case *ListCommand: + return cmd.returnStatus && cmd.pendingData != nil && cmd.pendingData.Mailbox == data.Mailbox + default: + return false + } + }) + switch cmd := cmd.(type) { + case *StatusCommand: + cmd.data = *data + case *ListCommand: + cmd.pendingData.Status = data + cmd.mailboxes <- cmd.pendingData + cmd.pendingData = nil + } + + return nil +} + +// StatusCommand is a STATUS command. +type StatusCommand struct { + commandBase + mailbox string + data imap.StatusData +} + +func (cmd *StatusCommand) Wait() (*imap.StatusData, error) { + return &cmd.data, cmd.wait() +} + +func readStatus(dec *imapwire.Decoder) (*imap.StatusData, error) { + var data imap.StatusData + + if !dec.ExpectMailbox(&data.Mailbox) || !dec.ExpectSP() { + return nil, dec.Err() + } + + err := dec.ExpectList(func() error { + if err := readStatusAttVal(dec, &data); err != nil { + return fmt.Errorf("in status-att-val: %v", dec.Err()) + } + return nil + }) + return &data, err +} + +func readStatusAttVal(dec *imapwire.Decoder, data *imap.StatusData) error { + var name string + if !dec.ExpectAtom(&name) || !dec.ExpectSP() { + return dec.Err() + } + + var ok bool + switch strings.ToUpper(name) { + case "MESSAGES": + var num uint32 + ok = dec.ExpectNumber(&num) + data.NumMessages = &num + case "UIDNEXT": + var uidNext imap.UID + ok = dec.ExpectUID(&uidNext) + data.UIDNext = uidNext + case "UIDVALIDITY": + ok = dec.ExpectNumber(&data.UIDValidity) + case "UNSEEN": + var num uint32 + ok = dec.ExpectNumber(&num) + data.NumUnseen = &num + case "DELETED": + var num uint32 + ok = dec.ExpectNumber(&num) + data.NumDeleted = &num + case "SIZE": + var size int64 + ok = dec.ExpectNumber64(&size) + data.Size = &size + case "APPENDLIMIT": + var num uint32 + if dec.Number(&num) { + ok = true + } else { + ok = dec.ExpectNIL() + num = ^uint32(0) + } + data.AppendLimit = &num + case "DELETED-STORAGE": + var storage int64 + ok = dec.ExpectNumber64(&storage) + data.DeletedStorage = &storage + case "HIGHESTMODSEQ": + ok = dec.ExpectModSeq(&data.HighestModSeq) + default: + if !dec.DiscardValue() { + return dec.Err() + } + } + if !ok { + return dec.Err() + } + return nil +} diff --git a/vendor/github.com/emersion/go-imap/v2/imapclient/store.go b/vendor/github.com/emersion/go-imap/v2/imapclient/store.go new file mode 100644 index 0000000000..a8be6d107c --- /dev/null +++ b/vendor/github.com/emersion/go-imap/v2/imapclient/store.go @@ -0,0 +1,44 @@ +package imapclient + +import ( + "fmt" + + "github.com/emersion/go-imap/v2" + "github.com/emersion/go-imap/v2/internal/imapwire" +) + +// Store sends a STORE command. +// +// Unless StoreFlags.Silent is set, the server will return the updated values. +// +// A nil options pointer is equivalent to a zero options value. 
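+//
+// A minimal usage sketch that adds the \Seen flag to a previously obtained
+// number set numSet:
+//
+//	cmd := c.Store(numSet, &imap.StoreFlags{
+//		Op:    imap.StoreFlagsAdd,
+//		Flags: []imap.Flag{imap.FlagSeen},
+//	}, nil)
+//	if err := cmd.Close(); err != nil {
+//		// handle error
+//	}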
+func (c *Client) Store(numSet imap.NumSet, store *imap.StoreFlags, options *imap.StoreOptions) *FetchCommand { + cmd := &FetchCommand{ + numSet: numSet, + msgs: make(chan *FetchMessageData, 128), + } + enc := c.beginCommand(uidCmdName("STORE", imapwire.NumSetKind(numSet)), cmd) + enc.SP().NumSet(numSet).SP() + if options != nil && options.UnchangedSince != 0 { + enc.Special('(').Atom("UNCHANGEDSINCE").SP().ModSeq(options.UnchangedSince).Special(')').SP() + } + switch store.Op { + case imap.StoreFlagsSet: + // nothing to do + case imap.StoreFlagsAdd: + enc.Special('+') + case imap.StoreFlagsDel: + enc.Special('-') + default: + panic(fmt.Errorf("imapclient: unknown store flags op: %v", store.Op)) + } + enc.Atom("FLAGS") + if store.Silent { + enc.Atom(".SILENT") + } + enc.SP().List(len(store.Flags), func(i int) { + enc.Flag(store.Flags[i]) + }) + enc.end() + return cmd +} diff --git a/vendor/github.com/emersion/go-imap/v2/imapclient/thread.go b/vendor/github.com/emersion/go-imap/v2/imapclient/thread.go new file mode 100644 index 0000000000..c341a18ef2 --- /dev/null +++ b/vendor/github.com/emersion/go-imap/v2/imapclient/thread.go @@ -0,0 +1,85 @@ +package imapclient + +import ( + "fmt" + + "github.com/emersion/go-imap/v2" + "github.com/emersion/go-imap/v2/internal/imapwire" +) + +// ThreadOptions contains options for the THREAD command. +type ThreadOptions struct { + Algorithm imap.ThreadAlgorithm + SearchCriteria *imap.SearchCriteria +} + +func (c *Client) thread(numKind imapwire.NumKind, options *ThreadOptions) *ThreadCommand { + cmd := &ThreadCommand{} + enc := c.beginCommand(uidCmdName("THREAD", numKind), cmd) + enc.SP().Atom(string(options.Algorithm)).SP().Atom("UTF-8").SP() + writeSearchKey(enc.Encoder, options.SearchCriteria) + enc.end() + return cmd +} + +// Thread sends a THREAD command. +// +// This command requires support for the THREAD extension. +func (c *Client) Thread(options *ThreadOptions) *ThreadCommand { + return c.thread(imapwire.NumKindSeq, options) +} + +// UIDThread sends a UID THREAD command. +// +// See Thread. +func (c *Client) UIDThread(options *ThreadOptions) *ThreadCommand { + return c.thread(imapwire.NumKindUID, options) +} + +func (c *Client) handleThread() error { + cmd := findPendingCmdByType[*ThreadCommand](c) + for c.dec.SP() { + data, err := readThreadList(c.dec) + if err != nil { + return fmt.Errorf("in thread-list: %v", err) + } + if cmd != nil { + cmd.data = append(cmd.data, *data) + } + } + return nil +} + +// ThreadCommand is a THREAD command. 
+type ThreadCommand struct { + commandBase + data []ThreadData +} + +func (cmd *ThreadCommand) Wait() ([]ThreadData, error) { + err := cmd.wait() + return cmd.data, err +} + +type ThreadData struct { + Chain []uint32 + SubThreads []ThreadData +} + +func readThreadList(dec *imapwire.Decoder) (*ThreadData, error) { + var data ThreadData + err := dec.ExpectList(func() error { + var num uint32 + if len(data.SubThreads) == 0 && dec.Number(&num) { + data.Chain = append(data.Chain, num) + } else { + sub, err := readThreadList(dec) + if err != nil { + return err + } + data.SubThreads = append(data.SubThreads, *sub) + } + return nil + }) + return &data, err +} diff --git a/vendor/github.com/emersion/go-imap/v2/internal/acl.go b/vendor/github.com/emersion/go-imap/v2/internal/acl.go new file mode 100644 index 0000000000..43c078751d --- /dev/null +++ b/vendor/github.com/emersion/go-imap/v2/internal/acl.go @@ -0,0 +1,13 @@ +package internal + +import ( + "github.com/emersion/go-imap/v2" +) + +func FormatRights(rm imap.RightModification, rs imap.RightSet) string { + s := "" + if rm != imap.RightModificationReplace { + s = string(rm) + } + return s + string(rs) +} diff --git a/vendor/github.com/emersion/go-imap/v2/internal/imapnum/numset.go b/vendor/github.com/emersion/go-imap/v2/internal/imapnum/numset.go new file mode 100644 index 0000000000..25a4f292f1 --- /dev/null +++ b/vendor/github.com/emersion/go-imap/v2/internal/imapnum/numset.go @@ -0,0 +1,306 @@ +package imapnum + +import ( + "fmt" + "strconv" + "strings" +) + +// Range represents a single seq-number or seq-range value (RFC 3501 ABNF). Values +// may be static (e.g. "1", "2:4") or dynamic (e.g. "*", "1:*"). A seq-number is +// represented by setting Start = Stop. Zero is used to represent "*", which is +// safe because seq-number uses nz-number rule. The order of values is always +// Start <= Stop, except when representing "n:*", where Start = n and Stop = 0. +type Range struct { + Start, Stop uint32 +} + +// Contains returns true if the seq-number q is contained in range value s. +// The dynamic value "*" contains only other "*" values, the dynamic range "n:*" +// contains "*" and all numbers >= n. +func (s Range) Contains(q uint32) bool { + if q == 0 { + return s.Stop == 0 // "*" is contained only in "*" and "n:*" + } + return s.Start != 0 && s.Start <= q && (q <= s.Stop || s.Stop == 0) +} + +// Less returns true if s precedes and does not contain seq-number q. +func (s Range) Less(q uint32) bool { + return (s.Stop < q || q == 0) && s.Stop != 0 +} + +// Merge combines range values s and t into a single union if the two +// intersect or one is a superset of the other. The order of s and t does not +// matter. If the values cannot be merged, s is returned unmodified and ok is +// set to false. 
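+//
+// For example, merging "1:5" with "4:9" yields "1:9", whereas "1:2" and "5:6"
+// cannot be merged.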
+func (s Range) Merge(t Range) (union Range, ok bool) { + union = s + if s == t { + return s, true + } + if s.Start != 0 && t.Start != 0 { + // s and t are any combination of "n", "n:m", or "n:*" + if s.Start > t.Start { + s, t = t, s + } + // s starts at or before t, check where it ends + if (s.Stop >= t.Stop && t.Stop != 0) || s.Stop == 0 { + return s, true // s is a superset of t + } + // s is "n" or "n:m", if m == ^uint32(0) then t is "n:*" + if s.Stop+1 >= t.Start || s.Stop == ^uint32(0) { + return Range{s.Start, t.Stop}, true // s intersects or touches t + } + return union, false + } + // exactly one of s and t is "*" + if s.Start == 0 { + if t.Stop == 0 { + return t, true // s is "*", t is "n:*" + } + } else if s.Stop == 0 { + return s, true // s is "n:*", t is "*" + } + return union, false +} + +// String returns range value s as a seq-number or seq-range string. +func (s Range) String() string { + if s.Start == s.Stop { + if s.Start == 0 { + return "*" + } + return strconv.FormatUint(uint64(s.Start), 10) + } + b := strconv.AppendUint(make([]byte, 0, 24), uint64(s.Start), 10) + if s.Stop == 0 { + return string(append(b, ':', '*')) + } + return string(strconv.AppendUint(append(b, ':'), uint64(s.Stop), 10)) +} + +func (s Range) append(nums []uint32) (out []uint32, ok bool) { + if s.Start == 0 || s.Stop == 0 { + return nil, false + } + for n := s.Start; n <= s.Stop; n++ { + nums = append(nums, n) + } + return nums, true +} + +// Set is used to represent a set of message sequence numbers or UIDs (see +// sequence-set ABNF rule). The zero value is an empty set. +type Set []Range + +// AddNum inserts new numbers into the set. The value 0 represents "*". +func (s *Set) AddNum(q ...uint32) { + for _, v := range q { + s.insert(Range{v, v}) + } +} + +// AddRange inserts a new range into the set. +func (s *Set) AddRange(start, stop uint32) { + if (stop < start && stop != 0) || start == 0 { + s.insert(Range{stop, start}) + } else { + s.insert(Range{start, stop}) + } +} + +// AddSet inserts all values from t into s. +func (s *Set) AddSet(t Set) { + for _, v := range t { + s.insert(v) + } +} + +// Dynamic returns true if the set contains "*" or "n:*" values. +func (s Set) Dynamic() bool { + return len(s) > 0 && s[len(s)-1].Stop == 0 +} + +// Contains returns true if the non-zero sequence number or UID q is contained +// in the set. The dynamic range "n:*" contains all q >= n. It is the caller's +// responsibility to handle the special case where q is the maximum UID in the +// mailbox and q < n (i.e. the set cannot match UIDs against "*:n" or "*" since +// it doesn't know what the maximum value is). +func (s Set) Contains(q uint32) bool { + if _, ok := s.search(q); ok { + return q != 0 + } + return false +} + +// Nums returns a slice of all numbers contained in the set. +func (s Set) Nums() (nums []uint32, ok bool) { + for _, v := range s { + nums, ok = v.append(nums) + if !ok { + return nil, false + } + } + return nums, true +} + +// String returns a sorted representation of all contained number values. +func (s Set) String() string { + if len(s) == 0 { + return "" + } + b := make([]byte, 0, 64) + for _, v := range s { + b = append(b, ',') + if v.Start == 0 { + b = append(b, '*') + continue + } + b = strconv.AppendUint(b, uint64(v.Start), 10) + if v.Start != v.Stop { + if v.Stop == 0 { + b = append(b, ':', '*') + continue + } + b = strconv.AppendUint(append(b, ':'), uint64(v.Stop), 10) + } + } + return string(b[1:]) +} + +// insert adds range value v to the set. 
+func (ptr *Set) insert(v Range) { + s := *ptr + defer func() { + *ptr = s + }() + + i, _ := s.search(v.Start) + merged := false + if i > 0 { + // try merging with the preceding entry (e.g. "1,4".insert(2), i == 1) + s[i-1], merged = s[i-1].Merge(v) + } + if i == len(s) { + // v was either merged with the last entry or needs to be appended + if !merged { + s.insertAt(i, v) + } + return + } else if merged { + i-- + } else if s[i], merged = s[i].Merge(v); !merged { + s.insertAt(i, v) // insert in the middle (e.g. "1,5".insert(3), i == 1) + return + } + // v was merged with s[i], continue trying to merge until the end + for j := i + 1; j < len(s); j++ { + if s[i], merged = s[i].Merge(s[j]); !merged { + if j > i+1 { + // cut out all entries between i and j that were merged + s = append(s[:i+1], s[j:]...) + } + return + } + } + // everything after s[i] was merged + s = s[:i+1] +} + +// insertAt inserts a new range value v at index i, resizing s.Set as needed. +func (ptr *Set) insertAt(i int, v Range) { + s := *ptr + defer func() { + *ptr = s + }() + + if n := len(s); i == n { + // insert at the end + s = append(s, v) + return + } else if n < cap(s) { + // enough space, shift everything at and after i to the right + s = s[:n+1] + copy(s[i+1:], s[i:]) + } else { + // allocate new slice and copy everything, n is at least 1 + set := make([]Range, n+1, n*2) + copy(set, s[:i]) + copy(set[i+1:], s[i:]) + s = set + } + s[i] = v +} + +// search attempts to find the index of the range set value that contains q. +// If no values contain q, the returned index is the position where q should be +// inserted and ok is set to false. +func (s Set) search(q uint32) (i int, ok bool) { + min, max := 0, len(s)-1 + for min < max { + if mid := (min + max) >> 1; s[mid].Less(q) { + min = mid + 1 + } else { + max = mid + } + } + if max < 0 || s[min].Less(q) { + return len(s), false // q is the new largest value + } + return min, s[min].Contains(q) +} + +// errBadNumSet is used to report problems with the format of a number set +// value. +type errBadNumSet string + +func (err errBadNumSet) Error() string { + return fmt.Sprintf("imap: bad number set value %q", string(err)) +} + +// parseNum parses a single seq-number value (non-zero uint32 or "*"). +func parseNum(v string) (uint32, error) { + if n, err := strconv.ParseUint(v, 10, 32); err == nil && v[0] != '0' { + return uint32(n), nil + } else if v == "*" { + return 0, nil + } + return 0, errBadNumSet(v) +} + +// parseNumRange creates a new seq instance by parsing strings in the format +// "n" or "n:m", where n and/or m may be "*". An error is returned for invalid +// values. +func parseNumRange(v string) (Range, error) { + var ( + r Range + err error + ) + if sep := strings.IndexRune(v, ':'); sep < 0 { + r.Start, err = parseNum(v) + r.Stop = r.Start + return r, err + } else if r.Start, err = parseNum(v[:sep]); err == nil { + if r.Stop, err = parseNum(v[sep+1:]); err == nil { + if (r.Stop < r.Start && r.Stop != 0) || r.Start == 0 { + r.Start, r.Stop = r.Stop, r.Start + } + return r, nil + } + } + return r, errBadNumSet(v) +} + +// ParseSet returns a new Set after parsing the set string. 
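+//
+// For example, "1,3:5,9:*" parses into the ranges 1, 3:5 and 9:*.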
+func ParseSet(set string) (Set, error) { + var s Set + for _, sv := range strings.Split(set, ",") { + r, err := parseNumRange(sv) + if err != nil { + return s, err + } + s.AddRange(r.Start, r.Stop) + } + return s, nil +} diff --git a/vendor/github.com/emersion/go-imap/v2/internal/imapwire/decoder.go b/vendor/github.com/emersion/go-imap/v2/internal/imapwire/decoder.go new file mode 100644 index 0000000000..cfd2995c02 --- /dev/null +++ b/vendor/github.com/emersion/go-imap/v2/internal/imapwire/decoder.go @@ -0,0 +1,654 @@ +package imapwire + +import ( + "bufio" + "fmt" + "io" + "strconv" + "strings" + "unicode" + + "github.com/emersion/go-imap/v2" + "github.com/emersion/go-imap/v2/internal/imapnum" + "github.com/emersion/go-imap/v2/internal/utf7" +) + +// This limits the max list nesting depth to prevent stack overflow. +const maxListDepth = 1000 + +// IsAtomChar returns true if ch is an ATOM-CHAR. +func IsAtomChar(ch byte) bool { + switch ch { + case '(', ')', '{', ' ', '%', '*', '"', '\\', ']': + return false + default: + return !unicode.IsControl(rune(ch)) + } +} + +// Is non-empty char +func isAStringChar(ch byte) bool { + return IsAtomChar(ch) || ch == ']' +} + +// DecoderExpectError is an error due to the Decoder.Expect family of methods. +type DecoderExpectError struct { + Message string +} + +func (err *DecoderExpectError) Error() string { + return fmt.Sprintf("imapwire: %v", err.Message) +} + +// A Decoder reads IMAP data. +// +// There are multiple families of methods: +// +// - Methods directly named after IMAP grammar elements attempt to decode +// said element, and return false if it's another element. +// - "Expect" methods do the same, but set the decoder error (see Err) on +// failure. +type Decoder struct { + // CheckBufferedLiteralFunc is called when a literal is about to be decoded + // and needs to be fully buffered in memory. + CheckBufferedLiteralFunc func(size int64, nonSync bool) error + // MaxSize defines a maximum number of bytes to be read from the input. + // Literals are ignored. + MaxSize int64 + + r *bufio.Reader + side ConnSide + err error + literal bool + crlf bool + listDepth int + readBytes int64 +} + +// NewDecoder creates a new decoder. +func NewDecoder(r *bufio.Reader, side ConnSide) *Decoder { + return &Decoder{r: r, side: side} +} + +func (dec *Decoder) mustUnreadByte() { + if err := dec.r.UnreadByte(); err != nil { + panic(fmt.Errorf("imapwire: failed to unread byte: %v", err)) + } + dec.readBytes-- +} + +// Err returns the decoder error, if any. +func (dec *Decoder) Err() error { + return dec.err +} + +func (dec *Decoder) returnErr(err error) bool { + if err == nil { + return true + } + if dec.err == nil { + dec.err = err + } + return false +} + +func (dec *Decoder) readByte() (byte, bool) { + if dec.MaxSize > 0 && dec.readBytes > dec.MaxSize { + return 0, dec.returnErr(fmt.Errorf("imapwire: max size exceeded")) + } + dec.crlf = false + if dec.literal { + return 0, dec.returnErr(fmt.Errorf("imapwire: cannot decode while a literal is open")) + } + b, err := dec.r.ReadByte() + if err != nil { + if err == io.EOF { + err = io.ErrUnexpectedEOF + } + return b, dec.returnErr(err) + } + dec.readBytes++ + return b, true +} + +func (dec *Decoder) acceptByte(want byte) bool { + got, ok := dec.readByte() + if !ok { + return false + } else if got != want { + dec.mustUnreadByte() + return false + } + return true +} + +// EOF returns true if end-of-file is reached. 
+func (dec *Decoder) EOF() bool { + _, err := dec.r.ReadByte() + if err == io.EOF { + return true + } else if err != nil { + return dec.returnErr(err) + } + dec.mustUnreadByte() + return false +} + +// Expect sets the decoder error if ok is false. +func (dec *Decoder) Expect(ok bool, name string) bool { + if !ok { + msg := fmt.Sprintf("expected %v", name) + if dec.r.Buffered() > 0 { + b, _ := dec.r.Peek(1) + msg += fmt.Sprintf(", got %q", b) + } + return dec.returnErr(&DecoderExpectError{Message: msg}) + } + return true +} + +func (dec *Decoder) SP() bool { + if dec.acceptByte(' ') { + // https://github.com/emersion/go-imap/issues/571 + b, ok := dec.readByte() + if !ok { + return false + } + dec.mustUnreadByte() + return b != '\r' && b != '\n' + } + + // Special case: SP is optional if the next field is a parenthesized list + b, ok := dec.readByte() + if !ok { + return false + } + dec.mustUnreadByte() + return b == '(' +} + +func (dec *Decoder) ExpectSP() bool { + return dec.Expect(dec.SP(), "SP") +} + +func (dec *Decoder) CRLF() bool { + dec.acceptByte(' ') // https://github.com/emersion/go-imap/issues/540 + dec.acceptByte('\r') // be liberal in what we receive and accept lone LF + if !dec.acceptByte('\n') { + return false + } + dec.crlf = true + return true +} + +func (dec *Decoder) ExpectCRLF() bool { + return dec.Expect(dec.CRLF(), "CRLF") +} + +func (dec *Decoder) Func(ptr *string, valid func(ch byte) bool) bool { + var sb strings.Builder + for { + b, ok := dec.readByte() + if !ok { + return false + } + + if !valid(b) { + dec.mustUnreadByte() + break + } + + sb.WriteByte(b) + } + if sb.Len() == 0 { + return false + } + *ptr = sb.String() + return true +} + +func (dec *Decoder) Atom(ptr *string) bool { + return dec.Func(ptr, IsAtomChar) +} + +func (dec *Decoder) ExpectAtom(ptr *string) bool { + return dec.Expect(dec.Atom(ptr), "atom") +} + +func (dec *Decoder) ExpectNIL() bool { + var s string + return dec.ExpectAtom(&s) && dec.Expect(s == "NIL", "NIL") +} + +func (dec *Decoder) Special(b byte) bool { + return dec.acceptByte(b) +} + +func (dec *Decoder) ExpectSpecial(b byte) bool { + return dec.Expect(dec.Special(b), fmt.Sprintf("'%v'", string(b))) +} + +func (dec *Decoder) Text(ptr *string) bool { + var sb strings.Builder + for { + b, ok := dec.readByte() + if !ok { + return false + } else if b == '\r' || b == '\n' { + dec.mustUnreadByte() + break + } + sb.WriteByte(b) + } + if sb.Len() == 0 { + return false + } + *ptr = sb.String() + return true +} + +func (dec *Decoder) ExpectText(ptr *string) bool { + return dec.Expect(dec.Text(ptr), "text") +} + +func (dec *Decoder) DiscardUntilByte(untilCh byte) { + for { + ch, ok := dec.readByte() + if !ok { + return + } else if ch == untilCh { + dec.mustUnreadByte() + return + } + } +} + +func (dec *Decoder) DiscardLine() { + if dec.crlf { + return + } + var text string + dec.Text(&text) + dec.CRLF() +} + +func (dec *Decoder) DiscardValue() bool { + var s string + if dec.String(&s) { + return true + } + + isList, err := dec.List(func() error { + if !dec.DiscardValue() { + return dec.Err() + } + return nil + }) + if err != nil { + return false + } else if isList { + return true + } + + if dec.Atom(&s) { + return true + } + + dec.Expect(false, "value") + return false +} + +func (dec *Decoder) numberStr() (s string, ok bool) { + var sb strings.Builder + for { + ch, ok := dec.readByte() + if !ok { + return "", false + } else if ch < '0' || ch > '9' { + dec.mustUnreadByte() + break + } + sb.WriteByte(ch) + } + if sb.Len() == 0 { + return "", false + 
} + return sb.String(), true +} + +func (dec *Decoder) Number(ptr *uint32) bool { + s, ok := dec.numberStr() + if !ok { + return false + } + v64, err := strconv.ParseUint(s, 10, 32) + if err != nil { + return false // can happen on overflow + } + *ptr = uint32(v64) + return true +} + +func (dec *Decoder) ExpectNumber(ptr *uint32) bool { + return dec.Expect(dec.Number(ptr), "number") +} + +func (dec *Decoder) ExpectBodyFldOctets(ptr *uint32) bool { + // Workaround: some servers incorrectly return "-1" for the body structure + // size. See: + // https://github.com/emersion/go-imap/issues/534 + if dec.acceptByte('-') { + *ptr = 0 + return dec.Expect(dec.acceptByte('1'), "-1 (body-fld-octets workaround)") + } + return dec.ExpectNumber(ptr) +} + +func (dec *Decoder) Number64(ptr *int64) bool { + s, ok := dec.numberStr() + if !ok { + return false + } + v, err := strconv.ParseInt(s, 10, 64) + if err != nil { + return false // can happen on overflow + } + *ptr = v + return true +} + +func (dec *Decoder) ExpectNumber64(ptr *int64) bool { + return dec.Expect(dec.Number64(ptr), "number64") +} + +func (dec *Decoder) ModSeq(ptr *uint64) bool { + s, ok := dec.numberStr() + if !ok { + return false + } + v, err := strconv.ParseUint(s, 10, 64) + if err != nil { + return false // can happen on overflow + } + *ptr = v + return true +} + +func (dec *Decoder) ExpectModSeq(ptr *uint64) bool { + return dec.Expect(dec.ModSeq(ptr), "mod-sequence-value") +} + +func (dec *Decoder) Quoted(ptr *string) bool { + if !dec.Special('"') { + return false + } + var sb strings.Builder + for { + ch, ok := dec.readByte() + if !ok { + return false + } + + if ch == '"' { + break + } + + if ch == '\\' { + ch, ok = dec.readByte() + if !ok { + return false + } + } + + sb.WriteByte(ch) + } + *ptr = sb.String() + return true +} + +func (dec *Decoder) ExpectAString(ptr *string) bool { + if dec.Quoted(ptr) { + return true + } + if dec.Literal(ptr) { + return true + } + // We cannot do dec.Atom(ptr) here because sometimes mailbox names are unquoted, + // and they can contain special characters like `]`. 
+ return dec.Expect(dec.Func(ptr, isAStringChar), "ASTRING-CHAR") +} + +func (dec *Decoder) String(ptr *string) bool { + return dec.Quoted(ptr) || dec.Literal(ptr) +} + +func (dec *Decoder) ExpectString(ptr *string) bool { + return dec.Expect(dec.String(ptr), "string") +} + +func (dec *Decoder) ExpectNString(ptr *string) bool { + var s string + if dec.Atom(&s) { + if !dec.Expect(s == "NIL", "nstring") { + return false + } + *ptr = "" + return true + } + return dec.ExpectString(ptr) +} + +func (dec *Decoder) ExpectNStringReader() (lit *LiteralReader, nonSync, ok bool) { + var s string + if dec.Atom(&s) { + if !dec.Expect(s == "NIL", "nstring") { + return nil, false, false + } + return nil, true, true + } + // TODO: read quoted string as a string instead of buffering + if dec.Quoted(&s) { + return newLiteralReaderFromString(s), true, true + } + if lit, nonSync, ok = dec.LiteralReader(); ok { + return lit, nonSync, true + } else { + return nil, false, dec.Expect(false, "nstring") + } +} + +func (dec *Decoder) List(f func() error) (isList bool, err error) { + if !dec.Special('(') { + return false, nil + } + if dec.Special(')') { + return true, nil + } + + dec.listDepth++ + defer func() { + dec.listDepth-- + }() + + if dec.listDepth >= maxListDepth { + return false, fmt.Errorf("imapwire: exceeded max depth") + } + + for { + if err := f(); err != nil { + return true, err + } + + if dec.Special(')') { + return true, nil + } else if !dec.ExpectSP() { + return true, dec.Err() + } + } +} + +func (dec *Decoder) ExpectList(f func() error) error { + isList, err := dec.List(f) + if err != nil { + return err + } else if !dec.Expect(isList, "(") { + return dec.Err() + } + return nil +} + +func (dec *Decoder) ExpectNList(f func() error) error { + var s string + if dec.Atom(&s) { + if !dec.Expect(s == "NIL", "NIL") { + return dec.Err() + } + return nil + } + return dec.ExpectList(f) +} + +func (dec *Decoder) ExpectMailbox(ptr *string) bool { + var name string + if !dec.ExpectAString(&name) { + return false + } + if strings.EqualFold(name, "INBOX") { + *ptr = "INBOX" + return true + } + name, err := utf7.Decode(name) + if err == nil { + *ptr = name + } + return dec.returnErr(err) +} + +func (dec *Decoder) ExpectUID(ptr *imap.UID) bool { + var num uint32 + if !dec.ExpectNumber(&num) { + return false + } + *ptr = imap.UID(num) + return true +} + +func (dec *Decoder) ExpectNumSet(kind NumKind, ptr *imap.NumSet) bool { + if dec.Special('$') { + *ptr = imap.SearchRes() + return true + } + + var s string + if !dec.Expect(dec.Func(&s, isNumSetChar), "sequence-set") { + return false + } + numSet, err := imapnum.ParseSet(s) + if err != nil { + return dec.returnErr(err) + } + + switch kind { + case NumKindSeq: + *ptr = seqSetFromNumSet(numSet) + case NumKindUID: + *ptr = uidSetFromNumSet(numSet) + } + return true +} + +func (dec *Decoder) ExpectUIDSet(ptr *imap.UIDSet) bool { + var numSet imap.NumSet + ok := dec.ExpectNumSet(NumKindUID, &numSet) + if ok { + *ptr = numSet.(imap.UIDSet) + } + return ok +} + +func isNumSetChar(ch byte) bool { + return ch == '*' || IsAtomChar(ch) +} + +func (dec *Decoder) Literal(ptr *string) bool { + lit, nonSync, ok := dec.LiteralReader() + if !ok { + return false + } + if dec.CheckBufferedLiteralFunc != nil { + if err := dec.CheckBufferedLiteralFunc(lit.Size(), nonSync); err != nil { + lit.cancel() + return false + } + } + var sb strings.Builder + _, err := io.Copy(&sb, lit) + if err == nil { + *ptr = sb.String() + } + return dec.returnErr(err) +} + +func (dec *Decoder) LiteralReader() 
(lit *LiteralReader, nonSync, ok bool) { + if !dec.Special('{') { + return nil, false, false + } + var size int64 + if !dec.ExpectNumber64(&size) { + return nil, false, false + } + if dec.side == ConnSideServer { + nonSync = dec.acceptByte('+') + } + if !dec.ExpectSpecial('}') || !dec.ExpectCRLF() { + return nil, false, false + } + dec.literal = true + lit = &LiteralReader{ + dec: dec, + size: size, + r: io.LimitReader(dec.r, size), + } + return lit, nonSync, true +} + +func (dec *Decoder) ExpectLiteralReader() (lit *LiteralReader, nonSync bool, err error) { + lit, nonSync, ok := dec.LiteralReader() + if !dec.Expect(ok, "literal") { + return nil, false, dec.Err() + } + return lit, nonSync, nil +} + +type LiteralReader struct { + dec *Decoder + size int64 + r io.Reader +} + +func newLiteralReaderFromString(s string) *LiteralReader { + return &LiteralReader{ + size: int64(len(s)), + r: strings.NewReader(s), + } +} + +func (lit *LiteralReader) Size() int64 { + return lit.size +} + +func (lit *LiteralReader) Read(b []byte) (int, error) { + n, err := lit.r.Read(b) + if err == io.EOF { + lit.cancel() + } + return n, err +} + +func (lit *LiteralReader) cancel() { + if lit.dec == nil { + return + } + lit.dec.literal = false + lit.dec = nil +} diff --git a/vendor/github.com/emersion/go-imap/v2/internal/imapwire/encoder.go b/vendor/github.com/emersion/go-imap/v2/internal/imapwire/encoder.go new file mode 100644 index 0000000000..b27589aa6d --- /dev/null +++ b/vendor/github.com/emersion/go-imap/v2/internal/imapwire/encoder.go @@ -0,0 +1,341 @@ +package imapwire + +import ( + "bufio" + "fmt" + "io" + "strconv" + "strings" + "unicode" + + "github.com/emersion/go-imap/v2" + "github.com/emersion/go-imap/v2/internal/utf7" +) + +// An Encoder writes IMAP data. +// +// Most methods don't return an error, instead they defer error handling until +// CRLF is called. These methods return the Encoder so that calls can be +// chained. +type Encoder struct { + // QuotedUTF8 allows raw UTF-8 in quoted strings. This requires IMAP4rev2 + // to be available, or UTF8=ACCEPT to be enabled. + QuotedUTF8 bool + // LiteralMinus enables non-synchronizing literals for short payloads. + // This requires IMAP4rev2 or LITERAL-. This is only meaningful for + // clients. + LiteralMinus bool + // LiteralPlus enables non-synchronizing literals for all payloads. This + // requires LITERAL+. This is only meaningful for clients. + LiteralPlus bool + // NewContinuationRequest creates a new continuation request. This is only + // meaningful for clients. + NewContinuationRequest func() *ContinuationRequest + + w *bufio.Writer + side ConnSide + err error + literal bool +} + +// NewEncoder creates a new encoder. +func NewEncoder(w *bufio.Writer, side ConnSide) *Encoder { + return &Encoder{w: w, side: side} +} + +func (enc *Encoder) setErr(err error) { + if enc.err == nil { + enc.err = err + } +} + +func (enc *Encoder) writeString(s string) *Encoder { + if enc.err != nil { + return enc + } + if enc.literal { + enc.err = fmt.Errorf("imapwire: cannot encode while a literal is open") + return enc + } + if _, err := enc.w.WriteString(s); err != nil { + enc.err = err + } + return enc +} + +// CRLF writes a "\r\n" sequence and flushes the buffered writer. 
+func (enc *Encoder) CRLF() error { + enc.writeString("\r\n") + if enc.err != nil { + return enc.err + } + return enc.w.Flush() +} + +func (enc *Encoder) Atom(s string) *Encoder { + return enc.writeString(s) +} + +func (enc *Encoder) SP() *Encoder { + return enc.writeString(" ") +} + +func (enc *Encoder) Special(ch byte) *Encoder { + return enc.writeString(string(ch)) +} + +func (enc *Encoder) Quoted(s string) *Encoder { + var sb strings.Builder + sb.Grow(2 + len(s)) + sb.WriteByte('"') + for i := 0; i < len(s); i++ { + ch := s[i] + if ch == '"' || ch == '\\' { + sb.WriteByte('\\') + } + sb.WriteByte(ch) + } + sb.WriteByte('"') + return enc.writeString(sb.String()) +} + +func (enc *Encoder) String(s string) *Encoder { + if !enc.validQuoted(s) { + enc.stringLiteral(s) + return enc + } + return enc.Quoted(s) +} + +func (enc *Encoder) validQuoted(s string) bool { + if len(s) > 4096 { + return false + } + + for i := 0; i < len(s); i++ { + ch := s[i] + + // NUL, CR and LF are never valid + switch ch { + case 0, '\r', '\n': + return false + } + + if !enc.QuotedUTF8 && ch > unicode.MaxASCII { + return false + } + } + return true +} + +func (enc *Encoder) stringLiteral(s string) { + var sync *ContinuationRequest + if enc.side == ConnSideClient && (!enc.LiteralMinus || len(s) > 4096) && !enc.LiteralPlus { + if enc.NewContinuationRequest != nil { + sync = enc.NewContinuationRequest() + } + if sync == nil { + enc.setErr(fmt.Errorf("imapwire: cannot send synchronizing literal")) + return + } + } + wc := enc.Literal(int64(len(s)), sync) + _, writeErr := io.WriteString(wc, s) + closeErr := wc.Close() + if writeErr != nil { + enc.setErr(writeErr) + } else if closeErr != nil { + enc.setErr(closeErr) + } +} + +func (enc *Encoder) Mailbox(name string) *Encoder { + if strings.EqualFold(name, "INBOX") { + return enc.Atom("INBOX") + } else { + if enc.QuotedUTF8 { + name = utf7.Escape(name) + } else { + name = utf7.Encode(name) + } + return enc.String(name) + } +} + +func (enc *Encoder) NumSet(numSet imap.NumSet) *Encoder { + s := numSet.String() + if s == "" { + enc.setErr(fmt.Errorf("imapwire: cannot encode empty sequence set")) + return enc + } + return enc.writeString(s) +} + +func (enc *Encoder) Flag(flag imap.Flag) *Encoder { + if flag != "\\*" && !isValidFlag(string(flag)) { + enc.setErr(fmt.Errorf("imapwire: invalid flag %q", flag)) + return enc + } + return enc.writeString(string(flag)) +} + +func (enc *Encoder) MailboxAttr(attr imap.MailboxAttr) *Encoder { + if !strings.HasPrefix(string(attr), "\\") || !isValidFlag(string(attr)) { + enc.setErr(fmt.Errorf("imapwire: invalid mailbox attribute %q", attr)) + return enc + } + return enc.writeString(string(attr)) +} + +// isValidFlag checks whether the provided string satisfies +// flag-keyword / flag-extension. +func isValidFlag(s string) bool { + for i := 0; i < len(s); i++ { + ch := s[i] + if ch == '\\' { + if i != 0 { + return false + } + } else { + if !IsAtomChar(ch) { + return false + } + } + } + return len(s) > 0 +} + +func (enc *Encoder) Number(v uint32) *Encoder { + return enc.writeString(strconv.FormatUint(uint64(v), 10)) +} + +func (enc *Encoder) Number64(v int64) *Encoder { + // TODO: disallow negative values + return enc.writeString(strconv.FormatInt(v, 10)) +} + +func (enc *Encoder) ModSeq(v uint64) *Encoder { + // TODO: disallow zero values + return enc.writeString(strconv.FormatUint(v, 10)) +} + +// List writes a parenthesized list. 
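+//
+// f is called once per element; for example,
+//
+//	enc.List(3, func(i int) { enc.Number(uint32(i)) })
+//
+// writes "(0 1 2)".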
+func (enc *Encoder) List(n int, f func(i int)) *Encoder { + enc.Special('(') + for i := 0; i < n; i++ { + if i > 0 { + enc.SP() + } + f(i) + } + enc.Special(')') + return enc +} + +func (enc *Encoder) BeginList() *ListEncoder { + enc.Special('(') + return &ListEncoder{enc: enc} +} + +func (enc *Encoder) NIL() *Encoder { + return enc.Atom("NIL") +} + +func (enc *Encoder) Text(s string) *Encoder { + return enc.writeString(s) +} + +func (enc *Encoder) UID(uid imap.UID) *Encoder { + return enc.Number(uint32(uid)) +} + +// Literal writes a literal. +// +// The caller must write exactly size bytes to the returned writer. +// +// If sync is non-nil, the literal is synchronizing: the encoder will wait for +// nil to be sent to the channel before writing the literal data. If an error +// is sent to the channel, the literal will be cancelled. +func (enc *Encoder) Literal(size int64, sync *ContinuationRequest) io.WriteCloser { + if sync != nil && enc.side == ConnSideServer { + panic("imapwire: sync must be nil on a server-side Encoder.Literal") + } + + // TODO: literal8 + enc.writeString("{") + enc.Number64(size) + if sync == nil && enc.side == ConnSideClient { + enc.writeString("+") + } + enc.writeString("}") + + if sync == nil { + enc.writeString("\r\n") + } else { + if err := enc.CRLF(); err != nil { + return errorWriter{err} + } + if _, err := sync.Wait(); err != nil { + enc.setErr(err) + return errorWriter{err} + } + } + + enc.literal = true + return &literalWriter{ + enc: enc, + n: size, + } +} + +type errorWriter struct { + err error +} + +func (ew errorWriter) Write(b []byte) (int, error) { + return 0, ew.err +} + +func (ew errorWriter) Close() error { + return ew.err +} + +type literalWriter struct { + enc *Encoder + n int64 +} + +func (lw *literalWriter) Write(b []byte) (int, error) { + if lw.n-int64(len(b)) < 0 { + return 0, fmt.Errorf("wrote too many bytes in literal") + } + n, err := lw.enc.w.Write(b) + lw.n -= int64(n) + return n, err +} + +func (lw *literalWriter) Close() error { + lw.enc.literal = false + if lw.n != 0 { + return fmt.Errorf("wrote too few bytes in literal (%v remaining)", lw.n) + } + return nil +} + +type ListEncoder struct { + enc *Encoder + n int +} + +func (le *ListEncoder) Item() *Encoder { + if le.n > 0 { + le.enc.SP() + } + le.n++ + return le.enc +} + +func (le *ListEncoder) End() { + le.enc.Special(')') + le.enc = nil +} diff --git a/vendor/github.com/emersion/go-imap/v2/internal/imapwire/imapwire.go b/vendor/github.com/emersion/go-imap/v2/internal/imapwire/imapwire.go new file mode 100644 index 0000000000..716d1c2d0f --- /dev/null +++ b/vendor/github.com/emersion/go-imap/v2/internal/imapwire/imapwire.go @@ -0,0 +1,47 @@ +// Package imapwire implements the IMAP wire protocol. +// +// The IMAP wire protocol is defined in RFC 9051 section 4. +package imapwire + +import ( + "fmt" +) + +// ConnSide describes the side of a connection: client or server. +type ConnSide int + +const ( + ConnSideClient ConnSide = 1 + iota + ConnSideServer +) + +// ContinuationRequest is a continuation request. +// +// The sender must call either Done or Cancel. The receiver must call Wait. 
+type ContinuationRequest struct { + done chan struct{} + err error + text string +} + +func NewContinuationRequest() *ContinuationRequest { + return &ContinuationRequest{done: make(chan struct{})} +} + +func (cont *ContinuationRequest) Cancel(err error) { + if err == nil { + err = fmt.Errorf("imapwire: continuation request cancelled") + } + cont.err = err + close(cont.done) +} + +func (cont *ContinuationRequest) Done(text string) { + cont.text = text + close(cont.done) +} + +func (cont *ContinuationRequest) Wait() (string, error) { + <-cont.done + return cont.text, cont.err +} diff --git a/vendor/github.com/emersion/go-imap/v2/internal/imapwire/num.go b/vendor/github.com/emersion/go-imap/v2/internal/imapwire/num.go new file mode 100644 index 0000000000..270afe1eca --- /dev/null +++ b/vendor/github.com/emersion/go-imap/v2/internal/imapwire/num.go @@ -0,0 +1,39 @@ +package imapwire + +import ( + "unsafe" + + "github.com/emersion/go-imap/v2" + "github.com/emersion/go-imap/v2/internal/imapnum" +) + +type NumKind int + +const ( + NumKindSeq NumKind = iota + 1 + NumKindUID +) + +func seqSetFromNumSet(s imapnum.Set) imap.SeqSet { + return *(*imap.SeqSet)(unsafe.Pointer(&s)) +} + +func uidSetFromNumSet(s imapnum.Set) imap.UIDSet { + return *(*imap.UIDSet)(unsafe.Pointer(&s)) +} + +func NumSetKind(numSet imap.NumSet) NumKind { + switch numSet.(type) { + case imap.SeqSet: + return NumKindSeq + case imap.UIDSet: + return NumKindUID + default: + panic("imap: invalid NumSet type") + } +} + +func ParseSeqSet(s string) (imap.SeqSet, error) { + numSet, err := imapnum.ParseSet(s) + return seqSetFromNumSet(numSet), err +} diff --git a/vendor/github.com/emersion/go-imap/v2/internal/internal.go b/vendor/github.com/emersion/go-imap/v2/internal/internal.go new file mode 100644 index 0000000000..7053d83a39 --- /dev/null +++ b/vendor/github.com/emersion/go-imap/v2/internal/internal.go @@ -0,0 +1,170 @@ +package internal + +import ( + "fmt" + "strings" + "sync" + "time" + + "github.com/emersion/go-imap/v2" + "github.com/emersion/go-imap/v2/internal/imapwire" +) + +const ( + DateTimeLayout = "_2-Jan-2006 15:04:05 -0700" + DateLayout = "2-Jan-2006" +) + +const FlagRecent imap.Flag = "\\Recent" // removed in IMAP4rev2 + +func DecodeDateTime(dec *imapwire.Decoder) (time.Time, error) { + var s string + if !dec.Quoted(&s) { + return time.Time{}, nil + } + t, err := time.Parse(DateTimeLayout, s) + if err != nil { + return time.Time{}, fmt.Errorf("in date-time: %v", err) // TODO: use imapwire.DecodeExpectError? + } + return t, err +} + +func ExpectDateTime(dec *imapwire.Decoder) (time.Time, error) { + t, err := DecodeDateTime(dec) + if err != nil { + return t, err + } + if !dec.Expect(!t.IsZero(), "date-time") { + return t, dec.Err() + } + return t, nil +} + +func ExpectDate(dec *imapwire.Decoder) (time.Time, error) { + var s string + if !dec.ExpectAString(&s) { + return time.Time{}, dec.Err() + } + t, err := time.Parse(DateLayout, s) + if err != nil { + return time.Time{}, fmt.Errorf("in date: %v", err) // use imapwire.DecodeExpectError? 
+ } + return t, nil +} + +func ExpectFlagList(dec *imapwire.Decoder) ([]imap.Flag, error) { + var flags []imap.Flag + err := dec.ExpectList(func() error { + // Some servers start the list with a space, so we need to skip it + // https://github.com/emersion/go-imap/pull/633 + dec.SP() + + flag, err := ExpectFlag(dec) + if err != nil { + return err + } + flags = append(flags, flag) + return nil + }) + return flags, err +} + +func ExpectFlag(dec *imapwire.Decoder) (imap.Flag, error) { + isSystem := dec.Special('\\') + if isSystem && dec.Special('*') { + return imap.FlagWildcard, nil // flag-perm + } + var name string + if !dec.ExpectAtom(&name) { + return "", fmt.Errorf("in flag: %w", dec.Err()) + } + if isSystem { + name = "\\" + name + } + return canonicalFlag(name), nil +} + +func ExpectMailboxAttrList(dec *imapwire.Decoder) ([]imap.MailboxAttr, error) { + var attrs []imap.MailboxAttr + err := dec.ExpectList(func() error { + attr, err := ExpectMailboxAttr(dec) + if err != nil { + return err + } + attrs = append(attrs, attr) + return nil + }) + return attrs, err +} + +func ExpectMailboxAttr(dec *imapwire.Decoder) (imap.MailboxAttr, error) { + flag, err := ExpectFlag(dec) + return canonicalMailboxAttr(string(flag)), err +} + +var ( + canonOnce sync.Once + canonFlag map[string]imap.Flag + canonMailboxAttr map[string]imap.MailboxAttr +) + +func canonInit() { + flags := []imap.Flag{ + imap.FlagSeen, + imap.FlagAnswered, + imap.FlagFlagged, + imap.FlagDeleted, + imap.FlagDraft, + imap.FlagForwarded, + imap.FlagMDNSent, + imap.FlagJunk, + imap.FlagNotJunk, + imap.FlagPhishing, + imap.FlagImportant, + } + mailboxAttrs := []imap.MailboxAttr{ + imap.MailboxAttrNonExistent, + imap.MailboxAttrNoInferiors, + imap.MailboxAttrNoSelect, + imap.MailboxAttrHasChildren, + imap.MailboxAttrHasNoChildren, + imap.MailboxAttrMarked, + imap.MailboxAttrUnmarked, + imap.MailboxAttrSubscribed, + imap.MailboxAttrRemote, + imap.MailboxAttrAll, + imap.MailboxAttrArchive, + imap.MailboxAttrDrafts, + imap.MailboxAttrFlagged, + imap.MailboxAttrJunk, + imap.MailboxAttrSent, + imap.MailboxAttrTrash, + imap.MailboxAttrImportant, + } + + canonFlag = make(map[string]imap.Flag) + for _, flag := range flags { + canonFlag[strings.ToLower(string(flag))] = flag + } + + canonMailboxAttr = make(map[string]imap.MailboxAttr) + for _, attr := range mailboxAttrs { + canonMailboxAttr[strings.ToLower(string(attr))] = attr + } +} + +func canonicalFlag(s string) imap.Flag { + canonOnce.Do(canonInit) + if flag, ok := canonFlag[strings.ToLower(s)]; ok { + return flag + } + return imap.Flag(s) +} + +func canonicalMailboxAttr(s string) imap.MailboxAttr { + canonOnce.Do(canonInit) + if attr, ok := canonMailboxAttr[strings.ToLower(s)]; ok { + return attr + } + return imap.MailboxAttr(s) +} diff --git a/vendor/github.com/emersion/go-imap/v2/internal/sasl.go b/vendor/github.com/emersion/go-imap/v2/internal/sasl.go new file mode 100644 index 0000000000..85d9f3d390 --- /dev/null +++ b/vendor/github.com/emersion/go-imap/v2/internal/sasl.go @@ -0,0 +1,23 @@ +package internal + +import ( + "encoding/base64" +) + +func EncodeSASL(b []byte) string { + if len(b) == 0 { + return "=" + } else { + return base64.StdEncoding.EncodeToString(b) + } +} + +func DecodeSASL(s string) ([]byte, error) { + if s == "=" { + // go-sasl treats nil as no challenge/response, so return a non-nil + // empty byte slice + return []byte{}, nil + } else { + return base64.StdEncoding.DecodeString(s) + } +} diff --git a/vendor/github.com/emersion/go-imap/v2/internal/utf7/decoder.go 
b/vendor/github.com/emersion/go-imap/v2/internal/utf7/decoder.go new file mode 100644 index 0000000000..b8e906e440 --- /dev/null +++ b/vendor/github.com/emersion/go-imap/v2/internal/utf7/decoder.go @@ -0,0 +1,118 @@ +package utf7 + +import ( + "errors" + "strings" + "unicode/utf16" + "unicode/utf8" +) + +// ErrInvalidUTF7 means that a decoder encountered invalid UTF-7. +var ErrInvalidUTF7 = errors.New("utf7: invalid UTF-7") + +// Decode decodes a string encoded with modified UTF-7. +// +// Note, raw UTF-8 is accepted. +func Decode(src string) (string, error) { + if !utf8.ValidString(src) { + return "", errors.New("invalid UTF-8") + } + + var sb strings.Builder + sb.Grow(len(src)) + + ascii := true + for i := 0; i < len(src); i++ { + ch := src[i] + + if ch < min || (ch > max && ch < utf8.RuneSelf) { + // Illegal code point in ASCII mode. Note, UTF-8 codepoints are + // always allowed. + return "", ErrInvalidUTF7 + } + + if ch != '&' { + sb.WriteByte(ch) + ascii = true + continue + } + + // Find the end of the Base64 or "&-" segment + start := i + 1 + for i++; i < len(src) && src[i] != '-'; i++ { + if src[i] == '\r' || src[i] == '\n' { // base64 package ignores CR and LF + return "", ErrInvalidUTF7 + } + } + + if i == len(src) { // Implicit shift ("&...") + return "", ErrInvalidUTF7 + } + + if i == start { // Escape sequence "&-" + sb.WriteByte('&') + ascii = true + } else { // Control or non-ASCII code points in base64 + if !ascii { // Null shift ("&...-&...-") + return "", ErrInvalidUTF7 + } + + b := decode([]byte(src[start:i])) + if len(b) == 0 { // Bad encoding + return "", ErrInvalidUTF7 + } + sb.Write(b) + + ascii = false + } + } + + return sb.String(), nil +} + +// Extracts UTF-16-BE bytes from base64 data and converts them to UTF-8. +// A nil slice is returned if the encoding is invalid. +func decode(b64 []byte) []byte { + var b []byte + + // Allocate a single block of memory large enough to store the Base64 data + // (if padding is required), UTF-16-BE bytes, and decoded UTF-8 bytes. + // Since a 2-byte UTF-16 sequence may expand into a 3-byte UTF-8 sequence, + // double the space allocation for UTF-8. + if n := len(b64); b64[n-1] == '=' { + return nil + } else if n&3 == 0 { + b = make([]byte, b64Enc.DecodedLen(n)*3) + } else { + n += 4 - n&3 + b = make([]byte, n+b64Enc.DecodedLen(n)*3) + copy(b[copy(b, b64):n], []byte("==")) + b64, b = b[:n], b[n:] + } + + // Decode Base64 into the first 1/3rd of b + n, err := b64Enc.Decode(b, b64) + if err != nil || n&1 == 1 { + return nil + } + + // Decode UTF-16-BE into the remaining 2/3rds of b + b, s := b[:n], b[n:] + j := 0 + for i := 0; i < n; i += 2 { + r := rune(b[i])<<8 | rune(b[i+1]) + if utf16.IsSurrogate(r) { + if i += 2; i == n { + return nil + } + r2 := rune(b[i])<<8 | rune(b[i+1]) + if r = utf16.DecodeRune(r, r2); r == utf8.RuneError { + return nil + } + } else if min <= r && r <= max { + return nil + } + j += utf8.EncodeRune(s[j:], r) + } + return s[:j] +} diff --git a/vendor/github.com/emersion/go-imap/v2/internal/utf7/encoder.go b/vendor/github.com/emersion/go-imap/v2/internal/utf7/encoder.go new file mode 100644 index 0000000000..e7107c320d --- /dev/null +++ b/vendor/github.com/emersion/go-imap/v2/internal/utf7/encoder.go @@ -0,0 +1,88 @@ +package utf7 + +import ( + "strings" + "unicode/utf16" + "unicode/utf8" +) + +// Encode encodes a string with modified UTF-7. 
+func Encode(src string) string { + var sb strings.Builder + sb.Grow(len(src)) + + for i := 0; i < len(src); { + ch := src[i] + + if min <= ch && ch <= max { + sb.WriteByte(ch) + if ch == '&' { + sb.WriteByte('-') + } + + i++ + } else { + start := i + + // Find the next printable ASCII code point + i++ + for i < len(src) && (src[i] < min || src[i] > max) { + i++ + } + + sb.Write(encode([]byte(src[start:i]))) + } + } + + return sb.String() +} + +// Converts string s from UTF-8 to UTF-16-BE, encodes the result as base64, +// removes the padding, and adds UTF-7 shifts. +func encode(s []byte) []byte { + // len(s) is sufficient for UTF-8 to UTF-16 conversion if there are no + // control code points (see table below). + b := make([]byte, 0, len(s)+4) + for len(s) > 0 { + r, size := utf8.DecodeRune(s) + if r > utf8.MaxRune { + r, size = utf8.RuneError, 1 // Bug fix (issue 3785) + } + s = s[size:] + if r1, r2 := utf16.EncodeRune(r); r1 != utf8.RuneError { + b = append(b, byte(r1>>8), byte(r1)) + r = r2 + } + b = append(b, byte(r>>8), byte(r)) + } + + // Encode as base64 + n := b64Enc.EncodedLen(len(b)) + 2 + b64 := make([]byte, n) + b64Enc.Encode(b64[1:], b) + + // Strip padding + n -= 2 - (len(b)+2)%3 + b64 = b64[:n] + + // Add UTF-7 shifts + b64[0] = '&' + b64[n-1] = '-' + return b64 +} + +// Escape passes through raw UTF-8 as-is and escapes the special UTF-7 marker +// (the ampersand character). +func Escape(src string) string { + var sb strings.Builder + sb.Grow(len(src)) + + for _, ch := range src { + sb.WriteRune(ch) + if ch == '&' { + sb.WriteByte('-') + } + } + + return sb.String() +} diff --git a/vendor/github.com/emersion/go-imap/v2/internal/utf7/utf7.go b/vendor/github.com/emersion/go-imap/v2/internal/utf7/utf7.go new file mode 100644 index 0000000000..3ff09a9688 --- /dev/null +++ b/vendor/github.com/emersion/go-imap/v2/internal/utf7/utf7.go @@ -0,0 +1,13 @@ +// Package utf7 implements modified UTF-7 encoding defined in RFC 3501 section 5.1.3 +package utf7 + +import ( + "encoding/base64" +) + +const ( + min = 0x20 // Minimum self-representing UTF-7 value + max = 0x7E // Maximum self-representing UTF-7 value +) + +var b64Enc = base64.NewEncoding("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+,") diff --git a/vendor/github.com/emersion/go-imap/v2/list.go b/vendor/github.com/emersion/go-imap/v2/list.go new file mode 100644 index 0000000000..a3103a60d9 --- /dev/null +++ b/vendor/github.com/emersion/go-imap/v2/list.go @@ -0,0 +1,30 @@ +package imap + +// ListOptions contains options for the LIST command. +type ListOptions struct { + SelectSubscribed bool + SelectRemote bool + SelectRecursiveMatch bool // requires SelectSubscribed to be set + SelectSpecialUse bool // requires SPECIAL-USE + + ReturnSubscribed bool + ReturnChildren bool + ReturnStatus *StatusOptions // requires IMAP4rev2 or LIST-STATUS + ReturnSpecialUse bool // requires SPECIAL-USE +} + +// ListData is the mailbox data returned by a LIST command. +type ListData struct { + Attrs []MailboxAttr + Delim rune + Mailbox string + + // Extended data + ChildInfo *ListDataChildInfo + OldName string + Status *StatusData +} + +type ListDataChildInfo struct { + Subscribed bool +} diff --git a/vendor/github.com/emersion/go-imap/v2/namespace.go b/vendor/github.com/emersion/go-imap/v2/namespace.go new file mode 100644 index 0000000000..e538a394ef --- /dev/null +++ b/vendor/github.com/emersion/go-imap/v2/namespace.go @@ -0,0 +1,14 @@ +package imap + +// NamespaceData is the data returned by the NAMESPACE command. 
+type NamespaceData struct { + Personal []NamespaceDescriptor + Other []NamespaceDescriptor + Shared []NamespaceDescriptor +} + +// NamespaceDescriptor describes a namespace. +type NamespaceDescriptor struct { + Prefix string + Delim rune +} diff --git a/vendor/github.com/emersion/go-imap/v2/numset.go b/vendor/github.com/emersion/go-imap/v2/numset.go new file mode 100644 index 0000000000..a96b181a23 --- /dev/null +++ b/vendor/github.com/emersion/go-imap/v2/numset.go @@ -0,0 +1,149 @@ +package imap + +import ( + "unsafe" + + "github.com/emersion/go-imap/v2/internal/imapnum" +) + +// NumSet is a set of numbers identifying messages. NumSet is either a SeqSet +// or a UIDSet. +type NumSet interface { + // String returns the IMAP representation of the message number set. + String() string + // Dynamic returns true if the set contains "*" or "n:*" ranges or if the + // set represents the special SEARCHRES marker. + Dynamic() bool + + numSet() imapnum.Set +} + +var ( + _ NumSet = SeqSet(nil) + _ NumSet = UIDSet(nil) +) + +// SeqSet is a set of message sequence numbers. +type SeqSet []SeqRange + +// SeqSetNum returns a new SeqSet containing the specified sequence numbers. +func SeqSetNum(nums ...uint32) SeqSet { + var s SeqSet + s.AddNum(nums...) + return s +} + +func (s *SeqSet) numSetPtr() *imapnum.Set { + return (*imapnum.Set)(unsafe.Pointer(s)) +} + +func (s SeqSet) numSet() imapnum.Set { + return *s.numSetPtr() +} + +func (s SeqSet) String() string { + return s.numSet().String() +} + +func (s SeqSet) Dynamic() bool { + return s.numSet().Dynamic() +} + +// Contains returns true if the non-zero sequence number num is contained in +// the set. +func (s *SeqSet) Contains(num uint32) bool { + return s.numSet().Contains(num) +} + +// Nums returns a slice of all sequence numbers contained in the set. +func (s *SeqSet) Nums() ([]uint32, bool) { + return s.numSet().Nums() +} + +// AddNum inserts new sequence numbers into the set. The value 0 represents "*". +func (s *SeqSet) AddNum(nums ...uint32) { + s.numSetPtr().AddNum(nums...) +} + +// AddRange inserts a new range into the set. +func (s *SeqSet) AddRange(start, stop uint32) { + s.numSetPtr().AddRange(start, stop) +} + +// AddSet inserts all sequence numbers from other into s. +func (s *SeqSet) AddSet(other SeqSet) { + s.numSetPtr().AddSet(other.numSet()) +} + +// SeqRange is a range of message sequence numbers. +type SeqRange struct { + Start, Stop uint32 +} + +// UIDSet is a set of message UIDs. +type UIDSet []UIDRange + +// UIDSetNum returns a new UIDSet containing the specified UIDs. +func UIDSetNum(uids ...UID) UIDSet { + var s UIDSet + s.AddNum(uids...) + return s +} + +func (s *UIDSet) numSetPtr() *imapnum.Set { + return (*imapnum.Set)(unsafe.Pointer(s)) +} + +func (s UIDSet) numSet() imapnum.Set { + return *s.numSetPtr() +} + +func (s UIDSet) String() string { + if IsSearchRes(s) { + return "$" + } + return s.numSet().String() +} + +func (s UIDSet) Dynamic() bool { + return s.numSet().Dynamic() || IsSearchRes(s) +} + +// Contains returns true if the non-zero UID uid is contained in the set. +func (s UIDSet) Contains(uid UID) bool { + return s.numSet().Contains(uint32(uid)) +} + +// Nums returns a slice of all UIDs contained in the set. +func (s UIDSet) Nums() ([]UID, bool) { + nums, ok := s.numSet().Nums() + return uidListFromNumList(nums), ok +} + +// AddNum inserts new UIDs into the set. The value 0 represents "*". +func (s *UIDSet) AddNum(uids ...UID) { + s.numSetPtr().AddNum(numListFromUIDList(uids)...) 
+} + +// AddRange inserts a new range into the set. +func (s *UIDSet) AddRange(start, stop UID) { + s.numSetPtr().AddRange(uint32(start), uint32(stop)) +} + +// AddSet inserts all UIDs from other into s. +func (s *UIDSet) AddSet(other UIDSet) { + s.numSetPtr().AddSet(other.numSet()) +} + +// UIDRange is a range of message UIDs. +type UIDRange struct { + Start, Stop UID +} + +func numListFromUIDList(uids []UID) []uint32 { + return *(*[]uint32)(unsafe.Pointer(&uids)) +} + +func uidListFromNumList(nums []uint32) []UID { + return *(*[]UID)(unsafe.Pointer(&nums)) +} diff --git a/vendor/github.com/emersion/go-imap/v2/quota.go b/vendor/github.com/emersion/go-imap/v2/quota.go new file mode 100644 index 0000000000..f128fe44ff --- /dev/null +++ b/vendor/github.com/emersion/go-imap/v2/quota.go @@ -0,0 +1,13 @@ +package imap + +// QuotaResourceType is a QUOTA resource type. +// +// See RFC 9208 section 5. +type QuotaResourceType string + +const ( + QuotaResourceStorage QuotaResourceType = "STORAGE" + QuotaResourceMessage QuotaResourceType = "MESSAGE" + QuotaResourceMailbox QuotaResourceType = "MAILBOX" + QuotaResourceAnnotationStorage QuotaResourceType = "ANNOTATION-STORAGE" +) diff --git a/vendor/github.com/emersion/go-imap/v2/response.go b/vendor/github.com/emersion/go-imap/v2/response.go new file mode 100644 index 0000000000..0ce54cf679 --- /dev/null +++ b/vendor/github.com/emersion/go-imap/v2/response.go @@ -0,0 +1,81 @@ +package imap + +import ( + "fmt" + "strings" +) + +// StatusResponseType is a generic status response type. +type StatusResponseType string + +const ( + StatusResponseTypeOK StatusResponseType = "OK" + StatusResponseTypeNo StatusResponseType = "NO" + StatusResponseTypeBad StatusResponseType = "BAD" + StatusResponseTypePreAuth StatusResponseType = "PREAUTH" + StatusResponseTypeBye StatusResponseType = "BYE" +) + +// ResponseCode is a response code. +type ResponseCode string + +const ( + ResponseCodeAlert ResponseCode = "ALERT" + ResponseCodeAlreadyExists ResponseCode = "ALREADYEXISTS" + ResponseCodeAuthenticationFailed ResponseCode = "AUTHENTICATIONFAILED" + ResponseCodeAuthorizationFailed ResponseCode = "AUTHORIZATIONFAILED" + ResponseCodeBadCharset ResponseCode = "BADCHARSET" + ResponseCodeCannot ResponseCode = "CANNOT" + ResponseCodeClientBug ResponseCode = "CLIENTBUG" + ResponseCodeContactAdmin ResponseCode = "CONTACTADMIN" + ResponseCodeCorruption ResponseCode = "CORRUPTION" + ResponseCodeExpired ResponseCode = "EXPIRED" + ResponseCodeHasChildren ResponseCode = "HASCHILDREN" + ResponseCodeInUse ResponseCode = "INUSE" + ResponseCodeLimit ResponseCode = "LIMIT" + ResponseCodeNonExistent ResponseCode = "NONEXISTENT" + ResponseCodeNoPerm ResponseCode = "NOPERM" + ResponseCodeOverQuota ResponseCode = "OVERQUOTA" + ResponseCodeParse ResponseCode = "PARSE" + ResponseCodePrivacyRequired ResponseCode = "PRIVACYREQUIRED" + ResponseCodeServerBug ResponseCode = "SERVERBUG" + ResponseCodeTryCreate ResponseCode = "TRYCREATE" + ResponseCodeUnavailable ResponseCode = "UNAVAILABLE" + ResponseCodeUnknownCTE ResponseCode = "UNKNOWN-CTE" + + // METADATA + ResponseCodeTooMany ResponseCode = "TOOMANY" + ResponseCodeNoPrivate ResponseCode = "NOPRIVATE" + + // APPENDLIMIT + ResponseCodeTooBig ResponseCode = "TOOBIG" +) + +// StatusResponse is a generic status response. +// +// See RFC 9051 section 7.1. +type StatusResponse struct { + Type StatusResponseType + Code ResponseCode + Text string +} + +// Error is an IMAP error caused by a status response. 
+type Error StatusResponse
+
+var _ error = (*Error)(nil)
+
+// Error implements the error interface.
+func (err *Error) Error() string {
+	var sb strings.Builder
+	fmt.Fprintf(&sb, "imap: %v", err.Type)
+	if err.Code != "" {
+		fmt.Fprintf(&sb, " [%v]", err.Code)
+	}
+	text := err.Text
+	if text == "" {
+		text = "<unknown>"
+	}
+	fmt.Fprintf(&sb, " %v", text)
+	return sb.String()
+}
diff --git a/vendor/github.com/emersion/go-imap/v2/search.go b/vendor/github.com/emersion/go-imap/v2/search.go
new file mode 100644
index 0000000000..e5b77205ee
--- /dev/null
+++ b/vendor/github.com/emersion/go-imap/v2/search.go
@@ -0,0 +1,202 @@
+package imap
+
+import (
+	"reflect"
+	"time"
+)
+
+// SearchOptions contains options for the SEARCH command.
+type SearchOptions struct {
+	// Requires IMAP4rev2 or ESEARCH
+	ReturnMin bool
+	ReturnMax bool
+	ReturnAll bool
+	ReturnCount bool
+	// Requires IMAP4rev2 or SEARCHRES
+	ReturnSave bool
+}
+
+// SearchCriteria is a criteria for the SEARCH command.
+//
+// When multiple fields are populated, the result is the intersection ("and"
+// function) of all messages that match the fields.
+//
+// And, Not and Or can be used to combine multiple criteria together. For
+// instance, the following criteria matches messages not containing "hello":
+//
+//	SearchCriteria{Not: []SearchCriteria{{
+//		Body: []string{"hello"},
+//	}}}
+//
+// The following criteria matches messages containing either "hello" or
+// "world":
+//
+//	SearchCriteria{Or: [][2]SearchCriteria{{
+//		{Body: []string{"hello"}},
+//		{Body: []string{"world"}},
+//	}}}
+type SearchCriteria struct {
+	SeqNum []SeqSet
+	UID []UIDSet
+
+	// Only the date is used, the time and timezone are ignored
+	Since time.Time
+	Before time.Time
+	SentSince time.Time
+	SentBefore time.Time
+
+	Header []SearchCriteriaHeaderField
+	Body []string
+	Text []string
+
+	Flag []Flag
+	NotFlag []Flag
+
+	Larger int64
+	Smaller int64
+
+	Not []SearchCriteria
+	Or [][2]SearchCriteria
+
+	ModSeq *SearchCriteriaModSeq // requires CONDSTORE
+}
+
+// And intersects two search criteria.
+func (criteria *SearchCriteria) And(other *SearchCriteria) {
+	criteria.SeqNum = append(criteria.SeqNum, other.SeqNum...)
+	criteria.UID = append(criteria.UID, other.UID...)
+
+	criteria.Since = intersectSince(criteria.Since, other.Since)
+	criteria.Before = intersectBefore(criteria.Before, other.Before)
+	criteria.SentSince = intersectSince(criteria.SentSince, other.SentSince)
+	criteria.SentBefore = intersectBefore(criteria.SentBefore, other.SentBefore)
+
+	criteria.Header = append(criteria.Header, other.Header...)
+	criteria.Body = append(criteria.Body, other.Body...)
+	criteria.Text = append(criteria.Text, other.Text...)
+
+	criteria.Flag = append(criteria.Flag, other.Flag...)
+	criteria.NotFlag = append(criteria.NotFlag, other.NotFlag...)
+
+	if criteria.Larger == 0 || other.Larger > criteria.Larger {
+		criteria.Larger = other.Larger
+	}
+	if criteria.Smaller == 0 || other.Smaller < criteria.Smaller {
+		criteria.Smaller = other.Smaller
+	}
+
+	criteria.Not = append(criteria.Not, other.Not...)
+	criteria.Or = append(criteria.Or, other.Or...)
+} + +func intersectSince(t1, t2 time.Time) time.Time { + switch { + case t1.IsZero(): + return t2 + case t2.IsZero(): + return t1 + case t1.After(t2): + return t1 + default: + return t2 + } +} + +func intersectBefore(t1, t2 time.Time) time.Time { + switch { + case t1.IsZero(): + return t2 + case t2.IsZero(): + return t1 + case t1.Before(t2): + return t1 + default: + return t2 + } +} + +type SearchCriteriaHeaderField struct { + Key, Value string +} + +type SearchCriteriaModSeq struct { + ModSeq uint64 + MetadataName string + MetadataType SearchCriteriaMetadataType +} + +type SearchCriteriaMetadataType string + +const ( + SearchCriteriaMetadataAll SearchCriteriaMetadataType = "all" + SearchCriteriaMetadataPrivate SearchCriteriaMetadataType = "priv" + SearchCriteriaMetadataShared SearchCriteriaMetadataType = "shared" +) + +// SearchData is the data returned by a SEARCH command. +type SearchData struct { + All NumSet + + // requires IMAP4rev2 or ESEARCH + UID bool + Min uint32 + Max uint32 + Count uint32 + + // requires CONDSTORE + ModSeq uint64 +} + +// AllSeqNums returns All as a slice of sequence numbers. +func (data *SearchData) AllSeqNums() []uint32 { + seqSet, ok := data.All.(SeqSet) + if !ok { + return nil + } + + // Note: a dynamic sequence set would be a server bug + nums, ok := seqSet.Nums() + if !ok { + panic("imap: SearchData.All is a dynamic number set") + } + return nums +} + +// AllUIDs returns All as a slice of UIDs. +func (data *SearchData) AllUIDs() []UID { + uidSet, ok := data.All.(UIDSet) + if !ok { + return nil + } + + // Note: a dynamic sequence set would be a server bug + uids, ok := uidSet.Nums() + if !ok { + panic("imap: SearchData.All is a dynamic number set") + } + return uids +} + +// searchRes is a special empty UIDSet which can be used as a marker. It has +// a non-zero cap so that its data pointer is non-nil and can be compared. +// +// It's a UIDSet rather than a SeqSet so that it can be passed to the +// UID EXPUNGE command. +var ( + searchRes = make(UIDSet, 0, 1) + searchResAddr = reflect.ValueOf(searchRes).Pointer() +) + +// SearchRes returns a special marker which can be used instead of a UIDSet to +// reference the last SEARCH result. On the wire, it's encoded as '$'. +// +// It requires IMAP4rev2 or the SEARCHRES extension. +func SearchRes() UIDSet { + return searchRes +} + +// IsSearchRes checks whether a sequence set is a reference to the last SEARCH +// result. See SearchRes. +func IsSearchRes(numSet NumSet) bool { + return reflect.ValueOf(numSet).Pointer() == searchResAddr +} diff --git a/vendor/github.com/emersion/go-imap/v2/select.go b/vendor/github.com/emersion/go-imap/v2/select.go new file mode 100644 index 0000000000..9e3359f508 --- /dev/null +++ b/vendor/github.com/emersion/go-imap/v2/select.go @@ -0,0 +1,25 @@ +package imap + +// SelectOptions contains options for the SELECT or EXAMINE command. +type SelectOptions struct { + ReadOnly bool + CondStore bool // requires CONDSTORE +} + +// SelectData is the data returned by a SELECT command. +// +// In the old RFC 2060, PermanentFlags, UIDNext and UIDValidity are optional. +type SelectData struct { + // Flags defined for this mailbox + Flags []Flag + // Flags that the client can change permanently + PermanentFlags []Flag + // Number of messages in this mailbox (aka. 
"EXISTS") + NumMessages uint32 + UIDNext UID + UIDValidity uint32 + + List *ListData // requires IMAP4rev2 + + HighestModSeq uint64 // requires CONDSTORE +} diff --git a/vendor/github.com/emersion/go-imap/v2/status.go b/vendor/github.com/emersion/go-imap/v2/status.go new file mode 100644 index 0000000000..223eb6d706 --- /dev/null +++ b/vendor/github.com/emersion/go-imap/v2/status.go @@ -0,0 +1,33 @@ +package imap + +// StatusOptions contains options for the STATUS command. +type StatusOptions struct { + NumMessages bool + UIDNext bool + UIDValidity bool + NumUnseen bool + NumDeleted bool // requires IMAP4rev2 or QUOTA + Size bool // requires IMAP4rev2 or STATUS=SIZE + + AppendLimit bool // requires APPENDLIMIT + DeletedStorage bool // requires QUOTA=RES-STORAGE + HighestModSeq bool // requires CONDSTORE +} + +// StatusData is the data returned by a STATUS command. +// +// The mailbox name is always populated. The remaining fields are optional. +type StatusData struct { + Mailbox string + + NumMessages *uint32 + UIDNext UID + UIDValidity uint32 + NumUnseen *uint32 + NumDeleted *uint32 + Size *int64 + + AppendLimit *uint32 + DeletedStorage *int64 + HighestModSeq uint64 +} diff --git a/vendor/github.com/emersion/go-imap/v2/store.go b/vendor/github.com/emersion/go-imap/v2/store.go new file mode 100644 index 0000000000..c1ea26fa51 --- /dev/null +++ b/vendor/github.com/emersion/go-imap/v2/store.go @@ -0,0 +1,22 @@ +package imap + +// StoreOptions contains options for the STORE command. +type StoreOptions struct { + UnchangedSince uint64 // requires CONDSTORE +} + +// StoreFlagsOp is a flag operation: set, add or delete. +type StoreFlagsOp int + +const ( + StoreFlagsSet StoreFlagsOp = iota + StoreFlagsAdd + StoreFlagsDel +) + +// StoreFlags alters message flags. +type StoreFlags struct { + Op StoreFlagsOp + Silent bool + Flags []Flag +} diff --git a/vendor/github.com/emersion/go-imap/v2/thread.go b/vendor/github.com/emersion/go-imap/v2/thread.go new file mode 100644 index 0000000000..e4e3122edf --- /dev/null +++ b/vendor/github.com/emersion/go-imap/v2/thread.go @@ -0,0 +1,9 @@ +package imap + +// ThreadAlgorithm is a threading algorithm. +type ThreadAlgorithm string + +const ( + ThreadOrderedSubject ThreadAlgorithm = "ORDEREDSUBJECT" + ThreadReferences ThreadAlgorithm = "REFERENCES" +) diff --git a/vendor/github.com/emersion/go-message/.build.yml b/vendor/github.com/emersion/go-message/.build.yml new file mode 100644 index 0000000000..cc583f262c --- /dev/null +++ b/vendor/github.com/emersion/go-message/.build.yml @@ -0,0 +1,20 @@ +image: alpine/latest +packages: + - go +sources: + - https://github.com/emersion/go-message +artifacts: + - coverage.html +tasks: + - build: | + cd go-message + go build -v ./... + - test: | + cd go-message + go test -coverprofile=coverage.txt -covermode=atomic ./... + - coverage: | + cd go-message + go tool cover -html=coverage.txt -o ~/coverage.html + - gofmt: | + cd go-message + test -z $(gofmt -l .) 
diff --git a/vendor/github.com/emersion/go-message/.gitignore b/vendor/github.com/emersion/go-message/.gitignore new file mode 100644 index 0000000000..daf913b1b3 --- /dev/null +++ b/vendor/github.com/emersion/go-message/.gitignore @@ -0,0 +1,24 @@ +# Compiled Object files, Static and Dynamic libs (Shared Objects) +*.o +*.a +*.so + +# Folders +_obj +_test + +# Architecture specific extensions/prefixes +*.[568vq] +[568vq].out + +*.cgo1.go +*.cgo2.c +_cgo_defun.c +_cgo_gotypes.go +_cgo_export.* + +_testmain.go + +*.exe +*.test +*.prof diff --git a/vendor/github.com/emersion/go-message/LICENSE b/vendor/github.com/emersion/go-message/LICENSE new file mode 100644 index 0000000000..0d504877bf --- /dev/null +++ b/vendor/github.com/emersion/go-message/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2016 emersion + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/emersion/go-message/README.md b/vendor/github.com/emersion/go-message/README.md new file mode 100644 index 0000000000..20bf89142b --- /dev/null +++ b/vendor/github.com/emersion/go-message/README.md @@ -0,0 +1,31 @@ +# go-message + +[![Go Reference](https://pkg.go.dev/badge/github.com/emersion/go-message.svg)](https://pkg.go.dev/github.com/emersion/go-message) +[![builds.sr.ht status](https://builds.sr.ht/~emersion/go-message/commits/master.svg)](https://builds.sr.ht/~emersion/go-message/commits/master?) + +A Go library for the Internet Message Format. 
It implements: + +* [RFC 5322]: Internet Message Format +* [RFC 2045], [RFC 2046] and [RFC 2047]: Multipurpose Internet Mail Extensions +* [RFC 2183]: Content-Disposition Header Field + +## Features + +* Streaming API +* Automatic encoding and charset handling (to decode all charsets, add + `import _ "github.com/emersion/go-message/charset"` to your application) +* A [`mail`](https://godocs.io/github.com/emersion/go-message/mail) subpackage + to read and write mail messages +* DKIM-friendly +* A [`textproto`](https://godocs.io/github.com/emersion/go-message/textproto) + subpackage that just implements the wire format + +## License + +MIT + +[RFC 5322]: https://tools.ietf.org/html/rfc5322 +[RFC 2045]: https://tools.ietf.org/html/rfc2045 +[RFC 2046]: https://tools.ietf.org/html/rfc2046 +[RFC 2047]: https://tools.ietf.org/html/rfc2047 +[RFC 2183]: https://tools.ietf.org/html/rfc2183 diff --git a/vendor/github.com/emersion/go-message/charset.go b/vendor/github.com/emersion/go-message/charset.go new file mode 100644 index 0000000000..9d4d10e724 --- /dev/null +++ b/vendor/github.com/emersion/go-message/charset.go @@ -0,0 +1,66 @@ +package message + +import ( + "errors" + "fmt" + "io" + "mime" + "strings" +) + +type UnknownCharsetError struct { + e error +} + +func (u UnknownCharsetError) Unwrap() error { return u.e } + +func (u UnknownCharsetError) Error() string { + return "unknown charset: " + u.e.Error() +} + +// IsUnknownCharset returns a boolean indicating whether the error is known to +// report that the charset advertised by the entity is unknown. +func IsUnknownCharset(err error) bool { + return errors.As(err, new(UnknownCharsetError)) +} + +// CharsetReader, if non-nil, defines a function to generate charset-conversion +// readers, converting from the provided charset into UTF-8. Charsets are always +// lower-case. utf-8 and us-ascii charsets are handled by default. One of the +// the CharsetReader's result values must be non-nil. +// +// Importing github.com/emersion/go-message/charset will set CharsetReader to +// a function that handles most common charsets. Alternatively, CharsetReader +// can be set to e.g. golang.org/x/net/html/charset.NewReaderLabel. +var CharsetReader func(charset string, input io.Reader) (io.Reader, error) + +// charsetReader calls CharsetReader if non-nil. +func charsetReader(charset string, input io.Reader) (io.Reader, error) { + charset = strings.ToLower(charset) + if charset == "utf-8" || charset == "us-ascii" { + return input, nil + } + if CharsetReader != nil { + r, err := CharsetReader(charset, input) + if err != nil { + return r, UnknownCharsetError{err} + } + return r, nil + } + return input, UnknownCharsetError{fmt.Errorf("message: unhandled charset %q", charset)} +} + +// decodeHeader decodes an internationalized header field. If it fails, it +// returns the input string and the error. 
+func decodeHeader(s string) (string, error) { + wordDecoder := mime.WordDecoder{CharsetReader: charsetReader} + dec, err := wordDecoder.DecodeHeader(s) + if err != nil { + return s, err + } + return dec, nil +} + +func encodeHeader(s string) string { + return mime.QEncoding.Encode("utf-8", s) +} diff --git a/vendor/github.com/emersion/go-message/encoding.go b/vendor/github.com/emersion/go-message/encoding.go new file mode 100644 index 0000000000..a503276d4e --- /dev/null +++ b/vendor/github.com/emersion/go-message/encoding.go @@ -0,0 +1,151 @@ +package message + +import ( + "bytes" + "encoding/base64" + "errors" + "fmt" + "io" + "mime/quotedprintable" + "strings" +) + +type UnknownEncodingError struct { + e error +} + +func (u UnknownEncodingError) Unwrap() error { return u.e } + +func (u UnknownEncodingError) Error() string { + return "encoding error: " + u.e.Error() +} + +// IsUnknownEncoding returns a boolean indicating whether the error is known to +// report that the encoding advertised by the entity is unknown. +func IsUnknownEncoding(err error) bool { + return errors.As(err, new(UnknownEncodingError)) +} + +func encodingReader(enc string, r io.Reader) (io.Reader, error) { + var dec io.Reader + switch strings.ToLower(enc) { + case "quoted-printable": + dec = quotedprintable.NewReader(r) + case "base64": + wrapped := &whitespaceReplacingReader{wrapped: r} + dec = base64.NewDecoder(base64.StdEncoding, wrapped) + case "7bit", "8bit", "binary", "": + dec = r + default: + return nil, fmt.Errorf("unhandled encoding %q", enc) + } + return dec, nil +} + +type nopCloser struct { + io.Writer +} + +func (nopCloser) Close() error { + return nil +} + +func encodingWriter(enc string, w io.Writer) (io.WriteCloser, error) { + var wc io.WriteCloser + switch strings.ToLower(enc) { + case "quoted-printable": + wc = quotedprintable.NewWriter(w) + case "base64": + wc = base64.NewEncoder(base64.StdEncoding, &lineWrapper{w: w, maxLineLen: 76}) + case "7bit", "8bit": + wc = nopCloser{&lineWrapper{w: w, maxLineLen: 998}} + case "binary", "": + wc = nopCloser{w} + default: + return nil, fmt.Errorf("unhandled encoding %q", enc) + } + return wc, nil +} + +// whitespaceReplacingReader replaces space and tab characters with a LF so +// base64 bodies with a continuation indent can be decoded by the base64 decoder +// even though it is against the spec. 
+type whitespaceReplacingReader struct { + wrapped io.Reader +} + +func (r *whitespaceReplacingReader) Read(p []byte) (int, error) { + n, err := r.wrapped.Read(p) + + for i := 0; i < n; i++ { + if p[i] == ' ' || p[i] == '\t' { + p[i] = '\n' + } + } + + return n, err +} + +type lineWrapper struct { + w io.Writer + maxLineLen int + + curLineLen int + cr bool +} + +func (w *lineWrapper) Write(b []byte) (int, error) { + var written int + for len(b) > 0 { + var l []byte + l, b = cutLine(b, w.maxLineLen-w.curLineLen) + + lf := bytes.HasSuffix(l, []byte("\n")) + l = bytes.TrimSuffix(l, []byte("\n")) + + n, err := w.w.Write(l) + if err != nil { + return written, err + } + written += n + + cr := bytes.HasSuffix(l, []byte("\r")) + if len(l) == 0 { + cr = w.cr + } + + if !lf && len(b) == 0 { + w.curLineLen += len(l) + w.cr = cr + break + } + w.curLineLen = 0 + + ending := []byte("\r\n") + if cr { + ending = []byte("\n") + } + _, err = w.w.Write(ending) + if err != nil { + return written, err + } + w.cr = false + } + + return written, nil +} + +func cutLine(b []byte, max int) ([]byte, []byte) { + for i := 0; i < len(b); i++ { + if b[i] == '\r' && i == max { + continue + } + if b[i] == '\n' { + return b[:i+1], b[i+1:] + } + if i >= max { + return b[:i], b[i:] + } + } + return b, nil +} diff --git a/vendor/github.com/emersion/go-message/entity.go b/vendor/github.com/emersion/go-message/entity.go new file mode 100644 index 0000000000..ee2ebe6a0a --- /dev/null +++ b/vendor/github.com/emersion/go-message/entity.go @@ -0,0 +1,264 @@ +package message + +import ( + "bufio" + "errors" + "io" + "math" + "strings" + + "github.com/emersion/go-message/textproto" +) + +// An Entity is either a whole message or a one of the parts in the body of a +// multipart entity. +type Entity struct { + Header Header // The entity's header. + Body io.Reader // The decoded entity's body. + + mediaType string + mediaParams map[string]string +} + +// New makes a new message with the provided header and body. The entity's +// transfer encoding and charset are automatically decoded to UTF-8. +// +// If the message uses an unknown transfer encoding or charset, New returns an +// error that verifies IsUnknownCharset, but also returns an Entity that can +// be read. +func New(header Header, body io.Reader) (*Entity, error) { + var err error + + mediaType, mediaParams, _ := header.ContentType() + + // QUIRK: RFC 2045 section 6.4 specifies that multipart messages can't have + // a Content-Transfer-Encoding other than "7bit", "8bit" or "binary". + // However some messages in the wild are non-conformant and have it set to + // e.g. "quoted-printable". So we just ignore it for multipart. + // See https://github.com/emersion/go-message/issues/48 + if !strings.HasPrefix(mediaType, "multipart/") { + enc := header.Get("Content-Transfer-Encoding") + if decoded, encErr := encodingReader(enc, body); encErr != nil { + err = UnknownEncodingError{encErr} + } else { + body = decoded + } + } + + // RFC 2046 section 4.1.2: charset only applies to text/* + if strings.HasPrefix(mediaType, "text/") { + if ch, ok := mediaParams["charset"]; ok { + if converted, charsetErr := charsetReader(ch, body); charsetErr != nil { + err = UnknownCharsetError{charsetErr} + } else { + body = converted + } + } + } + + return &Entity{ + Header: header, + Body: body, + mediaType: mediaType, + mediaParams: mediaParams, + }, err +} + +// NewMultipart makes a new multipart message with the provided header and +// parts. The Content-Type header must begin with "multipart/". 
+// +// If the message uses an unknown transfer encoding, NewMultipart returns an +// error that verifies IsUnknownCharset, but also returns an Entity that can +// be read. +func NewMultipart(header Header, parts []*Entity) (*Entity, error) { + r := &multipartBody{ + header: header, + parts: parts, + } + + return New(header, r) +} + +const defaultMaxHeaderBytes = 1 << 20 // 1 MB + +var errHeaderTooBig = errors.New("message: header exceeds maximum size") + +// limitedReader is the same as io.LimitedReader, but returns a custom error. +type limitedReader struct { + R io.Reader + N int64 +} + +func (lr *limitedReader) Read(p []byte) (int, error) { + if lr.N <= 0 { + return 0, errHeaderTooBig + } + if int64(len(p)) > lr.N { + p = p[0:lr.N] + } + n, err := lr.R.Read(p) + lr.N -= int64(n) + return n, err +} + +// ReadOptions are options for ReadWithOptions. +type ReadOptions struct { + // MaxHeaderBytes limits the maximum permissible size of a message header + // block. If exceeded, an error will be returned. + // + // Set to -1 for no limit, set to 0 for the default value (1MB). + MaxHeaderBytes int64 +} + +// withDefaults returns a sanitised version of the options with defaults/special +// values accounted for. +func (o *ReadOptions) withDefaults() *ReadOptions { + var out ReadOptions + if o != nil { + out = *o + } + if out.MaxHeaderBytes == 0 { + out.MaxHeaderBytes = defaultMaxHeaderBytes + } else if out.MaxHeaderBytes < 0 { + out.MaxHeaderBytes = math.MaxInt64 + } + return &out +} + +// ReadWithOptions see Read, but allows overriding some parameters with +// ReadOptions. +// +// If the message uses an unknown transfer encoding or charset, ReadWithOptions +// returns an error that verifies IsUnknownCharset or IsUnknownEncoding, but +// also returns an Entity that can be read. +func ReadWithOptions(r io.Reader, opts *ReadOptions) (*Entity, error) { + opts = opts.withDefaults() + + lr := &limitedReader{R: r, N: opts.MaxHeaderBytes} + br := bufio.NewReader(lr) + + h, err := textproto.ReadHeader(br) + if err != nil { + return nil, err + } + + lr.N = math.MaxInt64 + + return New(Header{h}, br) +} + +// Read reads a message from r. The message's encoding and charset are +// automatically decoded to raw UTF-8. Note that this function only reads the +// message header. +// +// If the message uses an unknown transfer encoding or charset, Read returns an +// error that verifies IsUnknownCharset or IsUnknownEncoding, but also returns +// an Entity that can be read. +func Read(r io.Reader) (*Entity, error) { + return ReadWithOptions(r, nil) +} + +// MultipartReader returns a MultipartReader that reads parts from this entity's +// body. If this entity is not multipart, it returns nil. +func (e *Entity) MultipartReader() MultipartReader { + if !strings.HasPrefix(e.mediaType, "multipart/") { + return nil + } + if mb, ok := e.Body.(*multipartBody); ok { + return mb + } + return &multipartReader{textproto.NewMultipartReader(e.Body, e.mediaParams["boundary"])} +} + +// writeBodyTo writes this entity's body to w (without the header). +func (e *Entity) writeBodyTo(w *Writer) error { + var err error + if mb, ok := e.Body.(*multipartBody); ok { + err = mb.writeBodyTo(w) + } else { + _, err = io.Copy(w, e.Body) + } + return err +} + +// WriteTo writes this entity's header and body to w. 
+func (e *Entity) WriteTo(w io.Writer) error { + ew, err := CreateWriter(w, e.Header) + if err != nil { + return err + } + + if err := e.writeBodyTo(ew); err != nil { + ew.Close() + return err + } + + return ew.Close() +} + +// WalkFunc is the type of the function called for each part visited by Walk. +// +// The path argument is a list of multipart indices leading to the part. The +// root part has a nil path. +// +// If there was an encoding error walking to a part, the incoming error will +// describe the problem and the function can decide how to handle that error. +// +// Unlike IMAP part paths, indices start from 0 (instead of 1) and a +// non-multipart message has a nil path (instead of {1}). +// +// If an error is returned, processing stops. +type WalkFunc func(path []int, entity *Entity, err error) error + +// Walk walks the entity's multipart tree, calling walkFunc for each part in +// the tree, including the root entity. +// +// Walk consumes the entity. +func (e *Entity) Walk(walkFunc WalkFunc) error { + var multipartReaders []MultipartReader + var path []int + part := e + for { + var err error + if part == nil { + if len(multipartReaders) == 0 { + break + } + + // Get the next part from the last multipart reader + mr := multipartReaders[len(multipartReaders)-1] + part, err = mr.NextPart() + if err == io.EOF { + multipartReaders = multipartReaders[:len(multipartReaders)-1] + path = path[:len(path)-1] + continue + } else if IsUnknownEncoding(err) || IsUnknownCharset(err) { + // Forward the error to walkFunc + } else if err != nil { + return err + } + + path[len(path)-1]++ + } + + // Copy the path since we'll mutate it on the next iteration + var pathCopy []int + if len(path) > 0 { + pathCopy = make([]int, len(path)) + copy(pathCopy, path) + } + + if err := walkFunc(pathCopy, part, err); err != nil { + return err + } + + if mr := part.MultipartReader(); mr != nil { + multipartReaders = append(multipartReaders, mr) + path = append(path, -1) + } + + part = nil + } + + return nil +} diff --git a/vendor/github.com/emersion/go-message/header.go b/vendor/github.com/emersion/go-message/header.go new file mode 100644 index 0000000000..1a98fe6605 --- /dev/null +++ b/vendor/github.com/emersion/go-message/header.go @@ -0,0 +1,118 @@ +package message + +import ( + "mime" + + "github.com/emersion/go-message/textproto" +) + +func parseHeaderWithParams(s string) (f string, params map[string]string, err error) { + f, params, err = mime.ParseMediaType(s) + if err != nil { + return s, nil, err + } + for k, v := range params { + params[k], _ = decodeHeader(v) + } + return +} + +func formatHeaderWithParams(f string, params map[string]string) string { + encParams := make(map[string]string) + for k, v := range params { + encParams[k] = encodeHeader(v) + } + return mime.FormatMediaType(f, encParams) +} + +// HeaderFields iterates over header fields. +type HeaderFields interface { + textproto.HeaderFields + + // Text parses the value of the current field as plaintext. The field + // charset is decoded to UTF-8. If the header field's charset is unknown, + // the raw field value is returned and the error verifies IsUnknownCharset. + Text() (string, error) +} + +type headerFields struct { + textproto.HeaderFields +} + +func (hf *headerFields) Text() (string, error) { + return decodeHeader(hf.Value()) +} + +// A Header represents the key-value pairs in a message header. +type Header struct { + textproto.Header +} + +// HeaderFromMap creates a header from a map of header fields. 
+// +// This function is provided for interoperability with the standard library. +// If possible, ReadHeader should be used instead to avoid loosing information. +// The map representation looses the ordering of the fields, the capitalization +// of the header keys, and the whitespace of the original header. +func HeaderFromMap(m map[string][]string) Header { + return Header{textproto.HeaderFromMap(m)} +} + +// ContentType parses the Content-Type header field. +// +// If no Content-Type is specified, it returns "text/plain". +func (h *Header) ContentType() (t string, params map[string]string, err error) { + v := h.Get("Content-Type") + if v == "" { + return "text/plain", nil, nil + } + return parseHeaderWithParams(v) +} + +// SetContentType formats the Content-Type header field. +func (h *Header) SetContentType(t string, params map[string]string) { + h.Set("Content-Type", formatHeaderWithParams(t, params)) +} + +// ContentDisposition parses the Content-Disposition header field, as defined in +// RFC 2183. +func (h *Header) ContentDisposition() (disp string, params map[string]string, err error) { + return parseHeaderWithParams(h.Get("Content-Disposition")) +} + +// SetContentDisposition formats the Content-Disposition header field, as +// defined in RFC 2183. +func (h *Header) SetContentDisposition(disp string, params map[string]string) { + h.Set("Content-Disposition", formatHeaderWithParams(disp, params)) +} + +// Text parses a plaintext header field. The field charset is automatically +// decoded to UTF-8. If the header field's charset is unknown, the raw field +// value is returned and the error verifies IsUnknownCharset. +func (h *Header) Text(k string) (string, error) { + return decodeHeader(h.Get(k)) +} + +// SetText sets a plaintext header field. +func (h *Header) SetText(k, v string) { + h.Set(k, encodeHeader(v)) +} + +// Copy creates a stand-alone copy of the header. +func (h *Header) Copy() Header { + return Header{h.Header.Copy()} +} + +// Fields iterates over all the header fields. +// +// The header may not be mutated while iterating, except using HeaderFields.Del. +func (h *Header) Fields() HeaderFields { + return &headerFields{h.Header.Fields()} +} + +// FieldsByKey iterates over all fields having the specified key. +// +// The header may not be mutated while iterating, except using HeaderFields.Del. +func (h *Header) FieldsByKey(k string) HeaderFields { + return &headerFields{h.Header.FieldsByKey(k)} +} diff --git a/vendor/github.com/emersion/go-message/mail/address.go b/vendor/github.com/emersion/go-message/mail/address.go new file mode 100644 index 0000000000..3d3bbca117 --- /dev/null +++ b/vendor/github.com/emersion/go-message/mail/address.go @@ -0,0 +1,42 @@ +package mail + +import ( + "mime" + "net/mail" + "strings" + + "github.com/emersion/go-message" +) + +// Address represents a single mail address. +// The type alias ensures that a net/mail.Address can be used wherever an +// Address is expected +type Address = mail.Address + +func formatAddressList(l []*Address) string { + formatted := make([]string, len(l)) + for i, a := range l { + formatted[i] = a.String() + } + return strings.Join(formatted, ", ") +} + +// ParseAddress parses a single RFC 5322 address, e.g. 
"Barry Gibbs " +// Use this function only if you parse from a string, if you have a Header use +// Header.AddressList instead +func ParseAddress(address string) (*Address, error) { + parser := mail.AddressParser{ + &mime.WordDecoder{message.CharsetReader}, + } + return parser.Parse(address) +} + +// ParseAddressList parses the given string as a list of addresses. +// Use this function only if you parse from a string, if you have a Header use +// Header.AddressList instead +func ParseAddressList(list string) ([]*Address, error) { + parser := mail.AddressParser{ + &mime.WordDecoder{message.CharsetReader}, + } + return parser.ParseList(list) +} diff --git a/vendor/github.com/emersion/go-message/mail/attachment.go b/vendor/github.com/emersion/go-message/mail/attachment.go new file mode 100644 index 0000000000..3fbbce2661 --- /dev/null +++ b/vendor/github.com/emersion/go-message/mail/attachment.go @@ -0,0 +1,30 @@ +package mail + +import ( + "github.com/emersion/go-message" +) + +// An AttachmentHeader represents an attachment's header. +type AttachmentHeader struct { + message.Header +} + +// Filename parses the attachment's filename. +func (h *AttachmentHeader) Filename() (string, error) { + _, params, err := h.ContentDisposition() + + filename, ok := params["filename"] + if !ok { + // Using "name" in Content-Type is discouraged + _, params, err = h.ContentType() + filename = params["name"] + } + + return filename, err +} + +// SetFilename formats the attachment's filename. +func (h *AttachmentHeader) SetFilename(filename string) { + dispParams := map[string]string{"filename": filename} + h.SetContentDisposition("attachment", dispParams) +} diff --git a/vendor/github.com/emersion/go-message/mail/header.go b/vendor/github.com/emersion/go-message/mail/header.go new file mode 100644 index 0000000000..4dd89ef657 --- /dev/null +++ b/vendor/github.com/emersion/go-message/mail/header.go @@ -0,0 +1,381 @@ +package mail + +import ( + "crypto/rand" + "encoding/binary" + "errors" + "fmt" + "net/mail" + "os" + "strconv" + "strings" + "time" + "unicode/utf8" + + "github.com/emersion/go-message" +) + +const dateLayout = "Mon, 02 Jan 2006 15:04:05 -0700" + +type headerParser struct { + s string +} + +func (p *headerParser) len() int { + return len(p.s) +} + +func (p *headerParser) empty() bool { + return p.len() == 0 +} + +func (p *headerParser) peek() byte { + return p.s[0] +} + +func (p *headerParser) consume(c byte) bool { + if p.empty() || p.peek() != c { + return false + } + p.s = p.s[1:] + return true +} + +// skipSpace skips the leading space and tab characters. +func (p *headerParser) skipSpace() { + p.s = strings.TrimLeft(p.s, " \t") +} + +// skipCFWS skips CFWS as defined in RFC5322. It returns false if the CFWS is +// malformed. +func (p *headerParser) skipCFWS() bool { + p.skipSpace() + + for { + if !p.consume('(') { + break + } + + if _, ok := p.consumeComment(); !ok { + return false + } + + p.skipSpace() + } + + return true +} + +func (p *headerParser) consumeComment() (string, bool) { + // '(' already consumed. 
+ depth := 1 + + var comment string + for { + if p.empty() || depth == 0 { + break + } + + if p.peek() == '\\' && p.len() > 1 { + p.s = p.s[1:] + } else if p.peek() == '(' { + depth++ + } else if p.peek() == ')' { + depth-- + } + + if depth > 0 { + comment += p.s[:1] + } + + p.s = p.s[1:] + } + + return comment, depth == 0 +} + +func (p *headerParser) parseAtomText(dot bool) (string, error) { + i := 0 + for { + r, size := utf8.DecodeRuneInString(p.s[i:]) + if size == 1 && r == utf8.RuneError { + return "", fmt.Errorf("mail: invalid UTF-8 in atom-text: %q", p.s) + } else if size == 0 || !isAtext(r, dot) { + break + } + i += size + } + if i == 0 { + return "", errors.New("mail: invalid string") + } + + var atom string + atom, p.s = p.s[:i], p.s[i:] + return atom, nil +} + +func isAtext(r rune, dot bool) bool { + switch r { + case '.': + return dot + // RFC 5322 3.2.3 specials + case '(', ')', '[', ']', ';', '@', '\\', ',': + return false + case '<', '>', '"', ':': + return false + } + return isVchar(r) +} + +// isVchar reports whether r is an RFC 5322 VCHAR character. +func isVchar(r rune) bool { + // Visible (printing) characters + return '!' <= r && r <= '~' || isMultibyte(r) +} + +// isMultibyte reports whether r is a multi-byte UTF-8 character +// as supported by RFC 6532 +func isMultibyte(r rune) bool { + return r >= utf8.RuneSelf +} + +func (p *headerParser) parseNoFoldLiteral() (string, error) { + if !p.consume('[') { + return "", errors.New("mail: missing '[' in no-fold-literal") + } + + i := 0 + for { + r, size := utf8.DecodeRuneInString(p.s[i:]) + if size == 1 && r == utf8.RuneError { + return "", fmt.Errorf("mail: invalid UTF-8 in no-fold-literal: %q", p.s) + } else if size == 0 || !isDtext(r) { + break + } + i += size + } + var lit string + lit, p.s = p.s[:i], p.s[i:] + + if !p.consume(']') { + return "", errors.New("mail: missing ']' in no-fold-literal") + } + return "[" + lit + "]", nil +} + +func isDtext(r rune) bool { + switch r { + case '[', ']', '\\': + return false + } + return isVchar(r) +} + +func (p *headerParser) parseMsgID() (string, error) { + if !p.skipCFWS() { + return "", errors.New("mail: malformed parenthetical comment") + } + + if !p.consume('<') { + return "", errors.New("mail: missing '<' in msg-id") + } + + left, err := p.parseAtomText(true) + if err != nil { + return "", err + } + + if !p.consume('@') { + return "", errors.New("mail: missing '@' in msg-id") + } + + var right string + if !p.empty() && p.peek() == '[' { + // no-fold-literal + right, err = p.parseNoFoldLiteral() + } else { + right, err = p.parseAtomText(true) + } + if err != nil { + return "", err + } + + if !p.consume('>') { + return "", errors.New("mail: missing '>' in msg-id") + } + + if !p.skipCFWS() { + return "", errors.New("mail: malformed parenthetical comment") + } + + return left + "@" + right, nil +} + +// A Header is a mail header. +type Header struct { + message.Header +} + +// HeaderFromMap creates a header from a map of header fields. +// +// This function is provided for interoperability with the standard library. +// If possible, ReadHeader should be used instead to avoid loosing information. +// The map representation looses the ordering of the fields, the capitalization +// of the header keys, and the whitespace of the original header. +func HeaderFromMap(m map[string][]string) Header { + return Header{message.HeaderFromMap(m)} +} + +// AddressList parses the named header field as a list of addresses. If the +// header field is missing, it returns nil. 
+// +// This can be used on From, Sender, Reply-To, To, Cc and Bcc header fields. +func (h *Header) AddressList(key string) ([]*Address, error) { + v := h.Get(key) + if v == "" { + return nil, nil + } + return ParseAddressList(v) +} + +// SetAddressList formats the named header field to the provided list of +// addresses. +// +// This can be used on From, Sender, Reply-To, To, Cc and Bcc header fields. +func (h *Header) SetAddressList(key string, addrs []*Address) { + if len(addrs) > 0 { + h.Set(key, formatAddressList(addrs)) + } else { + h.Del(key) + } +} + +// Date parses the Date header field. If the header field is missing, it +// returns the zero time. +func (h *Header) Date() (time.Time, error) { + v := h.Get("Date") + if v == "" { + return time.Time{}, nil + } + return mail.ParseDate(v) +} + +// SetDate formats the Date header field. +func (h *Header) SetDate(t time.Time) { + if !t.IsZero() { + h.Set("Date", t.Format(dateLayout)) + } else { + h.Del("Date") + } +} + +// Subject parses the Subject header field. If there is an error, the raw field +// value is returned alongside the error. +func (h *Header) Subject() (string, error) { + return h.Text("Subject") +} + +// SetSubject formats the Subject header field. +func (h *Header) SetSubject(s string) { + h.SetText("Subject", s) +} + +// MessageID parses the Message-ID field. It returns the message identifier, +// without the angle brackets. If the message doesn't have a Message-ID header +// field, it returns an empty string. +func (h *Header) MessageID() (string, error) { + v := h.Get("Message-Id") + if v == "" { + return "", nil + } + + p := headerParser{v} + return p.parseMsgID() +} + +// MsgIDList parses a list of message identifiers. It returns message +// identifiers without angle brackets. If the header field is missing, it +// returns nil. +// +// This can be used on In-Reply-To and References header fields. +func (h *Header) MsgIDList(key string) ([]string, error) { + v := h.Get(key) + if v == "" { + return nil, nil + } + + p := headerParser{v} + var l []string + for !p.empty() { + msgID, err := p.parseMsgID() + if err != nil { + return l, err + } + l = append(l, msgID) + } + + return l, nil +} + +// GenerateMessageID wraps GenerateMessageIDWithHostname and therefore uses the +// hostname of the local machine. This is done to not break existing software. +// Wherever possible better use GenerateMessageIDWithHostname, because the local +// hostname of a machine tends to not be unique nor a FQDN which especially +// brings problems with spam filters. +func (h *Header) GenerateMessageID() error { + var err error + hostname, err := os.Hostname() + if err != nil { + return err + } + return h.GenerateMessageIDWithHostname(hostname) +} + +// GenerateMessageIDWithHostname generates an RFC 2822-compliant Message-Id +// based on the informational draft "Recommendations for generating Message +// IDs", it takes an hostname as argument, so that software using this library +// could use a hostname they know to be unique +func (h *Header) GenerateMessageIDWithHostname(hostname string) error { + now := uint64(time.Now().UnixNano()) + + nonceByte := make([]byte, 8) + if _, err := rand.Read(nonceByte); err != nil { + return err + } + nonce := binary.BigEndian.Uint64(nonceByte) + + msgID := fmt.Sprintf("%s.%s@%s", base36(now), base36(nonce), hostname) + h.SetMessageID(msgID) + return nil +} + +func base36(input uint64) string { + return strings.ToUpper(strconv.FormatUint(input, 36)) +} + +// SetMessageID sets the Message-ID field. 
id is the message identifier, +// without the angle brackets. +func (h *Header) SetMessageID(id string) { + if id != "" { + h.Set("Message-Id", "<"+id+">") + } else { + h.Del("Message-Id") + } +} + +// SetMsgIDList formats a list of message identifiers. Message identifiers +// don't include angle brackets. +// +// This can be used on In-Reply-To and References header fields. +func (h *Header) SetMsgIDList(key string, l []string) { + if len(l) > 0 { + h.Set(key, "<"+strings.Join(l, "> <")+">") + } else { + h.Del(key) + } +} + +// Copy creates a stand-alone copy of the header. +func (h *Header) Copy() Header { + return Header{h.Header.Copy()} +} diff --git a/vendor/github.com/emersion/go-message/mail/inline.go b/vendor/github.com/emersion/go-message/mail/inline.go new file mode 100644 index 0000000000..2aadfdcae9 --- /dev/null +++ b/vendor/github.com/emersion/go-message/mail/inline.go @@ -0,0 +1,10 @@ +package mail + +import ( + "github.com/emersion/go-message" +) + +// A InlineHeader represents a message text header. +type InlineHeader struct { + message.Header +} diff --git a/vendor/github.com/emersion/go-message/mail/mail.go b/vendor/github.com/emersion/go-message/mail/mail.go new file mode 100644 index 0000000000..2f9a12c919 --- /dev/null +++ b/vendor/github.com/emersion/go-message/mail/mail.go @@ -0,0 +1,9 @@ +// Package mail implements reading and writing mail messages. +// +// This package assumes that a mail message contains one or more text parts and +// zero or more attachment parts. Each text part represents a different version +// of the message content (e.g. a different type, a different language and so +// on). +// +// RFC 5322 defines the Internet Message Format. +package mail diff --git a/vendor/github.com/emersion/go-message/mail/reader.go b/vendor/github.com/emersion/go-message/mail/reader.go new file mode 100644 index 0000000000..f721a452bc --- /dev/null +++ b/vendor/github.com/emersion/go-message/mail/reader.go @@ -0,0 +1,130 @@ +package mail + +import ( + "container/list" + "io" + "strings" + + "github.com/emersion/go-message" +) + +// A PartHeader is a mail part header. It contains convenience functions to get +// and set header fields. +type PartHeader interface { + // Add adds the key, value pair to the header. + Add(key, value string) + // Del deletes the values associated with key. + Del(key string) + // Get gets the first value associated with the given key. If there are no + // values associated with the key, Get returns "". + Get(key string) string + // Set sets the header entries associated with key to the single element + // value. It replaces any existing values associated with key. + Set(key, value string) +} + +// A Part is either a mail text or an attachment. Header is either a InlineHeader +// or an AttachmentHeader. +type Part struct { + Header PartHeader + Body io.Reader +} + +// A Reader reads a mail message. +type Reader struct { + Header Header + + e *message.Entity + readers *list.List +} + +// NewReader creates a new mail reader. +func NewReader(e *message.Entity) *Reader { + mr := e.MultipartReader() + if mr == nil { + // Artificially create a multipart entity + // With this header, no error will be returned by message.NewMultipart + var h message.Header + h.Set("Content-Type", "multipart/mixed") + me, _ := message.NewMultipart(h, []*message.Entity{e}) + mr = me.MultipartReader() + } + + l := list.New() + l.PushBack(mr) + + return &Reader{Header{e.Header}, e, l} +} + +// CreateReader reads a mail header from r and returns a new mail reader. 
+// +// If the message uses an unknown transfer encoding or charset, CreateReader +// returns an error that verifies message.IsUnknownCharset, but also returns a +// Reader that can be used. +func CreateReader(r io.Reader) (*Reader, error) { + e, err := message.Read(r) + if err != nil && !message.IsUnknownCharset(err) { + return nil, err + } + + return NewReader(e), err +} + +// NextPart returns the next mail part. If there is no more part, io.EOF is +// returned as error. +// +// The returned Part.Body must be read completely before the next call to +// NextPart, otherwise it will be discarded. +// +// If the part uses an unknown transfer encoding or charset, NextPart returns an +// error that verifies message.IsUnknownCharset, but also returns a Part that +// can be used. +func (r *Reader) NextPart() (*Part, error) { + for r.readers.Len() > 0 { + e := r.readers.Back() + mr := e.Value.(message.MultipartReader) + + p, err := mr.NextPart() + if err == io.EOF { + // This whole multipart entity has been read, continue with the next one + r.readers.Remove(e) + continue + } else if err != nil && !message.IsUnknownCharset(err) { + return nil, err + } + + if pmr := p.MultipartReader(); pmr != nil { + // This is a multipart part, read it + r.readers.PushBack(pmr) + } else { + // This is a non-multipart part, return a mail part + mp := &Part{Body: p.Body} + t, _, _ := p.Header.ContentType() + disp, _, _ := p.Header.ContentDisposition() + if disp == "inline" || (disp != "attachment" && strings.HasPrefix(t, "text/")) { + mp.Header = &InlineHeader{p.Header} + } else { + mp.Header = &AttachmentHeader{p.Header} + } + return mp, err + } + } + + return nil, io.EOF +} + +// Close finishes the reader. +func (r *Reader) Close() error { + for r.readers.Len() > 0 { + e := r.readers.Back() + mr := e.Value.(message.MultipartReader) + + if err := mr.Close(); err != nil { + return err + } + + r.readers.Remove(e) + } + + return nil +} diff --git a/vendor/github.com/emersion/go-message/mail/writer.go b/vendor/github.com/emersion/go-message/mail/writer.go new file mode 100644 index 0000000000..6e6a0d24b0 --- /dev/null +++ b/vendor/github.com/emersion/go-message/mail/writer.go @@ -0,0 +1,132 @@ +package mail + +import ( + "io" + "strings" + + "github.com/emersion/go-message" +) + +func initInlineContentTransferEncoding(h *message.Header) { + if !h.Has("Content-Transfer-Encoding") { + t, _, _ := h.ContentType() + if strings.HasPrefix(t, "text/") { + h.Set("Content-Transfer-Encoding", "quoted-printable") + } else { + h.Set("Content-Transfer-Encoding", "base64") + } + } +} + +func initInlineHeader(h *InlineHeader) { + h.Set("Content-Disposition", "inline") + initInlineContentTransferEncoding(&h.Header) +} + +func initAttachmentHeader(h *AttachmentHeader) { + disp, _, _ := h.ContentDisposition() + if disp != "attachment" { + h.Set("Content-Disposition", "attachment") + } + if !h.Has("Content-Transfer-Encoding") { + h.Set("Content-Transfer-Encoding", "base64") + } +} + +// A Writer writes a mail message. A mail message contains one or more text +// parts and zero or more attachments. +type Writer struct { + mw *message.Writer +} + +// CreateWriter writes a mail header to w and creates a new Writer. 
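The reader API shown above (CreateReader, NextPart, InlineHeader, AttachmentHeader) is the entry point for consuming messages with this vendored package. A minimal sketch, assuming a plain RFC 5322 message supplied as a string and skipping the message.IsUnknownCharset handling the comments above recommend:

package main

import (
	"fmt"
	"io"
	"log"
	"strings"

	"github.com/emersion/go-message/mail"
)

func main() {
	raw := "Subject: Hello\r\n" +
		"From: Alice <alice@example.org>\r\n" +
		"Content-Type: text/plain; charset=utf-8\r\n" +
		"\r\n" +
		"Hi!\r\n"

	mr, err := mail.CreateReader(strings.NewReader(raw))
	if err != nil { // IsUnknownCharset errors are treated as fatal here for brevity
		log.Fatal(err)
	}

	subject, _ := mr.Header.Subject()
	fmt.Println("Subject:", subject)

	// Walk the parts; a non-multipart message yields a single inline part.
	for {
		p, err := mr.NextPart()
		if err == io.EOF {
			break
		} else if err != nil {
			log.Fatal(err)
		}
		switch h := p.Header.(type) {
		case *mail.InlineHeader:
			body, _ := io.ReadAll(p.Body)
			fmt.Printf("inline part: %q\n", body)
		case *mail.AttachmentHeader:
			name, _ := h.Filename()
			fmt.Println("attachment:", name)
		}
	}
}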
+func CreateWriter(w io.Writer, header Header) (*Writer, error) { + header = header.Copy() // don't modify the caller's view + header.Set("Content-Type", "multipart/mixed") + + mw, err := message.CreateWriter(w, header.Header) + if err != nil { + return nil, err + } + + return &Writer{mw}, nil +} + +// CreateInlineWriter writes a mail header to w. The mail will contain an +// inline part, allowing to represent the same text in different formats. +// Attachments cannot be included. +func CreateInlineWriter(w io.Writer, header Header) (*InlineWriter, error) { + header = header.Copy() // don't modify the caller's view + header.Set("Content-Type", "multipart/alternative") + + mw, err := message.CreateWriter(w, header.Header) + if err != nil { + return nil, err + } + + return &InlineWriter{mw}, nil +} + +// CreateSingleInlineWriter writes a mail header to w. The mail will contain a +// single inline part. The body of the part should be written to the returned +// io.WriteCloser. Only one single inline part should be written, use +// CreateWriter if you want multiple parts. +func CreateSingleInlineWriter(w io.Writer, header Header) (io.WriteCloser, error) { + header = header.Copy() // don't modify the caller's view + initInlineContentTransferEncoding(&header.Header) + return message.CreateWriter(w, header.Header) +} + +// CreateInline creates a InlineWriter. One or more parts representing the same +// text in different formats can be written to a InlineWriter. +func (w *Writer) CreateInline() (*InlineWriter, error) { + var h message.Header + h.Set("Content-Type", "multipart/alternative") + + mw, err := w.mw.CreatePart(h) + if err != nil { + return nil, err + } + return &InlineWriter{mw}, nil +} + +// CreateSingleInline creates a new single text part with the provided header. +// The body of the part should be written to the returned io.WriteCloser. Only +// one single text part should be written, use CreateInline if you want multiple +// text parts. +func (w *Writer) CreateSingleInline(h InlineHeader) (io.WriteCloser, error) { + h = InlineHeader{h.Header.Copy()} // don't modify the caller's view + initInlineHeader(&h) + return w.mw.CreatePart(h.Header) +} + +// CreateAttachment creates a new attachment with the provided header. The body +// of the part should be written to the returned io.WriteCloser. +func (w *Writer) CreateAttachment(h AttachmentHeader) (io.WriteCloser, error) { + h = AttachmentHeader{h.Header.Copy()} // don't modify the caller's view + initAttachmentHeader(&h) + return w.mw.CreatePart(h.Header) +} + +// Close finishes the Writer. +func (w *Writer) Close() error { + return w.mw.Close() +} + +// InlineWriter writes a mail message's text. +type InlineWriter struct { + mw *message.Writer +} + +// CreatePart creates a new text part with the provided header. The body of the +// part should be written to the returned io.WriteCloser. +func (w *InlineWriter) CreatePart(h InlineHeader) (io.WriteCloser, error) { + h = InlineHeader{h.Header.Copy()} // don't modify the caller's view + initInlineHeader(&h) + return w.mw.CreatePart(h.Header) +} + +// Close finishes the InlineWriter. +func (w *InlineWriter) Close() error { + return w.mw.Close() +} diff --git a/vendor/github.com/emersion/go-message/message.go b/vendor/github.com/emersion/go-message/message.go new file mode 100644 index 0000000000..52cf115a55 --- /dev/null +++ b/vendor/github.com/emersion/go-message/message.go @@ -0,0 +1,15 @@ +// Package message implements reading and writing multipurpose messages. 
+// +// RFC 2045, RFC 2046 and RFC 2047 defines MIME, and RFC 2183 defines the +// Content-Disposition header field. +// +// Add this import to your package if you want to handle most common charsets +// by default: +// +// import ( +// _ "github.com/emersion/go-message/charset" +// ) +// +// Note, non-UTF-8 charsets are only supported when reading messages. Only +// UTF-8 is supported when writing messages. +package message diff --git a/vendor/github.com/emersion/go-message/multipart.go b/vendor/github.com/emersion/go-message/multipart.go new file mode 100644 index 0000000000..c406a3113f --- /dev/null +++ b/vendor/github.com/emersion/go-message/multipart.go @@ -0,0 +1,116 @@ +package message + +import ( + "io" + + "github.com/emersion/go-message/textproto" +) + +// MultipartReader is an iterator over parts in a MIME multipart body. +type MultipartReader interface { + io.Closer + + // NextPart returns the next part in the multipart or an error. When there are + // no more parts, the error io.EOF is returned. + // + // Entity.Body must be read completely before the next call to NextPart, + // otherwise it will be discarded. + NextPart() (*Entity, error) +} + +type multipartReader struct { + r *textproto.MultipartReader +} + +// NextPart implements MultipartReader. +func (r *multipartReader) NextPart() (*Entity, error) { + p, err := r.r.NextPart() + if err != nil { + return nil, err + } + return New(Header{p.Header}, p) +} + +// Close implements io.Closer. +func (r *multipartReader) Close() error { + return nil +} + +type multipartBody struct { + header Header + parts []*Entity + + r *io.PipeReader + w *Writer + + i int +} + +// Read implements io.Reader. +func (m *multipartBody) Read(p []byte) (n int, err error) { + if m.r == nil { + r, w := io.Pipe() + m.r = r + + var err error + m.w, err = createWriter(w, &m.header) + if err != nil { + return 0, err + } + + // Prevent calls to NextPart to succeed + m.i = len(m.parts) + + go func() { + if err := m.writeBodyTo(m.w); err != nil { + w.CloseWithError(err) + return + } + + if err := m.w.Close(); err != nil { + w.CloseWithError(err) + return + } + + w.Close() + }() + } + + return m.r.Read(p) +} + +// Close implements io.Closer. +func (m *multipartBody) Close() error { + if m.r != nil { + m.r.Close() + } + return nil +} + +// NextPart implements MultipartReader. 
+func (m *multipartBody) NextPart() (*Entity, error) { + if m.i >= len(m.parts) { + return nil, io.EOF + } + + part := m.parts[m.i] + m.i++ + return part, nil +} + +func (m *multipartBody) writeBodyTo(w *Writer) error { + for _, p := range m.parts { + pw, err := w.CreatePart(p.Header) + if err != nil { + return err + } + + if err := p.writeBodyTo(pw); err != nil { + return err + } + if err := pw.Close(); err != nil { + return err + } + } + return nil +} diff --git a/vendor/github.com/emersion/go-message/textproto/header.go b/vendor/github.com/emersion/go-message/textproto/header.go new file mode 100644 index 0000000000..10c04f319e --- /dev/null +++ b/vendor/github.com/emersion/go-message/textproto/header.go @@ -0,0 +1,677 @@ +package textproto + +import ( + "bufio" + "bytes" + "fmt" + "io" + "net/textproto" + "sort" + "strings" +) + +type headerField struct { + b []byte // Raw header field, including whitespace + + k string + v string +} + +func newHeaderField(k, v string, b []byte) *headerField { + return &headerField{k: textproto.CanonicalMIMEHeaderKey(k), v: v, b: b} +} + +func (f *headerField) raw() ([]byte, error) { + if f.b != nil { + return f.b, nil + } else { + for pos, ch := range f.k { + // check if character is a printable US-ASCII except ':' + if !(ch >= '!' && ch < ':' || ch > ':' && ch <= '~') { + return nil, fmt.Errorf("field name contains incorrect symbols (\\x%x at %v)", ch, pos) + } + } + + if pos := strings.IndexAny(f.v, "\r\n"); pos != -1 { + return nil, fmt.Errorf("field value contains \\r\\n (at %v)", pos) + } + + return []byte(formatHeaderField(f.k, f.v)), nil + } +} + +// A Header represents the key-value pairs in a message header. +// +// The header representation is idempotent: if the header can be read and +// written, the result will be exactly the same as the original (including +// whitespace and header field ordering). This is required for e.g. DKIM. +// +// Mutating the header is restricted: the only two allowed operations are +// inserting a new header field at the top and deleting a header field. This is +// again necessary for DKIM. +type Header struct { + // Fields are in reverse order so that inserting a new field at the top is + // cheap. + l []*headerField + m map[string][]*headerField +} + +func makeHeaderMap(fs []*headerField) map[string][]*headerField { + if len(fs) == 0 { + return nil + } + + m := make(map[string][]*headerField, len(fs)) + for i, f := range fs { + m[f.k] = append(m[f.k], fs[i]) + } + return m +} + +func newHeader(fs []*headerField) Header { + // Reverse order + for i := len(fs)/2 - 1; i >= 0; i-- { + opp := len(fs) - 1 - i + fs[i], fs[opp] = fs[opp], fs[i] + } + + return Header{l: fs, m: makeHeaderMap(fs)} +} + +// HeaderFromMap creates a header from a map of header fields. +// +// This function is provided for interoperability with the standard library. +// If possible, ReadHeader should be used instead to avoid loosing information. +// The map representation looses the ordering of the fields, the capitalization +// of the header keys, and the whitespace of the original header. +func HeaderFromMap(m map[string][]string) Header { + fs := make([]*headerField, 0, len(m)) + for k, vs := range m { + for _, v := range vs { + fs = append(fs, newHeaderField(k, v, nil)) + } + } + + sort.SliceStable(fs, func(i, j int) bool { + return fs[i].k < fs[j].k + }) + + return newHeader(fs) +} + +// AddRaw adds the raw key, value pair to the header. 
+// +// The supplied byte slice should be a complete field in the "Key: Value" form +// including trailing CRLF. If there is no comma in the input - AddRaw panics. +// No changes are made to kv contents and it will be copied into WriteHeader +// output as is. +// +// kv is directly added to the underlying structure and therefore should not be +// modified after the AddRaw call. +func (h *Header) AddRaw(kv []byte) { + colon := bytes.IndexByte(kv, ':') + if colon == -1 { + panic("textproto: Header.AddRaw: missing colon") + } + k := textproto.CanonicalMIMEHeaderKey(string(trim(kv[:colon]))) + v := trimAroundNewlines(kv[colon+1:]) + + if h.m == nil { + h.m = make(map[string][]*headerField) + } + + f := newHeaderField(k, v, kv) + h.l = append(h.l, f) + h.m[k] = append(h.m[k], f) +} + +// Add adds the key, value pair to the header. It prepends to any existing +// fields associated with key. +// +// Key and value should obey character requirements of RFC 6532. +// If you need to format or fold lines manually, use AddRaw. +func (h *Header) Add(k, v string) { + k = textproto.CanonicalMIMEHeaderKey(k) + + if h.m == nil { + h.m = make(map[string][]*headerField) + } + + f := newHeaderField(k, v, nil) + h.l = append(h.l, f) + h.m[k] = append(h.m[k], f) +} + +// Get gets the first value associated with the given key. If there are no +// values associated with the key, Get returns "". +func (h *Header) Get(k string) string { + fields := h.m[textproto.CanonicalMIMEHeaderKey(k)] + if len(fields) == 0 { + return "" + } + return fields[len(fields)-1].v +} + +// Raw gets the first raw header field associated with the given key. +// +// The returned bytes contain a complete field in the "Key: value" form, +// including trailing CRLF. +// +// The returned slice should not be modified and becomes invalid when the +// header is updated. +// +// An error is returned if the header field contains incorrect characters (see +// RFC 6532). +func (h *Header) Raw(k string) ([]byte, error) { + fields := h.m[textproto.CanonicalMIMEHeaderKey(k)] + if len(fields) == 0 { + return nil, nil + } + return fields[len(fields)-1].raw() +} + +// Values returns all values associated with the given key. +// +// The returned slice should not be modified and becomes invalid when the +// header is updated. +func (h *Header) Values(k string) []string { + fields := h.m[textproto.CanonicalMIMEHeaderKey(k)] + if len(fields) == 0 { + return nil + } + l := make([]string, len(fields)) + for i, field := range fields { + l[len(fields)-i-1] = field.v + } + return l +} + +// Set sets the header fields associated with key to the single field value. +// It replaces any existing values associated with key. +func (h *Header) Set(k, v string) { + h.Del(k) + h.Add(k, v) +} + +// Del deletes the values associated with key. +func (h *Header) Del(k string) { + k = textproto.CanonicalMIMEHeaderKey(k) + + delete(h.m, k) + + // Delete existing keys + for i := len(h.l) - 1; i >= 0; i-- { + if h.l[i].k == k { + h.l = append(h.l[:i], h.l[i+1:]...) + } + } +} + +// Has checks whether the header has a field with the specified key. +func (h *Header) Has(k string) bool { + _, ok := h.m[textproto.CanonicalMIMEHeaderKey(k)] + return ok +} + +// Copy creates an independent copy of the header. +func (h *Header) Copy() Header { + l := make([]*headerField, len(h.l)) + copy(l, h.l) + m := makeHeaderMap(l) + return Header{l: l, m: m} +} + +// Len returns the number of fields in the header. 
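Because Add prepends and Get/Values return the most recently added field first, a short sketch of the mutation rules documented above may help; it uses only functions defined in this file:

package main

import (
	"fmt"

	"github.com/emersion/go-message/textproto"
)

func main() {
	var h textproto.Header // the zero value is ready to use
	h.Add("Received", "from a.example.org")
	h.Add("Received", "from b.example.org") // prepended above the first field
	h.Set("Subject", "Hello")

	fmt.Println(h.Get("Received"))    // newest field: "from b.example.org"
	fmt.Println(h.Values("Received")) // newest first: [from b.example.org from a.example.org]

	h.Del("Received")
	fmt.Println(h.Has("Received"), h.Len()) // false 1
}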
+func (h *Header) Len() int { + return len(h.l) +} + +// Map returns all header fields as a map. +// +// This function is provided for interoperability with the standard library. +// If possible, Fields should be used instead to avoid loosing information. +// The map representation looses the ordering of the fields, the capitalization +// of the header keys, and the whitespace of the original header. +func (h *Header) Map() map[string][]string { + m := make(map[string][]string, h.Len()) + fields := h.Fields() + for fields.Next() { + m[fields.Key()] = append(m[fields.Key()], fields.Value()) + } + return m +} + +// HeaderFields iterates over header fields. Its cursor starts before the first +// field of the header. Use Next to advance from field to field. +type HeaderFields interface { + // Next advances to the next header field. It returns true on success, or + // false if there is no next field. + Next() (more bool) + // Key returns the key of the current field. + Key() string + // Value returns the value of the current field. + Value() string + // Raw returns the raw current header field. See Header.Raw. + Raw() ([]byte, error) + // Del deletes the current field. + Del() + // Len returns the amount of header fields in the subset of header iterated + // by this HeaderFields instance. + // + // For Fields(), it will return the amount of fields in the whole header section. + // For FieldsByKey(), it will return the amount of fields with certain key. + Len() int +} + +type headerFields struct { + h *Header + cur int +} + +func (fs *headerFields) Next() bool { + fs.cur++ + return fs.cur < len(fs.h.l) +} + +func (fs *headerFields) index() int { + if fs.cur < 0 { + panic("message: HeaderFields method called before Next") + } + if fs.cur >= len(fs.h.l) { + panic("message: HeaderFields method called after Next returned false") + } + return len(fs.h.l) - fs.cur - 1 +} + +func (fs *headerFields) field() *headerField { + return fs.h.l[fs.index()] +} + +func (fs *headerFields) Key() string { + return fs.field().k +} + +func (fs *headerFields) Value() string { + return fs.field().v +} + +func (fs *headerFields) Raw() ([]byte, error) { + return fs.field().raw() +} + +func (fs *headerFields) Del() { + f := fs.field() + + ok := false + for i, ff := range fs.h.m[f.k] { + if ff == f { + ok = true + fs.h.m[f.k] = append(fs.h.m[f.k][:i], fs.h.m[f.k][i+1:]...) + if len(fs.h.m[f.k]) == 0 { + delete(fs.h.m, f.k) + } + break + } + } + if !ok { + panic("message: field not found in Header.m") + } + + fs.h.l = append(fs.h.l[:fs.index()], fs.h.l[fs.index()+1:]...) + fs.cur-- +} + +func (fs *headerFields) Len() int { + return len(fs.h.l) +} + +// Fields iterates over all the header fields. +// +// The header may not be mutated while iterating, except using HeaderFields.Del. 
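A small follow-up sketch of the iteration contract above: HeaderFields.Del is the only mutation allowed mid-iteration. The field name X-Internal-Flag is invented for illustration; the fmt and textproto imports are as in the previous sketch.

// dropInternal deletes any X-Internal-Flag fields while printing the rest.
func dropInternal(h *textproto.Header) {
	fields := h.Fields()
	for fields.Next() {
		if fields.Key() == "X-Internal-Flag" {
			fields.Del() // allowed while iterating
			continue
		}
		fmt.Println(fields.Key()+":", fields.Value())
	}
}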
+func (h *Header) Fields() HeaderFields { + return &headerFields{h, -1} +} + +type headerFieldsByKey struct { + h *Header + k string + cur int +} + +func (fs *headerFieldsByKey) Next() bool { + fs.cur++ + return fs.cur < len(fs.h.m[fs.k]) +} + +func (fs *headerFieldsByKey) index() int { + if fs.cur < 0 { + panic("message: headerfields.key or value called before next") + } + if fs.cur >= len(fs.h.m[fs.k]) { + panic("message: headerfields.key or value called after next returned false") + } + return len(fs.h.m[fs.k]) - fs.cur - 1 +} + +func (fs *headerFieldsByKey) field() *headerField { + return fs.h.m[fs.k][fs.index()] +} + +func (fs *headerFieldsByKey) Key() string { + return fs.field().k +} + +func (fs *headerFieldsByKey) Value() string { + return fs.field().v +} + +func (fs *headerFieldsByKey) Raw() ([]byte, error) { + return fs.field().raw() +} + +func (fs *headerFieldsByKey) Del() { + f := fs.field() + + ok := false + for i := range fs.h.l { + if f == fs.h.l[i] { + ok = true + fs.h.l = append(fs.h.l[:i], fs.h.l[i+1:]...) + break + } + } + if !ok { + panic("message: field not found in Header.l") + } + + fs.h.m[fs.k] = append(fs.h.m[fs.k][:fs.index()], fs.h.m[fs.k][fs.index()+1:]...) + if len(fs.h.m[fs.k]) == 0 { + delete(fs.h.m, fs.k) + } + fs.cur-- +} + +func (fs *headerFieldsByKey) Len() int { + return len(fs.h.m[fs.k]) +} + +// FieldsByKey iterates over all fields having the specified key. +// +// The header may not be mutated while iterating, except using HeaderFields.Del. +func (h *Header) FieldsByKey(k string) HeaderFields { + return &headerFieldsByKey{h, textproto.CanonicalMIMEHeaderKey(k), -1} +} + +func readLineSlice(r *bufio.Reader, line []byte) ([]byte, error) { + for { + l, more, err := r.ReadLine() + line = append(line, l...) + if err != nil { + return line, err + } + + if !more { + break + } + } + + return line, nil +} + +func isSpace(c byte) bool { + return c == ' ' || c == '\t' +} + +func validHeaderKeyByte(b byte) bool { + c := int(b) + return c >= 33 && c <= 126 && c != ':' +} + +// trim returns s with leading and trailing spaces and tabs removed. +// It does not assume Unicode or UTF-8. +func trim(s []byte) []byte { + i := 0 + for i < len(s) && isSpace(s[i]) { + i++ + } + n := len(s) + for n > i && isSpace(s[n-1]) { + n-- + } + return s[i:n] +} + +func hasContinuationLine(r *bufio.Reader) bool { + c, err := r.ReadByte() + if err != nil { + return false // bufio will keep err until next read. + } + r.UnreadByte() + return isSpace(c) +} + +func readContinuedLineSlice(r *bufio.Reader) ([]byte, error) { + // Read the first line. We preallocate slice that it enough + // for most fields. + line, err := readLineSlice(r, make([]byte, 0, 256)) + if err == io.EOF && len(line) == 0 { + // Header without a body + return nil, nil + } else if err != nil { + return nil, err + } + + if len(line) == 0 { // blank line - no continuation + return line, nil + } + + line = append(line, '\r', '\n') + + // Read continuation lines. + for hasContinuationLine(r) { + line, err = readLineSlice(r, line) + if err != nil { + break // bufio will keep err until next read. + } + + line = append(line, '\r', '\n') + } + + return line, nil +} + +func writeContinued(b *strings.Builder, l []byte) { + // Strip trailing \r, if any + if len(l) > 0 && l[len(l)-1] == '\r' { + l = l[:len(l)-1] + } + l = trim(l) + if len(l) == 0 { + return + } + if b.Len() > 0 { + b.WriteByte(' ') + } + b.Write(l) +} + +// Strip newlines and spaces around newlines. 
+func trimAroundNewlines(v []byte) string { + var b strings.Builder + b.Grow(len(v)) + for { + i := bytes.IndexByte(v, '\n') + if i < 0 { + writeContinued(&b, v) + break + } + writeContinued(&b, v[:i]) + v = v[i+1:] + } + + return b.String() +} + +// ReadHeader reads a MIME header from r. The header is a sequence of possibly +// continued "Key: Value" lines ending in a blank line. +// +// To avoid denial of service attacks, the provided bufio.Reader should be +// reading from an io.LimitedReader or a similar Reader to bound the size of +// headers. +func ReadHeader(r *bufio.Reader) (Header, error) { + fs := make([]*headerField, 0, 32) + + // The first line cannot start with a leading space. + if buf, err := r.Peek(1); err == nil && isSpace(buf[0]) { + line, err := readLineSlice(r, nil) + if err != nil { + return newHeader(fs), err + } + + return newHeader(fs), fmt.Errorf("message: malformed MIME header initial line: %v", string(line)) + } + + for { + kv, err := readContinuedLineSlice(r) + if len(kv) == 0 { + return newHeader(fs), err + } + + // Key ends at first colon; should not have trailing spaces but they + // appear in the wild, violating specs, so we remove them if present. + i := bytes.IndexByte(kv, ':') + if i < 0 { + return newHeader(fs), fmt.Errorf("message: malformed MIME header line: %v", string(kv)) + } + + keyBytes := trim(kv[:i]) + + // Verify that there are no invalid characters in the header key. + // See RFC 5322 Section 2.2 + for _, c := range keyBytes { + if !validHeaderKeyByte(c) { + return newHeader(fs), fmt.Errorf("message: malformed MIME header key: %v", string(keyBytes)) + } + } + + key := textproto.CanonicalMIMEHeaderKey(string(keyBytes)) + + // As per RFC 7230 field-name is a token, tokens consist of one or more + // chars. We could return a an error here, but better to be liberal in + // what we accept, so if we get an empty key, skip it. + if key == "" { + continue + } + + i++ // skip colon + v := kv[i:] + + value := trimAroundNewlines(v) + fs = append(fs, newHeaderField(key, value, kv)) + + if err != nil { + return newHeader(fs), err + } + } +} + +func foldLine(v string, maxlen int) (line, next string, ok bool) { + ok = true + + // We'll need to fold before maxlen + foldBefore := maxlen + 1 + foldAt := len(v) + + var folding string + if foldBefore > len(v) { + // We reached the end of the string + if v[len(v)-1] != '\n' { + // If there isn't already a trailing CRLF, insert one + folding = "\r\n" + } + } else { + // Find the closest whitespace before maxlen + foldAt = strings.LastIndexAny(v[:foldBefore], " \t\n") + + if foldAt == 0 { + // The whitespace we found was the previous folding WSP + foldAt = foldBefore - 1 + } else if foldAt < 0 { + // We didn't find any whitespace, we have to insert one + foldAt = foldBefore - 2 + } + + switch v[foldAt] { + case ' ', '\t': + if v[foldAt-1] != '\n' { + folding = "\r\n" // The next char will be a WSP, don't need to insert one + } + case '\n': + folding = "" // There is already a CRLF, nothing to do + default: + // Another char, we need to insert CRLF + WSP. This will insert an + // extra space in the string, so this should be avoided if + // possible. + folding = "\r\n " + ok = false + } + } + + return v[:foldAt] + folding, v[foldAt:], ok +} + +const ( + preferredHeaderLen = 76 + maxHeaderLen = 998 +) + +// formatHeaderField formats a header field, ensuring each line is no longer +// than 76 characters. It tries to fold lines at whitespace characters if +// possible. 
If the header contains a word longer than this limit, it will be +// split. +func formatHeaderField(k, v string) string { + s := k + ": " + + if v == "" { + return s + "\r\n" + } + + first := true + for len(v) > 0 { + // If this is the first line, substract the length of the key + keylen := 0 + if first { + keylen = len(s) + } + + // First try with a soft limit + l, next, ok := foldLine(v, preferredHeaderLen-keylen) + if !ok { + // Folding failed to preserve the original header field value. Try + // with a larger, hard limit. + l, next, _ = foldLine(v, maxHeaderLen-keylen) + } + v = next + s += l + first = false + } + + return s +} + +// WriteHeader writes a MIME header to w. +func WriteHeader(w io.Writer, h Header) error { + for i := len(h.l) - 1; i >= 0; i-- { + f := h.l[i] + if rawField, err := f.raw(); err == nil { + if _, err := w.Write(rawField); err != nil { + return err + } + } else { + return fmt.Errorf("failed to write header field #%v (%q): %w", len(h.l)-i, f.k, err) + } + } + + _, err := w.Write([]byte{'\r', '\n'}) + return err +} diff --git a/vendor/github.com/emersion/go-message/textproto/multipart.go b/vendor/github.com/emersion/go-message/textproto/multipart.go new file mode 100644 index 0000000000..62824dfdd6 --- /dev/null +++ b/vendor/github.com/emersion/go-message/textproto/multipart.go @@ -0,0 +1,474 @@ +// Copyright 2010 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. +// + +package textproto + +// Multipart is defined in RFC 2046. + +import ( + "bufio" + "bytes" + "crypto/rand" + "errors" + "fmt" + "io" + "io/ioutil" +) + +var emptyParams = make(map[string]string) + +// This constant needs to be at least 76 for this package to work correctly. +// This is because \r\n--separator_of_len_70- would fill the buffer and it +// wouldn't be safe to consume a single byte from it. +const peekBufferSize = 4096 + +// A Part represents a single part in a multipart body. +type Part struct { + Header Header + + mr *MultipartReader + + // r is either a reader directly reading from mr + r io.Reader + + n int // known data bytes waiting in mr.bufReader + total int64 // total data bytes read already + err error // error to return when n == 0 + readErr error // read error observed from mr.bufReader +} + +// NewMultipartReader creates a new multipart reader reading from r using the +// given MIME boundary. +// +// The boundary is usually obtained from the "boundary" parameter of +// the message's "Content-Type" header. Use mime.ParseMediaType to +// parse such headers. +func NewMultipartReader(r io.Reader, boundary string) *MultipartReader { + b := []byte("\r\n--" + boundary + "--") + return &MultipartReader{ + bufReader: bufio.NewReaderSize(&stickyErrorReader{r: r}, peekBufferSize), + nl: b[:2], + nlDashBoundary: b[:len(b)-2], + dashBoundaryDash: b[2:], + dashBoundary: b[2 : len(b)-2], + } +} + +// stickyErrorReader is an io.Reader which never calls Read on its +// underlying Reader once an error has been seen. 
(the io.Reader +// interface's contract promises nothing about the return values of +// Read calls after an error, yet this package does do multiple Reads +// after error) +type stickyErrorReader struct { + r io.Reader + err error +} + +func (r *stickyErrorReader) Read(p []byte) (n int, _ error) { + if r.err != nil { + return 0, r.err + } + n, r.err = r.r.Read(p) + return n, r.err +} + +func newPart(mr *MultipartReader) (*Part, error) { + bp := &Part{mr: mr} + if err := bp.populateHeaders(); err != nil { + return nil, err + } + bp.r = partReader{bp} + return bp, nil +} + +func (bp *Part) populateHeaders() error { + header, err := ReadHeader(bp.mr.bufReader) + if err == nil { + bp.Header = header + } + return err +} + +// Read reads the body of a part, after its headers and before the +// next part (if any) begins. +func (p *Part) Read(d []byte) (n int, err error) { + return p.r.Read(d) +} + +// partReader implements io.Reader by reading raw bytes directly from the +// wrapped *Part, without doing any Transfer-Encoding decoding. +type partReader struct { + p *Part +} + +func (pr partReader) Read(d []byte) (int, error) { + p := pr.p + br := p.mr.bufReader + + // Read into buffer until we identify some data to return, + // or we find a reason to stop (boundary or read error). + for p.n == 0 && p.err == nil { + peek, _ := br.Peek(br.Buffered()) + p.n, p.err = scanUntilBoundary(peek, p.mr.dashBoundary, p.mr.nlDashBoundary, p.total, p.readErr) + if p.n == 0 && p.err == nil { + // Force buffered I/O to read more into buffer. + _, p.readErr = br.Peek(len(peek) + 1) + if p.readErr == io.EOF { + p.readErr = io.ErrUnexpectedEOF + } + } + } + + // Read out from "data to return" part of buffer. + if p.n == 0 { + return 0, p.err + } + n := len(d) + if n > p.n { + n = p.n + } + n, _ = br.Read(d[:n]) + p.total += int64(n) + p.n -= n + if p.n == 0 { + return n, p.err + } + return n, nil +} + +// scanUntilBoundary scans buf to identify how much of it can be safely +// returned as part of the Part body. +// dashBoundary is "--boundary". +// nlDashBoundary is "\r\n--boundary" or "\n--boundary", depending on what mode we are in. +// The comments below (and the name) assume "\n--boundary", but either is accepted. +// total is the number of bytes read out so far. If total == 0, then a leading "--boundary" is recognized. +// readErr is the read error, if any, that followed reading the bytes in buf. +// scanUntilBoundary returns the number of data bytes from buf that can be +// returned as part of the Part body and also the error to return (if any) +// once those data bytes are done. +func scanUntilBoundary(buf, dashBoundary, nlDashBoundary []byte, total int64, readErr error) (int, error) { + if total == 0 { + // At beginning of body, allow dashBoundary. + if bytes.HasPrefix(buf, dashBoundary) { + switch matchAfterPrefix(buf, dashBoundary, readErr) { + case -1: + return len(dashBoundary), nil + case 0: + return 0, nil + case +1: + return 0, io.EOF + } + } + if bytes.HasPrefix(dashBoundary, buf) { + return 0, readErr + } + } + + // Search for "\n--boundary". + if i := bytes.Index(buf, nlDashBoundary); i >= 0 { + switch matchAfterPrefix(buf[i:], nlDashBoundary, readErr) { + case -1: + return i + len(nlDashBoundary), nil + case 0: + return i, nil + case +1: + return i, io.EOF + } + } + if bytes.HasPrefix(nlDashBoundary, buf) { + return 0, readErr + } + + // Otherwise, anything up to the final \n is not part of the boundary + // and so must be part of the body. 
+ // Also if the section from the final \n onward is not a prefix of the boundary, + // it too must be part of the body. + i := bytes.LastIndexByte(buf, nlDashBoundary[0]) + if i >= 0 && bytes.HasPrefix(nlDashBoundary, buf[i:]) { + return i, nil + } + return len(buf), readErr +} + +// matchAfterPrefix checks whether buf should be considered to match the boundary. +// The prefix is "--boundary" or "\r\n--boundary" or "\n--boundary", +// and the caller has verified already that bytes.HasPrefix(buf, prefix) is true. +// +// matchAfterPrefix returns +1 if the buffer does match the boundary, +// meaning the prefix is followed by a dash, space, tab, cr, nl, or end of input. +// It returns -1 if the buffer definitely does NOT match the boundary, +// meaning the prefix is followed by some other character. +// For example, "--foobar" does not match "--foo". +// It returns 0 more input needs to be read to make the decision, +// meaning that len(buf) == len(prefix) and readErr == nil. +func matchAfterPrefix(buf, prefix []byte, readErr error) int { + if len(buf) == len(prefix) { + if readErr != nil { + return +1 + } + return 0 + } + c := buf[len(prefix)] + if c == ' ' || c == '\t' || c == '\r' || c == '\n' || c == '-' { + return +1 + } + return -1 +} + +func (p *Part) Close() error { + io.Copy(ioutil.Discard, p) + return nil +} + +// MultipartReader is an iterator over parts in a MIME multipart body. +// MultipartReader's underlying parser consumes its input as needed. Seeking +// isn't supported. +type MultipartReader struct { + bufReader *bufio.Reader + + currentPart *Part + partsRead int + + nl []byte // "\r\n" or "\n" (set after seeing first boundary line) + nlDashBoundary []byte // nl + "--boundary" + dashBoundaryDash []byte // "--boundary--" + dashBoundary []byte // "--boundary" +} + +// NextPart returns the next part in the multipart or an error. +// When there are no more parts, the error io.EOF is returned. +func (r *MultipartReader) NextPart() (*Part, error) { + if r.currentPart != nil { + r.currentPart.Close() + } + if string(r.dashBoundary) == "--" { + return nil, fmt.Errorf("multipart: boundary is empty") + } + expectNewPart := false + for { + line, err := r.bufReader.ReadSlice('\n') + + if err == io.EOF && r.isFinalBoundary(line) { + // If the buffer ends in "--boundary--" without the + // trailing "\r\n", ReadSlice will return an error + // (since it's missing the '\n'), but this is a valid + // multipart EOF so we need to return io.EOF instead of + // a fmt-wrapped one. + return nil, io.EOF + } + if err != nil { + return nil, fmt.Errorf("multipart: NextPart: %v", err) + } + + if r.isBoundaryDelimiterLine(line) { + r.partsRead++ + bp, err := newPart(r) + if err != nil { + return nil, err + } + r.currentPart = bp + return bp, nil + } + + if r.isFinalBoundary(line) { + // Expected EOF + return nil, io.EOF + } + + if expectNewPart { + return nil, fmt.Errorf("multipart: expecting a new Part; got line %q", string(line)) + } + + if r.partsRead == 0 { + // skip line + continue + } + + // Consume the "\n" or "\r\n" separator between the + // body of the previous part and the boundary line we + // now expect will follow. (either a new part or the + // end boundary) + if bytes.Equal(line, r.nl) { + expectNewPart = true + continue + } + + return nil, fmt.Errorf("multipart: unexpected line in Next(): %q", line) + } +} + +// isFinalBoundary reports whether line is the final boundary line +// indicating that all parts are over. 
+// It matches `^--boundary--[ \t]*(\r\n)?$` +func (mr *MultipartReader) isFinalBoundary(line []byte) bool { + if !bytes.HasPrefix(line, mr.dashBoundaryDash) { + return false + } + rest := line[len(mr.dashBoundaryDash):] + rest = skipLWSPChar(rest) + return len(rest) == 0 || bytes.Equal(rest, mr.nl) +} + +func (mr *MultipartReader) isBoundaryDelimiterLine(line []byte) (ret bool) { + // https://tools.ietf.org/html/rfc2046#section-5.1 + // The boundary delimiter line is then defined as a line + // consisting entirely of two hyphen characters ("-", + // decimal value 45) followed by the boundary parameter + // value from the Content-Type header field, optional linear + // whitespace, and a terminating CRLF. + if !bytes.HasPrefix(line, mr.dashBoundary) { + return false + } + rest := line[len(mr.dashBoundary):] + rest = skipLWSPChar(rest) + + // On the first part, see our lines are ending in \n instead of \r\n + // and switch into that mode if so. This is a violation of the spec, + // but occurs in practice. + if mr.partsRead == 0 && len(rest) == 1 && rest[0] == '\n' { + mr.nl = mr.nl[1:] + mr.nlDashBoundary = mr.nlDashBoundary[1:] + } + return bytes.Equal(rest, mr.nl) +} + +// skipLWSPChar returns b with leading spaces and tabs removed. +// RFC 822 defines: +// +// LWSP-char = SPACE / HTAB +func skipLWSPChar(b []byte) []byte { + for len(b) > 0 && (b[0] == ' ' || b[0] == '\t') { + b = b[1:] + } + return b +} + +// A MultipartWriter generates multipart messages. +type MultipartWriter struct { + w io.Writer + boundary string + lastpart *part +} + +// NewMultipartWriter returns a new multipart Writer with a random boundary, +// writing to w. +func NewMultipartWriter(w io.Writer) *MultipartWriter { + return &MultipartWriter{ + w: w, + boundary: randomBoundary(), + } +} + +// Boundary returns the Writer's boundary. +func (w *MultipartWriter) Boundary() string { + return w.boundary +} + +// SetBoundary overrides the Writer's default randomly-generated +// boundary separator with an explicit value. +// +// SetBoundary must be called before any parts are created, may only +// contain certain ASCII characters, and must be non-empty and +// at most 70 bytes long. +func (w *MultipartWriter) SetBoundary(boundary string) error { + if w.lastpart != nil { + return errors.New("mime: SetBoundary called after write") + } + // rfc2046#section-5.1.1 + if len(boundary) < 1 || len(boundary) > 70 { + return errors.New("mime: invalid boundary length") + } + end := len(boundary) - 1 + for i, b := range boundary { + if 'A' <= b && b <= 'Z' || 'a' <= b && b <= 'z' || '0' <= b && b <= '9' { + continue + } + switch b { + case '\'', '(', ')', '+', '_', ',', '-', '.', '/', ':', '=', '?': + continue + case ' ': + if i != end { + continue + } + } + return errors.New("mime: invalid boundary character") + } + w.boundary = boundary + return nil +} + +func randomBoundary() string { + var buf [30]byte + _, err := io.ReadFull(rand.Reader, buf[:]) + if err != nil { + panic(err) + } + return fmt.Sprintf("%x", buf[:]) +} + +// CreatePart creates a new multipart section with the provided +// header. The body of the part should be written to the returned +// Writer. After calling CreatePart, any previous part may no longer +// be written to. 
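On the writing side, the MultipartWriter API described above (NewMultipartWriter, CreatePart, Close) is the counterpart of the reader. A minimal sketch that assembles a two-part body with a random boundary; the part contents are arbitrary examples:

package main

import (
	"bytes"
	"fmt"
	"io"

	"github.com/emersion/go-message/textproto"
)

func main() {
	var buf bytes.Buffer
	mw := textproto.NewMultipartWriter(&buf)

	var h1 textproto.Header
	h1.Set("Content-Type", "text/plain")
	p1, err := mw.CreatePart(h1)
	if err != nil {
		panic(err)
	}
	io.WriteString(p1, "Hello from the first part.\r\n")

	var h2 textproto.Header
	h2.Set("Content-Type", "text/html")
	p2, _ := mw.CreatePart(h2) // p1 must not be written to any more
	io.WriteString(p2, "<p>Hello from the second part.</p>\r\n")

	mw.Close() // emits the closing --boundary-- line

	fmt.Println("boundary:", mw.Boundary())
	fmt.Print(buf.String())
}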
+func (w *MultipartWriter) CreatePart(header Header) (io.Writer, error) { + if w.lastpart != nil { + if err := w.lastpart.close(); err != nil { + return nil, err + } + } + var b bytes.Buffer + if w.lastpart != nil { + fmt.Fprintf(&b, "\r\n--%s\r\n", w.boundary) + } else { + fmt.Fprintf(&b, "--%s\r\n", w.boundary) + } + + WriteHeader(&b, header) + + _, err := io.Copy(w.w, &b) + if err != nil { + return nil, err + } + p := &part{ + mw: w, + } + w.lastpart = p + return p, nil +} + +// Close finishes the multipart message and writes the trailing +// boundary end line to the output. +func (w *MultipartWriter) Close() error { + if w.lastpart != nil { + if err := w.lastpart.close(); err != nil { + return err + } + w.lastpart = nil + } + _, err := fmt.Fprintf(w.w, "\r\n--%s--\r\n", w.boundary) + return err +} + +type part struct { + mw *MultipartWriter + closed bool + we error // last error that occurred writing +} + +func (p *part) close() error { + p.closed = true + return p.we +} + +func (p *part) Write(d []byte) (n int, err error) { + if p.closed { + return 0, errors.New("multipart: can't write to finished part") + } + n, err = p.mw.w.Write(d) + if err != nil { + p.we = err + } + return +} diff --git a/vendor/github.com/emersion/go-message/textproto/textproto.go b/vendor/github.com/emersion/go-message/textproto/textproto.go new file mode 100644 index 0000000000..2fa994bd75 --- /dev/null +++ b/vendor/github.com/emersion/go-message/textproto/textproto.go @@ -0,0 +1,2 @@ +// Package textproto implements low-level manipulation of MIME messages. +package textproto diff --git a/vendor/github.com/emersion/go-message/writer.go b/vendor/github.com/emersion/go-message/writer.go new file mode 100644 index 0000000000..6a80da2b33 --- /dev/null +++ b/vendor/github.com/emersion/go-message/writer.go @@ -0,0 +1,134 @@ +package message + +import ( + "errors" + "fmt" + "io" + "strings" + + "github.com/emersion/go-message/textproto" +) + +// Writer writes message entities. +// +// If the message is not multipart, it should be used as a WriteCloser. Don't +// forget to call Close. +// +// If the message is multipart, users can either use CreatePart to write child +// parts or Write to directly pipe a multipart message. In any case, Close must +// be called at the end. +type Writer struct { + w io.Writer + c io.Closer + mw *textproto.MultipartWriter +} + +// createWriter creates a new Writer writing to w with the provided header. +// Nothing is written to w when it is called. header is modified in-place. +func createWriter(w io.Writer, header *Header) (*Writer, error) { + ww := &Writer{w: w} + + mediaType, mediaParams, _ := header.ContentType() + if strings.HasPrefix(mediaType, "multipart/") { + ww.mw = textproto.NewMultipartWriter(ww.w) + + // Do not set ww's io.Closer for now: if this is a multipart entity but + // CreatePart is not used (only Write is used), then the final boundary + // is expected to be written by the user too. In this case, ww.Close + // shouldn't write the final boundary. 
+ + if mediaParams["boundary"] != "" { + ww.mw.SetBoundary(mediaParams["boundary"]) + } else { + mediaParams["boundary"] = ww.mw.Boundary() + header.SetContentType(mediaType, mediaParams) + } + + header.Del("Content-Transfer-Encoding") + } else { + wc, err := encodingWriter(header.Get("Content-Transfer-Encoding"), ww.w) + if err != nil { + return nil, err + } + ww.w = wc + ww.c = wc + } + + switch strings.ToLower(mediaParams["charset"]) { + case "", "us-ascii", "utf-8": + // This is OK + default: + // Anything else is invalid + return nil, fmt.Errorf("unhandled charset %q", mediaParams["charset"]) + } + + return ww, nil +} + +// CreateWriter creates a new message writer to w. If header contains an +// encoding, data written to the Writer will automatically be encoded with it. +// The charset needs to be utf-8 or us-ascii. +func CreateWriter(w io.Writer, header Header) (*Writer, error) { + // Ensure that modifications are invisible to the caller + header = header.Copy() + + // If the message uses MIME, it has to include MIME-Version + if !header.Has("Mime-Version") { + header.Set("MIME-Version", "1.0") + } + + ww, err := createWriter(w, &header) + if err != nil { + return nil, err + } + if err := textproto.WriteHeader(w, header.Header); err != nil { + return nil, err + } + return ww, nil +} + +// Write implements io.Writer. +func (w *Writer) Write(b []byte) (int, error) { + return w.w.Write(b) +} + +// Close implements io.Closer. +func (w *Writer) Close() error { + if w.c != nil { + return w.c.Close() + } + return nil +} + +// CreatePart returns a Writer to a new part in this multipart entity. If this +// entity is not multipart, it fails. The body of the part should be written to +// the returned io.WriteCloser. +func (w *Writer) CreatePart(header Header) (*Writer, error) { + if w.mw == nil { + return nil, errors.New("cannot create a part in a non-multipart message") + } + + if w.c == nil { + // We know that the user calls CreatePart so Close should write the final + // boundary + w.c = w.mw + } + + // cw -> ww -> pw -> w.mw -> w.w + + ww := &struct{ io.Writer }{nil} + + // ensure that modifications are invisible to the caller + header = header.Copy() + cw, err := createWriter(ww, &header) + if err != nil { + return nil, err + } + pw, err := w.mw.CreatePart(header.Header) + if err != nil { + return nil, err + } + + ww.Writer = pw + return cw, nil +} diff --git a/vendor/github.com/emersion/go-sasl/.build.yml b/vendor/github.com/emersion/go-sasl/.build.yml new file mode 100644 index 0000000000..daa6006dfd --- /dev/null +++ b/vendor/github.com/emersion/go-sasl/.build.yml @@ -0,0 +1,19 @@ +image: alpine/latest +packages: + - go + # Required by codecov + - bash + - findutils +sources: + - https://github.com/emersion/go-sasl +tasks: + - build: | + cd go-sasl + go build -v ./... + - test: | + cd go-sasl + go test -coverprofile=coverage.txt -covermode=atomic ./... 
+ - upload-coverage: | + cd go-sasl + export CODECOV_TOKEN=3f257f71-a128-4834-8f68-2b534e9f4cb1 + curl -s https://codecov.io/bash | bash diff --git a/vendor/github.com/emersion/go-sasl/.gitignore b/vendor/github.com/emersion/go-sasl/.gitignore new file mode 100644 index 0000000000..daf913b1b3 --- /dev/null +++ b/vendor/github.com/emersion/go-sasl/.gitignore @@ -0,0 +1,24 @@ +# Compiled Object files, Static and Dynamic libs (Shared Objects) +*.o +*.a +*.so + +# Folders +_obj +_test + +# Architecture specific extensions/prefixes +*.[568vq] +[568vq].out + +*.cgo1.go +*.cgo2.c +_cgo_defun.c +_cgo_gotypes.go +_cgo_export.* + +_testmain.go + +*.exe +*.test +*.prof diff --git a/vendor/github.com/emersion/go-sasl/LICENSE b/vendor/github.com/emersion/go-sasl/LICENSE new file mode 100644 index 0000000000..dc1922e471 --- /dev/null +++ b/vendor/github.com/emersion/go-sasl/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2016 emersion + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/emersion/go-sasl/README.md b/vendor/github.com/emersion/go-sasl/README.md new file mode 100644 index 0000000000..6bd47ba818 --- /dev/null +++ b/vendor/github.com/emersion/go-sasl/README.md @@ -0,0 +1,18 @@ +# go-sasl + +[![godocs.io](https://godocs.io/github.com/emersion/go-sasl?status.svg)](https://godocs.io/github.com/emersion/go-sasl) +[![Build Status](https://travis-ci.org/emersion/go-sasl.svg?branch=master)](https://travis-ci.org/emersion/go-sasl) + +A [SASL](https://tools.ietf.org/html/rfc4422) library written in Go. + +Implemented mechanisms: + +* [ANONYMOUS](https://tools.ietf.org/html/rfc4505) +* [EXTERNAL](https://tools.ietf.org/html/rfc4422#appendix-A) +* [LOGIN](https://tools.ietf.org/html/draft-murchison-sasl-login-00) (obsolete, use PLAIN instead) +* [PLAIN](https://tools.ietf.org/html/rfc4616) +* [OAUTHBEARER](https://tools.ietf.org/html/rfc7628) + +## License + +MIT diff --git a/vendor/github.com/emersion/go-sasl/anonymous.go b/vendor/github.com/emersion/go-sasl/anonymous.go new file mode 100644 index 0000000000..abcb753e61 --- /dev/null +++ b/vendor/github.com/emersion/go-sasl/anonymous.go @@ -0,0 +1,56 @@ +package sasl + +// The ANONYMOUS mechanism name. 
+const Anonymous = "ANONYMOUS" + +type anonymousClient struct { + Trace string +} + +func (c *anonymousClient) Start() (mech string, ir []byte, err error) { + mech = Anonymous + ir = []byte(c.Trace) + return +} + +func (c *anonymousClient) Next(challenge []byte) (response []byte, err error) { + return nil, ErrUnexpectedServerChallenge +} + +// A client implementation of the ANONYMOUS authentication mechanism, as +// described in RFC 4505. +func NewAnonymousClient(trace string) Client { + return &anonymousClient{trace} +} + +// Get trace information from clients logging in anonymously. +type AnonymousAuthenticator func(trace string) error + +type anonymousServer struct { + done bool + authenticate AnonymousAuthenticator +} + +func (s *anonymousServer) Next(response []byte) (challenge []byte, done bool, err error) { + if s.done { + err = ErrUnexpectedClientResponse + return + } + + // No initial response, send an empty challenge + if response == nil { + return []byte{}, false, nil + } + + s.done = true + + err = s.authenticate(string(response)) + done = true + return +} + +// A server implementation of the ANONYMOUS authentication mechanism, as +// described in RFC 4505. +func NewAnonymousServer(authenticator AnonymousAuthenticator) Server { + return &anonymousServer{authenticate: authenticator} +} diff --git a/vendor/github.com/emersion/go-sasl/external.go b/vendor/github.com/emersion/go-sasl/external.go new file mode 100644 index 0000000000..ba24ccc0f7 --- /dev/null +++ b/vendor/github.com/emersion/go-sasl/external.go @@ -0,0 +1,67 @@ +package sasl + +import ( + "bytes" + "errors" +) + +// The EXTERNAL mechanism name. +const External = "EXTERNAL" + +type externalClient struct { + Identity string +} + +func (a *externalClient) Start() (mech string, ir []byte, err error) { + mech = External + ir = []byte(a.Identity) + return +} + +func (a *externalClient) Next(challenge []byte) (response []byte, err error) { + return nil, ErrUnexpectedServerChallenge +} + +// An implementation of the EXTERNAL authentication mechanism, as described in +// RFC 4422. Authorization identity may be left blank to indicate that the +// client is requesting to act as the identity associated with the +// authentication credentials. +func NewExternalClient(identity string) Client { + return &externalClient{identity} +} + +// ExternalAuthenticator authenticates users with the EXTERNAL mechanism. If +// the identity is left blank, it indicates that it is the same as the one used +// in the external credentials. If identity is not empty and the server doesn't +// support it, an error must be returned. +type ExternalAuthenticator func(identity string) error + +type externalServer struct { + done bool + authenticate ExternalAuthenticator +} + +func (a *externalServer) Next(response []byte) (challenge []byte, done bool, err error) { + if a.done { + return nil, false, ErrUnexpectedClientResponse + } + + // No initial response, send an empty challenge + if response == nil { + return []byte{}, false, nil + } + + a.done = true + + if bytes.Contains(response, []byte("\x00")) { + return nil, false, errors.New("sasl: identity contains a NUL character") + } + + return nil, true, a.authenticate(string(response)) +} + +// NewExternalServer creates a server implementation of the EXTERNAL +// authentication mechanism, as described in RFC 4422. 
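The constructors above return the package's Client and Server values, whose Start and Next methods drive one authentication exchange. A hedged sketch of a local EXTERNAL round trip; the identity "jane" and the accept-everything authenticator are placeholders:

package main

import (
	"fmt"

	"github.com/emersion/go-sasl"
)

func main() {
	// Client side: Start yields the mechanism name and the initial response
	// that a protocol such as SMTP or IMAP would carry in its AUTH command.
	client := sasl.NewExternalClient("jane")
	mech, ir, err := client.Start()
	if err != nil {
		panic(err)
	}
	fmt.Printf("mechanism=%s initial-response=%q\n", mech, ir)

	// Server side: hand the initial response to a server mechanism with a
	// permissive authenticator (illustration only).
	srv := sasl.NewExternalServer(func(identity string) error { return nil })
	_, done, err := srv.Next(ir)
	fmt.Println("done:", done, "err:", err)
}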
+func NewExternalServer(authenticator ExternalAuthenticator) Server { + return &externalServer{authenticate: authenticator} +} diff --git a/vendor/github.com/emersion/go-sasl/login.go b/vendor/github.com/emersion/go-sasl/login.go new file mode 100644 index 0000000000..3847ee1464 --- /dev/null +++ b/vendor/github.com/emersion/go-sasl/login.go @@ -0,0 +1,89 @@ +package sasl + +import ( + "bytes" +) + +// The LOGIN mechanism name. +const Login = "LOGIN" + +var expectedChallenge = []byte("Password:") + +type loginClient struct { + Username string + Password string +} + +func (a *loginClient) Start() (mech string, ir []byte, err error) { + mech = "LOGIN" + ir = []byte(a.Username) + return +} + +func (a *loginClient) Next(challenge []byte) (response []byte, err error) { + if bytes.Compare(challenge, expectedChallenge) != 0 { + return nil, ErrUnexpectedServerChallenge + } else { + return []byte(a.Password), nil + } +} + +// A client implementation of the LOGIN authentication mechanism for SMTP, +// as described in http://www.iana.org/go/draft-murchison-sasl-login +// +// It is considered obsolete, and should not be used when other mechanisms are +// available. For plaintext password authentication use PLAIN mechanism. +func NewLoginClient(username, password string) Client { + return &loginClient{username, password} +} + +// Authenticates users with an username and a password. +type LoginAuthenticator func(username, password string) error + +type loginState int + +const ( + loginNotStarted loginState = iota + loginWaitingUsername + loginWaitingPassword +) + +type loginServer struct { + state loginState + username, password string + authenticate LoginAuthenticator +} + +// A server implementation of the LOGIN authentication mechanism, as described +// in https://tools.ietf.org/html/draft-murchison-sasl-login-00. +// +// LOGIN is obsolete and should only be enabled for legacy clients that cannot +// be updated to use PLAIN. +func NewLoginServer(authenticator LoginAuthenticator) Server { + return &loginServer{authenticate: authenticator} +} + +func (a *loginServer) Next(response []byte) (challenge []byte, done bool, err error) { + switch a.state { + case loginNotStarted: + // Check for initial response field, as per RFC4422 section 3 + if response == nil { + challenge = []byte("Username:") + break + } + a.state++ + fallthrough + case loginWaitingUsername: + a.username = string(response) + challenge = []byte("Password:") + case loginWaitingPassword: + a.password = string(response) + err = a.authenticate(a.username, a.password) + done = true + default: + err = ErrUnexpectedClientResponse + } + + a.state++ + return +} diff --git a/vendor/github.com/emersion/go-sasl/oauthbearer.go b/vendor/github.com/emersion/go-sasl/oauthbearer.go new file mode 100644 index 0000000000..7b2c503c6d --- /dev/null +++ b/vendor/github.com/emersion/go-sasl/oauthbearer.go @@ -0,0 +1,198 @@ +package sasl + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "strconv" + "strings" +) + +// The OAUTHBEARER mechanism name. 
+const OAuthBearer = "OAUTHBEARER" + +type OAuthBearerError struct { + Status string `json:"status"` + Schemes string `json:"schemes"` + Scope string `json:"scope"` +} + +type OAuthBearerOptions struct { + Username string + Token string + Host string + Port int +} + +// Implements error +func (err *OAuthBearerError) Error() string { + return fmt.Sprintf("OAUTHBEARER authentication error (%v)", err.Status) +} + +type oauthBearerClient struct { + OAuthBearerOptions +} + +func (a *oauthBearerClient) Start() (mech string, ir []byte, err error) { + var authzid string + if a.Username != "" { + authzid = "a=" + a.Username + } + str := "n," + authzid + "," + + if a.Host != "" { + str += "\x01host=" + a.Host + } + + if a.Port != 0 { + str += "\x01port=" + strconv.Itoa(a.Port) + } + str += "\x01auth=Bearer " + a.Token + "\x01\x01" + ir = []byte(str) + return OAuthBearer, ir, nil +} + +func (a *oauthBearerClient) Next(challenge []byte) ([]byte, error) { + authBearerErr := &OAuthBearerError{} + if err := json.Unmarshal(challenge, authBearerErr); err != nil { + return nil, err + } else { + return nil, authBearerErr + } +} + +// An implementation of the OAUTHBEARER authentication mechanism, as +// described in RFC 7628. +func NewOAuthBearerClient(opt *OAuthBearerOptions) Client { + return &oauthBearerClient{*opt} +} + +type OAuthBearerAuthenticator func(opts OAuthBearerOptions) *OAuthBearerError + +type oauthBearerServer struct { + done bool + failErr error + authenticate OAuthBearerAuthenticator +} + +func (a *oauthBearerServer) fail(descr string) ([]byte, bool, error) { + blob, err := json.Marshal(OAuthBearerError{ + Status: "invalid_request", + Schemes: "bearer", + }) + if err != nil { + panic(err) // wtf + } + a.failErr = errors.New("sasl: client error: " + descr) + return blob, false, nil +} + +func (a *oauthBearerServer) Next(response []byte) (challenge []byte, done bool, err error) { + // Per RFC, we cannot just send an error, we need to return JSON-structured + // value as a challenge and then after getting dummy response from the + // client stop the exchange. + if a.failErr != nil { + // Server libraries (go-smtp, go-imap) will not call Next on + // protocol-specific SASL cancel response ('*'). However, GS2 (and + // indirectly OAUTHBEARER) defines a protocol-independent way to do so + // using 0x01. + if len(response) != 1 && response[0] != 0x01 { + return nil, true, errors.New("sasl: invalid response") + } + return nil, true, a.failErr + } + + if a.done { + err = ErrUnexpectedClientResponse + return + } + + // Generate empty challenge. + if response == nil { + return []byte{}, false, nil + } + + a.done = true + + // Cut n,a=username,\x01host=...\x01auth=... + // into + // n + // a=username + // \x01host=...\x01auth=...\x01\x01 + parts := bytes.SplitN(response, []byte{','}, 3) + if len(parts) != 3 { + return a.fail("Invalid response") + } + flag := parts[0] + authzid := parts[1] + if !bytes.Equal(flag, []byte{'n'}) { + return a.fail("Invalid response, missing 'n' in gs2-cb-flag") + } + opts := OAuthBearerOptions{} + if len(authzid) > 0 { + if !bytes.HasPrefix(authzid, []byte("a=")) { + return a.fail("Invalid response, missing 'a=' in gs2-authzid") + } + opts.Username = string(bytes.TrimPrefix(authzid, []byte("a="))) + } + + // Cut \x01host=...\x01auth=...\x01\x01 + // into + // *empty* + // host=... + // auth=... + // *empty* + // + // Note that this code does not do a lot of checks to make sure the input + // follows the exact format specified by RFC. 
+ params := bytes.Split(parts[2], []byte{0x01}) + for _, p := range params { + // Skip empty fields (one at start and end). + if len(p) == 0 { + continue + } + + pParts := bytes.SplitN(p, []byte{'='}, 2) + if len(pParts) != 2 { + return a.fail("Invalid response, missing '='") + } + + switch string(pParts[0]) { + case "host": + opts.Host = string(pParts[1]) + case "port": + port, err := strconv.ParseUint(string(pParts[1]), 10, 16) + if err != nil { + return a.fail("Invalid response, malformed 'port' value") + } + opts.Port = int(port) + case "auth": + const prefix = "bearer " + strValue := string(pParts[1]) + // Token type is case-insensitive. + if !strings.HasPrefix(strings.ToLower(strValue), prefix) { + return a.fail("Unsupported token type") + } + opts.Token = strValue[len(prefix):] + default: + return a.fail("Invalid response, unknown parameter: " + string(pParts[0])) + } + } + + authzErr := a.authenticate(opts) + if authzErr != nil { + blob, err := json.Marshal(authzErr) + if err != nil { + panic(err) // wtf + } + a.failErr = authzErr + return blob, false, nil + } + + return nil, true, nil +} + +func NewOAuthBearerServer(auth OAuthBearerAuthenticator) Server { + return &oauthBearerServer{authenticate: auth} +} diff --git a/vendor/github.com/emersion/go-sasl/plain.go b/vendor/github.com/emersion/go-sasl/plain.go new file mode 100644 index 0000000000..5017bdc499 --- /dev/null +++ b/vendor/github.com/emersion/go-sasl/plain.go @@ -0,0 +1,77 @@ +package sasl + +import ( + "bytes" + "errors" +) + +// The PLAIN mechanism name. +const Plain = "PLAIN" + +type plainClient struct { + Identity string + Username string + Password string +} + +func (a *plainClient) Start() (mech string, ir []byte, err error) { + mech = "PLAIN" + ir = []byte(a.Identity + "\x00" + a.Username + "\x00" + a.Password) + return +} + +func (a *plainClient) Next(challenge []byte) (response []byte, err error) { + return nil, ErrUnexpectedServerChallenge +} + +// A client implementation of the PLAIN authentication mechanism, as described +// in RFC 4616. Authorization identity may be left blank to indicate that it is +// the same as the username. +func NewPlainClient(identity, username, password string) Client { + return &plainClient{identity, username, password} +} + +// Authenticates users with an identity, a username and a password. If the +// identity is left blank, it indicates that it is the same as the username. +// If identity is not empty and the server doesn't support it, an error must be +// returned. +type PlainAuthenticator func(identity, username, password string) error + +type plainServer struct { + done bool + authenticate PlainAuthenticator +} + +func (a *plainServer) Next(response []byte) (challenge []byte, done bool, err error) { + if a.done { + err = ErrUnexpectedClientResponse + return + } + + // No initial response, send an empty challenge + if response == nil { + return []byte{}, false, nil + } + + a.done = true + + parts := bytes.Split(response, []byte("\x00")) + if len(parts) != 3 { + err = errors.New("sasl: invalid response") + return + } + + identity := string(parts[0]) + username := string(parts[1]) + password := string(parts[2]) + + err = a.authenticate(identity, username, password) + done = true + return +} + +// A server implementation of the PLAIN authentication mechanism, as described +// in RFC 4616. 
+func NewPlainServer(authenticator PlainAuthenticator) Server { + return &plainServer{authenticate: authenticator} +} diff --git a/vendor/github.com/emersion/go-sasl/sasl.go b/vendor/github.com/emersion/go-sasl/sasl.go new file mode 100644 index 0000000000..525da88467 --- /dev/null +++ b/vendor/github.com/emersion/go-sasl/sasl.go @@ -0,0 +1,45 @@ +// Library for Simple Authentication and Security Layer (SASL) defined in RFC 4422. +package sasl + +// Note: +// Most of this code was copied, with some modifications, from net/smtp. It +// would be better if Go provided a standard package (e.g. crypto/sasl) that +// could be shared by SMTP, IMAP, and other packages. + +import ( + "errors" +) + +// Common SASL errors. +var ( + ErrUnexpectedClientResponse = errors.New("sasl: unexpected client response") + ErrUnexpectedServerChallenge = errors.New("sasl: unexpected server challenge") +) + +// Client interface to perform challenge-response authentication. +type Client interface { + // Begins SASL authentication with the server. It returns the + // authentication mechanism name and "initial response" data (if required by + // the selected mechanism). A non-nil error causes the client to abort the + // authentication attempt. + // + // A nil ir value is different from a zero-length value. The nil value + // indicates that the selected mechanism does not use an initial response, + // while a zero-length value indicates an empty initial response, which must + // be sent to the server. + Start() (mech string, ir []byte, err error) + + // Continues challenge-response authentication. A non-nil error causes + // the client to abort the authentication attempt. + Next(challenge []byte) (response []byte, err error) +} + +// Server interface to perform challenge-response authentication. +type Server interface { + // Begins or continues challenge-response authentication. If the client + // supplies an initial response, response is non-nil. + // + // If the authentication is finished, done is set to true. If the + // authentication has failed, an error is returned. + Next(response []byte) (challenge []byte, done bool, err error) +} diff --git a/vendor/github.com/go-crypt/crypt/LICENSE b/vendor/github.com/go-crypt/crypt/LICENSE new file mode 100644 index 0000000000..a149832c0d --- /dev/null +++ b/vendor/github.com/go-crypt/crypt/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2022 github.com/go-crypt/crypt + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
\ No newline at end of file diff --git a/vendor/github.com/go-crypt/crypt/algorithm/const.go b/vendor/github.com/go-crypt/crypt/algorithm/const.go new file mode 100644 index 0000000000..08c585b4db --- /dev/null +++ b/vendor/github.com/go-crypt/crypt/algorithm/const.go @@ -0,0 +1,26 @@ +package algorithm + +const ( + // DigestSHA1 is te name for SHA1 digests. + DigestSHA1 = "sha1" + + // DigestSHA224 is te name for SHA224 digests. + DigestSHA224 = "sha224" + + // DigestSHA256 is te name for SHA256 digests. + DigestSHA256 = "sha256" + + // DigestSHA384 is te name for SHA384 digests. + DigestSHA384 = "sha384" + + // DigestSHA512 is te name for SHA512 digests. + DigestSHA512 = "sha512" +) + +const ( + // SaltLengthDefault is the default salt size for most implementations. + SaltLengthDefault = 16 + + // KeyLengthDefault is the default key size for most implementations. + KeyLengthDefault = 32 +) diff --git a/vendor/github.com/go-crypt/crypt/algorithm/doc.go b/vendor/github.com/go-crypt/crypt/algorithm/doc.go new file mode 100644 index 0000000000..35a89bc551 --- /dev/null +++ b/vendor/github.com/go-crypt/crypt/algorithm/doc.go @@ -0,0 +1,3 @@ +// Package algorithm is a package which contains the individual algorithms and interfaces related to their +// implementation. +package algorithm diff --git a/vendor/github.com/go-crypt/crypt/algorithm/errors.go b/vendor/github.com/go-crypt/crypt/algorithm/errors.go new file mode 100644 index 0000000000..2cec6fc8ae --- /dev/null +++ b/vendor/github.com/go-crypt/crypt/algorithm/errors.go @@ -0,0 +1,66 @@ +package algorithm + +import ( + "errors" +) + +var ( + // ErrEncodedHashInvalidFormat is an error returned when an encoded hash has an invalid format. + ErrEncodedHashInvalidFormat = errors.New("provided encoded hash has an invalid format") + + // ErrEncodedHashInvalidIdentifier is an error returned when an encoded hash has an invalid identifier for the + // given digest. + ErrEncodedHashInvalidIdentifier = errors.New("provided encoded hash has an invalid identifier") + + // ErrEncodedHashInvalidVersion is an error returned when an encoded hash has an unsupported or otherwise invalid + // version. + ErrEncodedHashInvalidVersion = errors.New("provided encoded hash has an invalid version") + + // ErrEncodedHashInvalidOption is an error returned when an encoded hash has an unsupported or otherwise invalid + // option in the option field. + ErrEncodedHashInvalidOption = errors.New("provided encoded hash has an invalid option") + + // ErrEncodedHashInvalidOptionKey is an error returned when an encoded hash has an unknown or otherwise invalid + // option key in the option field. + ErrEncodedHashInvalidOptionKey = errors.New("provided encoded hash has an invalid option key") + + // ErrEncodedHashInvalidOptionValue is an error returned when an encoded hash has an unknown or otherwise invalid + // option value in the option field. + ErrEncodedHashInvalidOptionValue = errors.New("provided encoded hash has an invalid option value") + + // ErrEncodedHashKeyEncoding is an error returned when an encoded hash has a salt with an invalid or unsupported + // encoding. + ErrEncodedHashKeyEncoding = errors.New("provided encoded hash has a key value that can't be decoded") + + // ErrEncodedHashSaltEncoding is an error returned when an encoded hash has a salt with an invalid or unsupported + // encoding. 
+ ErrEncodedHashSaltEncoding = errors.New("provided encoded hash has a salt value that can't be decoded") + + // ErrKeyDerivation is returned when a Key function returns an error. + ErrKeyDerivation = errors.New("failed to derive the key with the provided parameters") + + // ErrSaltEncoding is an error returned when a salt has an invalid or unsupported encoding. + ErrSaltEncoding = errors.New("provided salt has a value that can't be decoded") + + // ErrPasswordInvalid is an error returned when a password has an invalid or unsupported properties. It is NOT + // returned on password mismatches. + ErrPasswordInvalid = errors.New("password is invalid") + + // ErrSaltInvalid is an error returned when a salt has an invalid or unsupported properties. + ErrSaltInvalid = errors.New("salt is invalid") + + // ErrSaltReadRandomBytes is an error returned when generating the random bytes for salt resulted in an error. + ErrSaltReadRandomBytes = errors.New("could not read random bytes for salt") + + // ErrParameterInvalid is an error returned when a parameter has an invalid value. + ErrParameterInvalid = errors.New("parameter is invalid") +) + +// Error format strings. +const ( + ErrFmtInvalidIntParameter = "%w: parameter '%s' must be between %d%s and %d but is set to '%d'" + ErrFmtDigestDecode = "%s decode error: %w" + ErrFmtDigestMatch = "%s match error: %w" + ErrFmtHasherHash = "%s hashing error: %w" + ErrFmtHasherValidation = "%s validation error: %w" +) diff --git a/vendor/github.com/go-crypt/crypt/algorithm/shacrypt/const.go b/vendor/github.com/go-crypt/crypt/algorithm/shacrypt/const.go new file mode 100644 index 0000000000..b7906462b8 --- /dev/null +++ b/vendor/github.com/go-crypt/crypt/algorithm/shacrypt/const.go @@ -0,0 +1,46 @@ +package shacrypt + +const ( + // EncodingFmt is the encoding format for this algorithm. + EncodingFmt = "$%s$rounds=%d$%s$%s" + + // EncodingFmtRoundsOmitted is the encoding format for this algorithm when the rounds can be omitted. + EncodingFmtRoundsOmitted = "$%s$%s$%s" + + // AlgName is the name for this algorithm. + AlgName = "shacrypt" + + // AlgIdentifierSHA256 is the identifier used in encoded SHA256 variants of this algorithm. + AlgIdentifierSHA256 = "5" + + // AlgIdentifierSHA512 is the identifier used in encoded SHA512 variants of this algorithm. + AlgIdentifierSHA512 = "6" + + // IterationsMin is the minimum number of iterations accepted. + IterationsMin = 1000 + + // IterationsMax is the maximum number of iterations accepted. + IterationsMax = 999999999 + + // IterationsDefaultSHA256 is the default number of iterations for SHA256. + IterationsDefaultSHA256 = 1000000 + + // IterationsDefaultSHA512 is the default number of iterations for SHA512. + IterationsDefaultSHA512 = 500000 + + // IterationsDefaultOmitted is the default number of iterations when the rounds are omitted. + IterationsDefaultOmitted = 5000 + + // SaltLengthMin is the minimum salt length. + SaltLengthMin = 1 + + // SaltLengthMax is the maximum salt length. + SaltLengthMax = 16 + + // SaltCharSet are the valid characters for the salt. 
+ SaltCharSet = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789./" +) + +const ( + variantDefault = VariantSHA512 +) diff --git a/vendor/github.com/go-crypt/crypt/algorithm/shacrypt/decoder.go b/vendor/github.com/go-crypt/crypt/algorithm/shacrypt/decoder.go new file mode 100644 index 0000000000..a0fb81a839 --- /dev/null +++ b/vendor/github.com/go-crypt/crypt/algorithm/shacrypt/decoder.go @@ -0,0 +1,136 @@ +package shacrypt + +import ( + "fmt" + "strconv" + + "github.com/go-crypt/crypt/algorithm" + "github.com/go-crypt/crypt/internal/encoding" +) + +// RegisterDecoder the decoder with the algorithm.DecoderRegister. +func RegisterDecoder(r algorithm.DecoderRegister) (err error) { + if err = RegisterDecoderSHA256(r); err != nil { + return err + } + + if err = RegisterDecoderSHA512(r); err != nil { + return err + } + + return nil +} + +// RegisterDecoderSHA256 registers specifically the sha256 decoder variant with the algorithm.DecoderRegister. +func RegisterDecoderSHA256(r algorithm.DecoderRegister) (err error) { + if err = r.RegisterDecodeFunc(VariantSHA256.Prefix(), DecodeVariant(VariantSHA256)); err != nil { + return err + } + + return nil +} + +// RegisterDecoderSHA512 registers specifically the sha512 decoder variant with the algorithm.DecoderRegister. +func RegisterDecoderSHA512(r algorithm.DecoderRegister) (err error) { + if err = r.RegisterDecodeFunc(VariantSHA512.Prefix(), DecodeVariant(VariantSHA512)); err != nil { + return err + } + + return nil +} + +// Decode the encoded digest into a algorithm.Digest. +func Decode(encodedDigest string) (digest algorithm.Digest, err error) { + return DecodeVariant(VariantNone)(encodedDigest) +} + +// DecodeVariant the encoded digest into a algorithm.Digest provided it matches the provided Variant. If VariantNone is +// used all variants can be decoded. 
+func DecodeVariant(v Variant) func(encodedDigest string) (digest algorithm.Digest, err error) { + return func(encodedDigest string) (digest algorithm.Digest, err error) { + var ( + parts []string + variant Variant + ) + + if variant, parts, err = decoderParts(encodedDigest); err != nil { + return nil, fmt.Errorf(algorithm.ErrFmtDigestDecode, AlgName, err) + } + + if v != VariantNone && v != variant { + return nil, fmt.Errorf(algorithm.ErrFmtDigestDecode, AlgName, fmt.Errorf("the '%s' variant cannot be decoded only the '%s' variant can be", variant.String(), v.String())) + } + + if digest, err = decode(variant, parts); err != nil { + return nil, fmt.Errorf(algorithm.ErrFmtDigestDecode, AlgName, err) + } + + return digest, nil + } +} + +func decoderParts(encodedDigest string) (variant Variant, parts []string, err error) { + parts = encoding.Split(encodedDigest, -1) + + if n := len(parts); n != 4 && n != 5 { + return VariantNone, nil, algorithm.ErrEncodedHashInvalidFormat + } + + variant = NewVariant(parts[1]) + + if variant == VariantNone { + return variant, nil, fmt.Errorf("%w: identifier '%s' is not an encoded %s digest", algorithm.ErrEncodedHashInvalidIdentifier, parts[1], AlgName) + } + + return variant, parts[2:], nil +} + +func decode(variant Variant, parts []string) (digest algorithm.Digest, err error) { + decoded := &Digest{ + variant: variant, + } + + var ( + ip, is, ik int + ) + + switch len(parts) { + case 2: + ip, is, ik = -1, 0, 1 + case 3: + ip, is, ik = 0, 1, 2 + } + + if len(parts[ik]) == 0 { + return nil, fmt.Errorf("%w: key has 0 bytes", algorithm.ErrEncodedHashKeyEncoding) + } + + decoded.iterations = IterationsDefaultOmitted + + var params []encoding.Parameter + + if ip >= 0 { + if params, err = encoding.DecodeParameterStr(parts[ip]); err != nil { + return nil, err + } + } + + for _, param := range params { + switch param.Key { + case "rounds": + var rounds uint64 + + if rounds, err = strconv.ParseUint(param.Value, 10, 32); err != nil { + return nil, fmt.Errorf("%w: option '%s' has invalid value '%s': %v", algorithm.ErrEncodedHashInvalidOptionValue, param.Key, param.Value, err) + } + + decoded.iterations = int(rounds) + default: + return nil, fmt.Errorf("%w: option '%s' with value '%s' is unknown", algorithm.ErrEncodedHashInvalidOptionKey, param.Key, param.Value) + } + } + + decoded.salt, decoded.key = []byte(parts[is]), []byte(parts[ik]) + + return decoded, nil +} diff --git a/vendor/github.com/go-crypt/crypt/algorithm/shacrypt/digest.go b/vendor/github.com/go-crypt/crypt/algorithm/shacrypt/digest.go new file mode 100644 index 0000000000..46a760220b --- /dev/null +++ b/vendor/github.com/go-crypt/crypt/algorithm/shacrypt/digest.go @@ -0,0 +1,83 @@ +package shacrypt + +import ( + "crypto/subtle" + "fmt" + "strings" + + xcrypt "github.com/go-crypt/x/crypt" + + "github.com/go-crypt/crypt/algorithm" +) + +// Digest is a digest which handles SHA-crypt hashes like SHA256 or SHA512. +type Digest struct { + variant Variant + + iterations int + salt, key []byte +} + +// Match returns true if the string password matches the current shacrypt.Digest. +func (d *Digest) Match(password string) (match bool) { + return d.MatchBytes([]byte(password)) +} + +// MatchBytes returns true if the []byte passwordBytes matches the current shacrypt.Digest. +func (d *Digest) MatchBytes(passwordBytes []byte) (match bool) { + match, _ = d.MatchBytesAdvanced(passwordBytes) + + return match +} + +// MatchAdvanced is the same as Match except if there is an error it returns that as well. 
+func (d *Digest) MatchAdvanced(password string) (match bool, err error) { + if match, err = d.MatchBytesAdvanced([]byte(password)); err != nil { + return match, fmt.Errorf(algorithm.ErrFmtDigestMatch, AlgName, err) + } + + return match, nil +} + +// MatchBytesAdvanced is the same as MatchBytes except if there is an error it returns that as well. +func (d *Digest) MatchBytesAdvanced(passwordBytes []byte) (match bool, err error) { + if len(d.key) == 0 { + return false, fmt.Errorf("%w: key has 0 bytes", algorithm.ErrPasswordInvalid) + } + + return subtle.ConstantTimeCompare(d.key, xcrypt.KeySHACrypt(d.variant.HashFunc(), passwordBytes, d.salt, d.iterations)) == 1, nil +} + +// Encode this Digest as a string for storage. +func (d *Digest) Encode() (hash string) { + switch d.iterations { + case IterationsDefaultOmitted: + return strings.ReplaceAll(fmt.Sprintf(EncodingFmtRoundsOmitted, + d.variant.Prefix(), + d.salt, d.key, + ), "\n", "") + default: + return strings.ReplaceAll(fmt.Sprintf(EncodingFmt, + d.variant.Prefix(), d.iterations, + d.salt, d.key, + ), "\n", "") + } +} + +// String returns the storable format of the shacrypt.Digest hash utilizing fmt.Sprintf and shacrypt.EncodingFmt. +func (d *Digest) String() string { + return d.Encode() +} + +func (d *Digest) defaults() { + switch d.variant { + case VariantSHA256, VariantSHA512: + break + default: + d.variant = variantDefault + } + + if d.iterations == 0 { + d.iterations = d.variant.DefaultIterations() + } +} diff --git a/vendor/github.com/go-crypt/crypt/algorithm/shacrypt/doc.go b/vendor/github.com/go-crypt/crypt/algorithm/shacrypt/doc.go new file mode 100644 index 0000000000..4ac7c8e691 --- /dev/null +++ b/vendor/github.com/go-crypt/crypt/algorithm/shacrypt/doc.go @@ -0,0 +1,7 @@ +// Package shacrypt provides helpful abstractions for an implementation of SHA-crypt and implements +// github.com/go-crypt/crypt interfaces. +// +// See https://www.akkadia.org/drepper/SHA-crypt.html for specification details. +// +// This implementation is loaded by crypt.NewDefaultDecoder and crypt.NewDecoderAll. +package shacrypt diff --git a/vendor/github.com/go-crypt/crypt/algorithm/shacrypt/hasher.go b/vendor/github.com/go-crypt/crypt/algorithm/shacrypt/hasher.go new file mode 100644 index 0000000000..6b8f9d11de --- /dev/null +++ b/vendor/github.com/go-crypt/crypt/algorithm/shacrypt/hasher.go @@ -0,0 +1,156 @@ +package shacrypt + +import ( + "fmt" + + xcrypt "github.com/go-crypt/x/crypt" + + "github.com/go-crypt/crypt/algorithm" + "github.com/go-crypt/crypt/internal/random" +) + +// New returns a *Hasher without any settings configured. This d to a SHA512 hash.Hash +// with 1000000 iterations. These settings can be overridden with the methods with the With prefix. +func New(opts ...Opt) (hasher *Hasher, err error) { + hasher = &Hasher{} + + if err = hasher.WithOptions(opts...); err != nil { + return nil, err + } + + if err = hasher.Validate(); err != nil { + return nil, err + } + + return hasher, nil +} + +// Hasher is a algorithm.Hash for SHA-crypt which can be initialized via shacrypt.New using a functional options pattern. +type Hasher struct { + variant Variant + + iterations, bytesSalt int + + d bool +} + +// NewSHA256 returns a *Hasher with the SHA256 hash.Hash which d to 1000000 iterations. These +// settings can be overridden with the methods with the With prefix. 
+func NewSHA256() (hasher *Hasher, err error) { + return New( + WithVariant(VariantSHA256), + WithIterations(VariantSHA256.DefaultIterations()), + ) +} + +// NewSHA512 returns a *Hasher with the SHA512 hash.Hash which d to 1000000 iterations. These +// settings can be overridden with the methods with the With prefix. +func NewSHA512() (hasher *Hasher, err error) { + return New( + WithVariant(VariantSHA512), + WithIterations(VariantSHA512.DefaultIterations()), + ) +} + +// WithOptions defines the options for this scrypt.Hasher. +func (h *Hasher) WithOptions(opts ...Opt) (err error) { + for _, opt := range opts { + if err = opt(h); err != nil { + return err + } + } + + return nil +} + +// Hash performs the hashing operation and returns either a shacrypt.Digest as a algorithm.Digest or an error. +func (h *Hasher) Hash(password string) (digest algorithm.Digest, err error) { + h.defaults() + + if digest, err = h.hash(password); err != nil { + return nil, fmt.Errorf(algorithm.ErrFmtHasherHash, AlgName, err) + } + + return digest, nil +} + +func (h *Hasher) hash(password string) (digest algorithm.Digest, err error) { + var salt []byte + + if salt, err = random.CharSetBytes(h.bytesSalt, SaltCharSet); err != nil { + return nil, fmt.Errorf("%w: %v", algorithm.ErrSaltReadRandomBytes, err) + } + + return h.hashWithSalt(password, salt) +} + +// HashWithSalt overloads the Hash method allowing the user to provide a salt. It's recommended instead to configure the +// salt size and let this be a random value generated using crypto/rand. +func (h *Hasher) HashWithSalt(password string, salt []byte) (digest algorithm.Digest, err error) { + h.defaults() + + if digest, err = h.hashWithSalt(password, salt); err != nil { + return nil, fmt.Errorf(algorithm.ErrFmtHasherHash, AlgName, err) + } + + return digest, nil +} + +func (h *Hasher) hashWithSalt(password string, salt []byte) (digest algorithm.Digest, err error) { + if s := len(salt); s > SaltLengthMax || s < SaltLengthMin { + return nil, fmt.Errorf("%w: salt bytes must have a length of between %d and %d but has a length of %d", algorithm.ErrSaltInvalid, SaltLengthMin, SaltLengthMax, len(salt)) + } + + d := &Digest{ + variant: h.variant, + iterations: h.iterations, + salt: salt, + } + + d.defaults() + + d.key = xcrypt.KeySHACrypt(d.variant.HashFunc(), []byte(password), d.salt, d.iterations) + + return d, nil +} + +// MustHash overloads the Hash method and panics if the error is not nil. It's recommended if you use this option to +// utilize the Validate method first or handle the panic appropriately. +func (h *Hasher) MustHash(password string) (digest algorithm.Digest) { + var err error + + if digest, err = h.Hash(password); err != nil { + panic(err) + } + + return digest +} + +// Validate checks the settings/parameters for this shacrypt.Hasher and returns an error. 
+func (h *Hasher) Validate() (err error) { + h.defaults() + + if err = h.validate(); err != nil { + return fmt.Errorf(algorithm.ErrFmtHasherValidation, AlgName, err) + } + + return nil +} + +func (h *Hasher) validate() (err error) { + h.defaults() + + return nil +} + +func (h *Hasher) defaults() { + if h.d { + return + } + + h.d = true + + if h.bytesSalt < SaltLengthMin { + h.bytesSalt = algorithm.SaltLengthDefault + } +} diff --git a/vendor/github.com/go-crypt/crypt/algorithm/shacrypt/opts.go b/vendor/github.com/go-crypt/crypt/algorithm/shacrypt/opts.go new file mode 100644 index 0000000000..3499882be5 --- /dev/null +++ b/vendor/github.com/go-crypt/crypt/algorithm/shacrypt/opts.go @@ -0,0 +1,98 @@ +package shacrypt + +import ( + "fmt" + + "github.com/go-crypt/crypt/algorithm" +) + +// Opt describes the functional option pattern for the shacrypt.Hasher. +type Opt func(h *Hasher) (err error) + +// WithVariant configures the shacrypt.Variant of the resulting shacrypt.Digest. +// Default is shacrypt.VariantSHA512. +func WithVariant(variant Variant) Opt { + return func(h *Hasher) (err error) { + switch variant { + case VariantNone: + return nil + case VariantSHA256, VariantSHA512: + h.variant = variant + + return nil + default: + return fmt.Errorf(algorithm.ErrFmtHasherValidation, AlgName, fmt.Errorf("%w: variant '%d' is invalid", algorithm.ErrParameterInvalid, variant)) + } + } +} + +// WithVariantName uses the variant name or identifier to configure the shacrypt.Variant of the resulting shacrypt.Digest. +// Default is shacrypt.VariantSHA512. +func WithVariantName(identifier string) Opt { + return func(h *Hasher) (err error) { + if identifier == "" { + return nil + } + + variant := NewVariant(identifier) + + if variant == VariantNone { + return fmt.Errorf(algorithm.ErrFmtHasherValidation, AlgName, fmt.Errorf("%w: variant identifier '%s' is invalid", algorithm.ErrParameterInvalid, identifier)) + } + + h.variant = variant + + return nil + } +} + +// WithSHA256 adjusts this Hasher to utilize the SHA256 hash.Hash. +func WithSHA256() Opt { + return func(h *Hasher) (err error) { + h.variant = VariantSHA256 + + return nil + } +} + +// WithSHA512 adjusts this Hasher to utilize the SHA512 hash.Hash. +func WithSHA512() Opt { + return func(h *Hasher) (err error) { + h.variant = VariantSHA512 + + return nil + } +} + +// WithIterations sets the iterations parameter of the resulting shacrypt.Digest. +// Minimum 1000, Maximum 999999999. Default is 1000000. +func WithIterations(iterations int) Opt { + return func(h *Hasher) (err error) { + if iterations < IterationsMin || iterations > IterationsMax { + return fmt.Errorf(algorithm.ErrFmtHasherValidation, AlgName, fmt.Errorf(algorithm.ErrFmtInvalidIntParameter, algorithm.ErrParameterInvalid, "iterations", IterationsMin, "", IterationsMax, iterations)) + } + + h.iterations = iterations + + return nil + } +} + +// WithRounds is an alias for shacrypt.WithIterations. +func WithRounds(rounds int) Opt { + return WithIterations(rounds) +} + +// WithSaltLength adjusts the salt size (in bytes) of the resulting shacrypt.Digest. +// Minimum 1, Maximum 16. Default is 16. 
+func WithSaltLength(bytes int) Opt { + return func(h *Hasher) (err error) { + if bytes < SaltLengthMin || bytes > SaltLengthMax { + return fmt.Errorf(algorithm.ErrFmtHasherValidation, AlgName, fmt.Errorf(algorithm.ErrFmtInvalidIntParameter, algorithm.ErrParameterInvalid, "salt length", SaltLengthMin, "", SaltLengthMax, bytes)) + } + + h.bytesSalt = bytes + + return nil + } +} diff --git a/vendor/github.com/go-crypt/crypt/algorithm/shacrypt/variant.go b/vendor/github.com/go-crypt/crypt/algorithm/shacrypt/variant.go new file mode 100644 index 0000000000..681ddc4ecd --- /dev/null +++ b/vendor/github.com/go-crypt/crypt/algorithm/shacrypt/variant.go @@ -0,0 +1,92 @@ +package shacrypt + +import ( + "crypto/sha256" + "crypto/sha512" + + "github.com/go-crypt/crypt/algorithm" +) + +// NewVariant converts an identifier string to a shacrypt.Variant. +func NewVariant(identifier string) Variant { + switch identifier { + case AlgIdentifierSHA256, algorithm.DigestSHA256: + return VariantSHA256 + case AlgIdentifierSHA512, algorithm.DigestSHA512: + return VariantSHA512 + default: + return VariantSHA512 + } +} + +// Variant is a variant of the shacrypt.Digest. +type Variant int + +const ( + // VariantNone is a variant of the shacrypt.Digest which is unknown. + VariantNone Variant = iota + + // VariantSHA256 is a variant of the shacrypt.Digest which uses SHA-256. + VariantSHA256 + + // VariantSHA512 is a variant of the shacrypt.Digest which uses SHA-512. + VariantSHA512 +) + +// String implements the fmt.Stringer returning a string representation of the shacrypt.Variant. +func (v Variant) String() (identifier string) { + switch v { + case VariantSHA256: + return algorithm.DigestSHA256 + case VariantSHA512: + return algorithm.DigestSHA512 + default: + return + } +} + +// Prefix returns the shacrypt.Variant prefix identifier. +func (v Variant) Prefix() (prefix string) { + switch v { + case VariantSHA256: + return AlgIdentifierSHA256 + case VariantSHA512: + return AlgIdentifierSHA512 + default: + return AlgIdentifierSHA512 + } +} + +// Name returns the Variant name. +func (v Variant) Name() (s string) { + switch v { + case VariantSHA256: + return algorithm.DigestSHA256 + case VariantSHA512: + return algorithm.DigestSHA512 + default: + return algorithm.DigestSHA512 + } +} + +// HashFunc returns the internal HMAC HashFunc. +func (v Variant) HashFunc() algorithm.HashFunc { + switch v { + case VariantSHA256: + return sha256.New + case VariantSHA512: + return sha512.New + default: + return sha512.New + } +} + +// DefaultIterations returns the default iterations for the particular variant. +func (v Variant) DefaultIterations() int { + switch v { + case VariantSHA512: + return IterationsDefaultSHA512 + default: + return IterationsDefaultSHA256 + } +} diff --git a/vendor/github.com/go-crypt/crypt/algorithm/types.go b/vendor/github.com/go-crypt/crypt/algorithm/types.go new file mode 100644 index 0000000000..99fcb583ab --- /dev/null +++ b/vendor/github.com/go-crypt/crypt/algorithm/types.go @@ -0,0 +1,62 @@ +package algorithm + +import ( + "fmt" + "hash" +) + +// Hash is an interface which implements password hashing. +type Hash interface { + // Validate checks the hasher configuration to ensure it's valid. This should be used when the Hash is going to be + // reused and you should use it in conjunction with MustHash. + Validate() (err error) + + // Hash performs the hashing operation on a password and resets any relevant parameters such as a manually set salt. + // It then returns a Digest and error. 
+ Hash(password string) (hashed Digest, err error) + + // HashWithSalt is an overload of Digest that also accepts a salt. + HashWithSalt(password string, salt []byte) (hashed Digest, err error) + + // MustHash overloads the Hash method and panics if the error is not nil. It's recommended if you use this method to + // utilize the Validate method first or handle the panic appropriately. + MustHash(password string) (hashed Digest) +} + +// Matcher is an interface used to match passwords. +type Matcher interface { + Match(password string) (match bool) + MatchBytes(passwordBytes []byte) (match bool) + MatchAdvanced(password string) (match bool, err error) + MatchBytesAdvanced(passwordBytes []byte) (match bool, err error) +} + +// Digest represents a hashed password. It's implemented by all hashed password results so that when we pass a +// stored hash into its relevant type we can verify the password against the hash. +type Digest interface { + fmt.Stringer + + Matcher + + Encode() (hash string) +} + +// DecodeFunc describes a function to decode an encoded digest into a algorithm.Digest. +type DecodeFunc func(encodedDigest string) (digest Digest, err error) + +// DecoderRegister describes an implementation that allows registering DecodeFunc's. +type DecoderRegister interface { + RegisterDecodeFunc(prefix string, decoder DecodeFunc) (err error) + RegisterDecodePrefix(prefix, identifier string) (err error) + + Decoder +} + +// Decoder is a representation of a implementation that performs generic decoding. Currently this is just intended for +// use by implementers. +type Decoder interface { + Decode(encodedDigest string) (digest Digest, err error) +} + +// HashFunc is a function which returns a hash.Hash. +type HashFunc func() hash.Hash diff --git a/vendor/github.com/go-crypt/crypt/internal/encoding/base64adapted.go b/vendor/github.com/go-crypt/crypt/internal/encoding/base64adapted.go new file mode 100644 index 0000000000..55b84c1154 --- /dev/null +++ b/vendor/github.com/go-crypt/crypt/internal/encoding/base64adapted.go @@ -0,0 +1,14 @@ +package encoding + +import ( + "encoding/base64" +) + +const ( + encodeBase64Adapted = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789./" +) + +var ( + // Base64RawAdaptedEncoding is the adapted encoding for crypt purposes without padding. + Base64RawAdaptedEncoding = base64.NewEncoding(encodeBase64Adapted).WithPadding(base64.NoPadding) +) diff --git a/vendor/github.com/go-crypt/crypt/internal/encoding/const.go b/vendor/github.com/go-crypt/crypt/internal/encoding/const.go new file mode 100644 index 0000000000..b0a3d56d3e --- /dev/null +++ b/vendor/github.com/go-crypt/crypt/internal/encoding/const.go @@ -0,0 +1,9 @@ +package encoding + +const ( + // Delimiter rune for all encodings. + Delimiter = rune('$') + + // DelimiterStr is the string variation of Delimiter. + DelimiterStr = string(Delimiter) +) diff --git a/vendor/github.com/go-crypt/crypt/internal/encoding/digest.go b/vendor/github.com/go-crypt/crypt/internal/encoding/digest.go new file mode 100644 index 0000000000..f854968abe --- /dev/null +++ b/vendor/github.com/go-crypt/crypt/internal/encoding/digest.go @@ -0,0 +1,10 @@ +package encoding + +import ( + "strings" +) + +// Split an encoded digest by the encoding.Delimiter. 
+func Split(encodedDigest string, n int) (parts []string) { + return strings.SplitN(encodedDigest, DelimiterStr, n) +} diff --git a/vendor/github.com/go-crypt/crypt/internal/encoding/doc.go b/vendor/github.com/go-crypt/crypt/internal/encoding/doc.go new file mode 100644 index 0000000000..30a4f0be5a --- /dev/null +++ b/vendor/github.com/go-crypt/crypt/internal/encoding/doc.go @@ -0,0 +1,2 @@ +// Package encoding is an internal encoding helper package. +package encoding diff --git a/vendor/github.com/go-crypt/crypt/internal/encoding/parameters.go b/vendor/github.com/go-crypt/crypt/internal/encoding/parameters.go new file mode 100644 index 0000000000..b7935be9af --- /dev/null +++ b/vendor/github.com/go-crypt/crypt/internal/encoding/parameters.go @@ -0,0 +1,54 @@ +package encoding + +import ( + "fmt" + "strconv" + "strings" +) + +// Parameter is a key value pair. +type Parameter struct { + Key string + Value string +} + +// Int converts the Value to an int using strconv.Atoi. +func (p Parameter) Int() (int, error) { + return strconv.Atoi(p.Value) +} + +const ( + // ParameterDefaultItemSeparator is the default item separator. + ParameterDefaultItemSeparator = "," + + // ParameterDefaultKeyValueSeparator is the default key value separator. + ParameterDefaultKeyValueSeparator = "=" +) + +// DecodeParameterStr is an alias for DecodeParameterStrAdvanced using item separator and key value separator +// of ',' and '=' respectively. +func DecodeParameterStr(input string) (opts []Parameter, err error) { + return DecodeParameterStrAdvanced(input, ParameterDefaultItemSeparator, ParameterDefaultKeyValueSeparator) +} + +// DecodeParameterStrAdvanced decodes parameter strings into a []Parameter where sepItem separates each parameter, and sepKV separates the key and value. +func DecodeParameterStrAdvanced(input string, sepItem, sepKV string) (opts []Parameter, err error) { + if input == "" { + return nil, fmt.Errorf("empty strings can't be decoded to parameters") + } + + o := strings.Split(input, sepItem) + + opts = make([]Parameter, len(o)) + + for i, joined := range o { + kv := strings.SplitN(joined, sepKV, 2) + if len(kv) != 2 { + return nil, fmt.Errorf("parameter pair '%s' is not properly encoded: does not contain kv separator '%s'", joined, sepKV) + } + + opts[i] = Parameter{Key: kv[0], Value: kv[1]} + } + + return opts, nil +} diff --git a/vendor/github.com/go-crypt/crypt/internal/random/bytes.go b/vendor/github.com/go-crypt/crypt/internal/random/bytes.go new file mode 100644 index 0000000000..20bc235354 --- /dev/null +++ b/vendor/github.com/go-crypt/crypt/internal/random/bytes.go @@ -0,0 +1,32 @@ +package random + +import ( + "crypto/rand" + "io" +) + +// Bytes returns random arbitrary bytes with a length of n. +func Bytes(n int) (bytes []byte, err error) { + bytes = make([]byte, n) + + if _, err = io.ReadFull(rand.Reader, bytes); err != nil { + return nil, err + } + + return bytes, nil +} + +// CharSetBytes returns random bytes with a length of n from the characters in the charset. 
+func CharSetBytes(n int, charset string) (bytes []byte, err error) { + bytes = make([]byte, n) + + if _, err = rand.Read(bytes); err != nil { + return nil, err + } + + for i, b := range bytes { + bytes[i] = charset[b%byte(len(charset))] + } + + return bytes, nil +} diff --git a/vendor/github.com/go-crypt/crypt/internal/random/doc.go b/vendor/github.com/go-crypt/crypt/internal/random/doc.go new file mode 100644 index 0000000000..2bf9f24d31 --- /dev/null +++ b/vendor/github.com/go-crypt/crypt/internal/random/doc.go @@ -0,0 +1,2 @@ +// Package random is an internal helper package. +package random diff --git a/vendor/github.com/go-crypt/x/LICENSE b/vendor/github.com/go-crypt/x/LICENSE new file mode 100644 index 0000000000..6a66aea5ea --- /dev/null +++ b/vendor/github.com/go-crypt/x/LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2009 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/go-crypt/x/base64/base64.go b/vendor/github.com/go-crypt/x/base64/base64.go new file mode 100644 index 0000000000..2cb382a793 --- /dev/null +++ b/vendor/github.com/go-crypt/x/base64/base64.go @@ -0,0 +1,51 @@ +package base64 + +import ( + "encoding/base64" +) + +var AdaptedEncoding = base64.NewEncoding(encodeAdapted) + +// BcryptEncoding is the Bcrypt Base64 Alternative encoding. +var BcryptEncoding = base64.NewEncoding(bcryptB64Alphabet) + +// EncodeCrypt implements the linux crypt lib's B64 encoding. 
+func EncodeCrypt(src []byte) (dst []byte) { + if len(src) == 0 { + return nil + } + + dst = make([]byte, (len(src)*8+5)/6) + + idst, isrc := 0, 0 + + for isrc < len(src)/3*3 { + v := uint(src[isrc+2])<<16 | uint(src[isrc+1])<<8 | uint(src[isrc]) + dst[idst+0] = cryptB64Alphabet[v&0x3f] + dst[idst+1] = cryptB64Alphabet[v>>6&0x3f] + dst[idst+2] = cryptB64Alphabet[v>>12&0x3f] + dst[idst+3] = cryptB64Alphabet[v>>18] + idst += 4 + isrc += 3 + } + + remainder := len(src) - isrc + + if remainder == 0 { + return dst + } + + v := uint(src[isrc+0]) + if remainder == 2 { + v |= uint(src[isrc+1]) << 8 + } + + dst[idst+0] = cryptB64Alphabet[v&0x3f] + dst[idst+1] = cryptB64Alphabet[v>>6&0x3f] + + if remainder == 2 { + dst[idst+2] = cryptB64Alphabet[v>>12] + } + + return dst +} diff --git a/vendor/github.com/go-crypt/x/base64/const.go b/vendor/github.com/go-crypt/x/base64/const.go new file mode 100644 index 0000000000..2e99236872 --- /dev/null +++ b/vendor/github.com/go-crypt/x/base64/const.go @@ -0,0 +1,7 @@ +package base64 + +const ( + cryptB64Alphabet = "./0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" + bcryptB64Alphabet = "./ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789" + encodeAdapted = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789./" +) diff --git a/vendor/github.com/go-crypt/x/crypt/const.go b/vendor/github.com/go-crypt/x/crypt/const.go new file mode 100644 index 0000000000..09153bfb7f --- /dev/null +++ b/vendor/github.com/go-crypt/x/crypt/const.go @@ -0,0 +1,223 @@ +package crypt + +var permuteTableMD5Crypt = [16]byte{ + 12, 6, 0, + 13, 7, 1, + 14, 8, 2, + 15, 9, 3, + 5, 10, 4, + 11, +} + +var permuteTableSHA1Crypt = [21]byte{ + 2, 1, 0, + 5, 4, 3, + 8, 7, 6, + 11, 10, 9, + 14, 13, 12, + 17, 16, 15, + 0, 19, 18, +} + +var permuteTableSHACryptSHA256 = [32]byte{ + 20, 10, 0, + 11, 1, 21, + 2, 22, 12, + 23, 13, 3, + 14, 4, 24, + 5, 25, 15, + 26, 16, 6, + 17, 7, 27, + 8, 28, 18, + 29, 19, 9, + 30, 31, +} + +var permuteTableSHACryptSHA512 = [64]byte{ + 42, 21, 0, + 1, 43, 22, + 23, 2, 44, + 45, 24, 3, + 4, 46, 25, + 26, 5, 47, + 48, 27, 6, + 7, 49, 28, + 29, 8, 50, + 51, 30, 9, + 10, 52, 31, + 32, 11, 53, + 54, 33, 12, + 13, 55, 34, + 35, 14, 56, + 57, 36, 15, + 16, 58, 37, + 38, 17, 59, + 60, 39, 18, + 19, 61, 40, + 41, 20, 62, + 63, +} + +// The following is the 1517 bytes of Hamlet III.ii which is public domain. This is used by Sun's MD5 Crypt function. 
+var magicTableMD5CryptSunHamlet = [1517]byte{ + 84, 111, 32, 98, 101, 44, 32, 111, 114, 32, 110, + 111, 116, 32, 116, 111, 32, 98, 101, 44, 45, + 45, 116, 104, 97, 116, 32, 105, 115, 32, 116, + 104, 101, 32, 113, 117, 101, 115, 116, 105, 111, + 110, 58, 45, 45, 10, 87, 104, 101, 116, 104, + 101, 114, 32, 39, 116, 105, 115, 32, 110, 111, + 98, 108, 101, 114, 32, 105, 110, 32, 116, 104, + 101, 32, 109, 105, 110, 100, 32, 116, 111, 32, + 115, 117, 102, 102, 101, 114, 10, 84, 104, 101, + 32, 115, 108, 105, 110, 103, 115, 32, 97, 110, + 100, 32, 97, 114, 114, 111, 119, 115, 32, 111, + 102, 32, 111, 117, 116, 114, 97, 103, 101, 111, + 117, 115, 32, 102, 111, 114, 116, 117, 110, 101, + 10, 79, 114, 32, 116, 111, 32, 116, 97, 107, + 101, 32, 97, 114, 109, 115, 32, 97, 103, 97, + 105, 110, 115, 116, 32, 97, 32, 115, 101, 97, + 32, 111, 102, 32, 116, 114, 111, 117, 98, 108, + 101, 115, 44, 10, 65, 110, 100, 32, 98, 121, + 32, 111, 112, 112, 111, 115, 105, 110, 103, 32, + 101, 110, 100, 32, 116, 104, 101, 109, 63, 45, + 45, 84, 111, 32, 100, 105, 101, 44, 45, 45, + 116, 111, 32, 115, 108, 101, 101, 112, 44, 45, + 45, 10, 78, 111, 32, 109, 111, 114, 101, 59, + 32, 97, 110, 100, 32, 98, 121, 32, 97, 32, + 115, 108, 101, 101, 112, 32, 116, 111, 32, 115, + 97, 121, 32, 119, 101, 32, 101, 110, 100, 10, + 84, 104, 101, 32, 104, 101, 97, 114, 116, 97, + 99, 104, 101, 44, 32, 97, 110, 100, 32, 116, + 104, 101, 32, 116, 104, 111, 117, 115, 97, 110, + 100, 32, 110, 97, 116, 117, 114, 97, 108, 32, + 115, 104, 111, 99, 107, 115, 10, 84, 104, 97, + 116, 32, 102, 108, 101, 115, 104, 32, 105, 115, + 32, 104, 101, 105, 114, 32, 116, 111, 44, 45, + 45, 39, 116, 105, 115, 32, 97, 32, 99, 111, + 110, 115, 117, 109, 109, 97, 116, 105, 111, 110, + 10, 68, 101, 118, 111, 117, 116, 108, 121, 32, + 116, 111, 32, 98, 101, 32, 119, 105, 115, 104, + 39, 100, 46, 32, 84, 111, 32, 100, 105, 101, + 44, 45, 45, 116, 111, 32, 115, 108, 101, 101, + 112, 59, 45, 45, 10, 84, 111, 32, 115, 108, + 101, 101, 112, 33, 32, 112, 101, 114, 99, 104, + 97, 110, 99, 101, 32, 116, 111, 32, 100, 114, + 101, 97, 109, 58, 45, 45, 97, 121, 44, 32, + 116, 104, 101, 114, 101, 39, 115, 32, 116, 104, + 101, 32, 114, 117, 98, 59, 10, 70, 111, 114, + 32, 105, 110, 32, 116, 104, 97, 116, 32, 115, + 108, 101, 101, 112, 32, 111, 102, 32, 100, 101, + 97, 116, 104, 32, 119, 104, 97, 116, 32, 100, + 114, 101, 97, 109, 115, 32, 109, 97, 121, 32, + 99, 111, 109, 101, 44, 10, 87, 104, 101, 110, + 32, 119, 101, 32, 104, 97, 118, 101, 32, 115, + 104, 117, 102, 102, 108, 101, 100, 32, 111, 102, + 102, 32, 116, 104, 105, 115, 32, 109, 111, 114, + 116, 97, 108, 32, 99, 111, 105, 108, 44, 10, + 77, 117, 115, 116, 32, 103, 105, 118, 101, 32, + 117, 115, 32, 112, 97, 117, 115, 101, 58, 32, + 116, 104, 101, 114, 101, 39, 115, 32, 116, 104, + 101, 32, 114, 101, 115, 112, 101, 99, 116, 10, + 84, 104, 97, 116, 32, 109, 97, 107, 101, 115, + 32, 99, 97, 108, 97, 109, 105, 116, 121, 32, + 111, 102, 32, 115, 111, 32, 108, 111, 110, 103, + 32, 108, 105, 102, 101, 59, 10, 70, 111, 114, + 32, 119, 104, 111, 32, 119, 111, 117, 108, 100, + 32, 98, 101, 97, 114, 32, 116, 104, 101, 32, + 119, 104, 105, 112, 115, 32, 97, 110, 100, 32, + 115, 99, 111, 114, 110, 115, 32, 111, 102, 32, + 116, 105, 109, 101, 44, 10, 84, 104, 101, 32, + 111, 112, 112, 114, 101, 115, 115, 111, 114, 39, + 115, 32, 119, 114, 111, 110, 103, 44, 32, 116, + 104, 101, 32, 112, 114, 111, 117, 100, 32, 109, + 97, 110, 39, 115, 32, 99, 111, 110, 116, 117, + 109, 101, 108, 121, 44, 10, 84, 104, 101, 32, + 112, 97, 110, 
103, 115, 32, 111, 102, 32, 100, + 101, 115, 112, 105, 115, 39, 100, 32, 108, 111, + 118, 101, 44, 32, 116, 104, 101, 32, 108, 97, + 119, 39, 115, 32, 100, 101, 108, 97, 121, 44, + 10, 84, 104, 101, 32, 105, 110, 115, 111, 108, + 101, 110, 99, 101, 32, 111, 102, 32, 111, 102, + 102, 105, 99, 101, 44, 32, 97, 110, 100, 32, + 116, 104, 101, 32, 115, 112, 117, 114, 110, 115, + 10, 84, 104, 97, 116, 32, 112, 97, 116, 105, + 101, 110, 116, 32, 109, 101, 114, 105, 116, 32, + 111, 102, 32, 116, 104, 101, 32, 117, 110, 119, + 111, 114, 116, 104, 121, 32, 116, 97, 107, 101, + 115, 44, 10, 87, 104, 101, 110, 32, 104, 101, + 32, 104, 105, 109, 115, 101, 108, 102, 32, 109, + 105, 103, 104, 116, 32, 104, 105, 115, 32, 113, + 117, 105, 101, 116, 117, 115, 32, 109, 97, 107, + 101, 10, 87, 105, 116, 104, 32, 97, 32, 98, + 97, 114, 101, 32, 98, 111, 100, 107, 105, 110, + 63, 32, 119, 104, 111, 32, 119, 111, 117, 108, + 100, 32, 116, 104, 101, 115, 101, 32, 102, 97, + 114, 100, 101, 108, 115, 32, 98, 101, 97, 114, + 44, 10, 84, 111, 32, 103, 114, 117, 110, 116, + 32, 97, 110, 100, 32, 115, 119, 101, 97, 116, + 32, 117, 110, 100, 101, 114, 32, 97, 32, 119, + 101, 97, 114, 121, 32, 108, 105, 102, 101, 44, + 10, 66, 117, 116, 32, 116, 104, 97, 116, 32, + 116, 104, 101, 32, 100, 114, 101, 97, 100, 32, + 111, 102, 32, 115, 111, 109, 101, 116, 104, 105, + 110, 103, 32, 97, 102, 116, 101, 114, 32, 100, + 101, 97, 116, 104, 44, 45, 45, 10, 84, 104, + 101, 32, 117, 110, 100, 105, 115, 99, 111, 118, + 101, 114, 39, 100, 32, 99, 111, 117, 110, 116, + 114, 121, 44, 32, 102, 114, 111, 109, 32, 119, + 104, 111, 115, 101, 32, 98, 111, 117, 114, 110, + 10, 78, 111, 32, 116, 114, 97, 118, 101, 108, + 108, 101, 114, 32, 114, 101, 116, 117, 114, 110, + 115, 44, 45, 45, 112, 117, 122, 122, 108, 101, + 115, 32, 116, 104, 101, 32, 119, 105, 108, 108, + 44, 10, 65, 110, 100, 32, 109, 97, 107, 101, + 115, 32, 117, 115, 32, 114, 97, 116, 104, 101, + 114, 32, 98, 101, 97, 114, 32, 116, 104, 111, + 115, 101, 32, 105, 108, 108, 115, 32, 119, 101, + 32, 104, 97, 118, 101, 10, 84, 104, 97, 110, + 32, 102, 108, 121, 32, 116, 111, 32, 111, 116, + 104, 101, 114, 115, 32, 116, 104, 97, 116, 32, + 119, 101, 32, 107, 110, 111, 119, 32, 110, 111, + 116, 32, 111, 102, 63, 10, 84, 104, 117, 115, + 32, 99, 111, 110, 115, 99, 105, 101, 110, 99, + 101, 32, 100, 111, 101, 115, 32, 109, 97, 107, + 101, 32, 99, 111, 119, 97, 114, 100, 115, 32, + 111, 102, 32, 117, 115, 32, 97, 108, 108, 59, + 10, 65, 110, 100, 32, 116, 104, 117, 115, 32, + 116, 104, 101, 32, 110, 97, 116, 105, 118, 101, + 32, 104, 117, 101, 32, 111, 102, 32, 114, 101, + 115, 111, 108, 117, 116, 105, 111, 110, 10, 73, + 115, 32, 115, 105, 99, 107, 108, 105, 101, 100, + 32, 111, 39, 101, 114, 32, 119, 105, 116, 104, + 32, 116, 104, 101, 32, 112, 97, 108, 101, 32, + 99, 97, 115, 116, 32, 111, 102, 32, 116, 104, + 111, 117, 103, 104, 116, 59, 10, 65, 110, 100, + 32, 101, 110, 116, 101, 114, 112, 114, 105, 115, + 101, 115, 32, 111, 102, 32, 103, 114, 101, 97, + 116, 32, 112, 105, 116, 104, 32, 97, 110, 100, + 32, 109, 111, 109, 101, 110, 116, 44, 10, 87, + 105, 116, 104, 32, 116, 104, 105, 115, 32, 114, + 101, 103, 97, 114, 100, 44, 32, 116, 104, 101, + 105, 114, 32, 99, 117, 114, 114, 101, 110, 116, + 115, 32, 116, 117, 114, 110, 32, 97, 119, 114, + 121, 44, 10, 65, 110, 100, 32, 108, 111, 115, + 101, 32, 116, 104, 101, 32, 110, 97, 109, 101, + 32, 111, 102, 32, 97, 99, 116, 105, 111, 110, + 46, 45, 45, 83, 111, 102, 116, 32, 121, 111, + 117, 32, 110, 111, 119, 33, 10, 84, 104, 101, + 32, 
102, 97, 105, 114, 32, 79, 112, 104, 101, + 108, 105, 97, 33, 45, 45, 78, 121, 109, 112, + 104, 44, 32, 105, 110, 32, 116, 104, 121, 32, + 111, 114, 105, 115, 111, 110, 115, 10, 66, 101, + 32, 97, 108, 108, 32, 109, 121, 32, 115, 105, + 110, 115, 32, 114, 101, 109, 101, 109, 98, 101, + 114, 39, 100, 46, 10, 0, +} + +var ( + prefixMD5Crypt = []byte("$1$") + prefixSHA1Crypt = []byte("$sha1$") + prefixSunMD5Crypt = []byte("$md5$") + prefixSunMD5CryptRounds = []byte("$md5,rounds=") + sepCrypt = []byte("$") +) diff --git a/vendor/github.com/go-crypt/x/crypt/crypt.go b/vendor/github.com/go-crypt/x/crypt/crypt.go new file mode 100644 index 0000000000..3bd2cdb130 --- /dev/null +++ b/vendor/github.com/go-crypt/x/crypt/crypt.go @@ -0,0 +1,300 @@ +package crypt + +import ( + "crypto/hmac" + "crypto/md5" + "crypto/sha1" + "crypto/sha256" + "crypto/sha512" + "hash" + "strconv" +) + +// KeySHACrypt calculates the shacrypt SHA256/SHA512 key given an appropriate hash.Hash, password, salt, and number of rounds. +func KeySHACrypt(hashFunc func() hash.Hash, password, salt []byte, rounds int) []byte { + // Step 1. + digest := hashFunc() + + size := digest.Size() + + switch size { + case sha1.Size: + return KeySHA1Crypt(password, salt, uint32(rounds)) + case sha256.Size, sha512.Size: + break + default: + return nil + } + + length := len(password) + + // Step 2. + digest.Write(password) + + // Step 3. + digest.Write(salt) + + // Step 4. + digestB := hashFunc() + + // Step 5. + digestB.Write(password) + + // Step 6. + digestB.Write(salt) + + // Step 7. + digestB.Write(password) + + // Step 8. + sumB := digestB.Sum(nil) + digestB.Reset() + digestB = nil + + // Step 9 and 10: + digest.Write(repeat(sumB, length)) + + // Step 11. + for i := length; i > 0; i >>= 1 { + if even(i) { + digest.Write(password) + } else { + digest.Write(sumB) + } + } + + clean(sumB) + sumB = nil + + // Step 12. + sumA := digest.Sum(nil) + digest.Reset() + + // Step 13-14. + for i := 0; i < length; i++ { + digest.Write(password) + } + + // Step 15. + sumDP := digest.Sum(nil) + digest.Reset() + + // Step 16. + seqP := repeat(sumDP, length) + sumDP = nil + + // Step 17-18. + for i := 0; i < 16+int(sumA[0]); i++ { + digest.Write(salt) + } + + // Step 19. + sumDS := digest.Sum(nil) + digest.Reset() + + // Step 20. + seqS := repeat(sumDS, len(salt)) + + // Step 21. + for i := 0; i < rounds; i++ { + digest.Reset() + + // Step 21 Sub-Step B and C. + if i&1 != 0 { + // Step 21 Sub-Step B. + digest.Write(seqP) + } else { + // Step 21 Sub-Step C. + digest.Write(sumA) + } + + // Step 21 Sub-Step D. + if i%3 != 0 { + digest.Write(seqS) + } + + // Step 21 Sub-Step E. + if i%7 != 0 { + digest.Write(seqP) + } + + // Step 21 Sub-Step F and G. + if i&1 != 0 { + // Step 21 Sub-Step F. + digest.Write(sumA) + } else { + // Step 21 Sub-Step G. + digest.Write(seqP) + } + + // Sub-Step H. + copy(sumA, digest.Sum(nil)) + } + + digest.Reset() + digest = nil + + seqP, seqS = nil, nil + + switch size { + case sha256.Size: + // Step 22 Sub Step E. + return permute(sumA, permuteTableSHACryptSHA256[:]) + case sha512.Size: + // Step 22 Sub Step E. + return permute(sumA, permuteTableSHACryptSHA512[:]) + } + + return nil +} + +// KeySHA1Crypt calculates the sha1crypt key given a password, salt, and number of rounds. 
+func KeySHA1Crypt(password, salt []byte, rounds uint32) []byte { + digest := hmac.New(sha1.New, password) + digest.Write(salt) + digest.Write(prefixSHA1Crypt) + digest.Write([]byte(strconv.FormatUint(uint64(rounds), 10))) + + sumA := digest.Sum(nil) + + if rounds == 0 { + return permute(sumA, permuteTableSHA1Crypt[:]) + } + + for rounds--; rounds > 0; rounds-- { + digest.Reset() + + digest.Write(sumA) + + copy(sumA, digest.Sum(nil)) + } + + return permute(sumA, permuteTableSHA1Crypt[:]) +} + +// KeyMD5Crypt calculates the md5crypt key given a password and salt. +func KeyMD5Crypt(password, salt []byte) []byte { + length := len(password) + + digest := md5.New() + + digest.Write(password) + digest.Write(salt) + digest.Write(password) + + sumB := digest.Sum(nil) + + digest.Reset() + + digest.Write(password) + digest.Write(prefixMD5Crypt) + digest.Write(salt) + digest.Write(repeat(sumB, length)) + + clean(sumB) + + for i := length; i > 0; i >>= 1 { + if even(i) { + digest.Write(password[0:1]) + } else { + digest.Write([]byte{0}) + } + } + + sumA := digest.Sum(nil) + + for i := 0; i < 1000; i++ { + digest.Reset() + + if even(i) { + digest.Write(sumA) + } else { + digest.Write(password) + } + + if i%3 != 0 { + digest.Write(salt) + } + + if i%7 != 0 { + digest.Write(password) + } + + if i&1 == 0 { + digest.Write(password) + } else { + digest.Write(sumA) + } + + copy(sumA, digest.Sum(nil)) + } + + return permute(sumA, permuteTableMD5Crypt[:]) +} + +// KeyMD5CryptSun calculates the md5crypt (Sun Version) key given a password, salt, and number rounds. +func KeyMD5CryptSun(password, salt []byte, rounds uint32) []byte { + digest := md5.New() + + digest.Write(password) + + if rounds == 0 { + digest.Write(prefixSunMD5Crypt) + digest.Write(salt) + digest.Write(sepCrypt) + } else { + digest.Write(prefixSunMD5CryptRounds) + digest.Write([]byte(strconv.FormatUint(uint64(rounds), 10))) + digest.Write(sepCrypt) + digest.Write(salt) + digest.Write(sepCrypt) + } + + sumA := digest.Sum(nil) + + iterations := uint32(rounds + 4096) + + bit := func(off uint32) uint32 { + off %= 128 + if (sumA[off/8] & (0x01 << (off % 8))) != 0 { + return 1 + } + + return 0 + } + + var ind7 [md5.Size]byte + + for i := uint32(0); i < iterations; i++ { + digest.Reset() + + digest.Write(sumA) + + for j := 0; j < md5.Size; j++ { + off := (j + 3) % 16 + ind4 := (sumA[j] >> (sumA[off] % 5)) & 0x0F + sh7 := (sumA[off] >> (sumA[j] % 8)) & 0x01 + ind7[j] = (sumA[ind4] >> sh7) & 0x7F + } + + var indA, indB uint32 + + for j := uint(0); j < 8; j++ { + indA |= bit(uint32(ind7[j])) << j + indB |= bit(uint32(ind7[j+8])) << j + } + + indA = (indA >> bit(i)) & 0x7F + indB = (indB >> bit(i+64)) & 0x7F + + if bit(indA)^bit(indB) == 1 { + digest.Write(magicTableMD5CryptSunHamlet[:]) + } + + digest.Write([]byte(strconv.FormatUint(uint64(i), 10))) + + copy(sumA, digest.Sum(nil)) + } + + return permute(sumA, permuteTableMD5Crypt[:]) +} diff --git a/vendor/github.com/go-crypt/x/crypt/util.go b/vendor/github.com/go-crypt/x/crypt/util.go new file mode 100644 index 0000000000..c5e6fa4477 --- /dev/null +++ b/vendor/github.com/go-crypt/x/crypt/util.go @@ -0,0 +1,54 @@ +package crypt + +import ( + b64 "github.com/go-crypt/x/base64" +) + +func permute(sum, table []byte) []byte { + size := len(table) + + key := make([]byte, size) + + for i := 0; i < size; i++ { + key[i] = sum[table[i]] + } + + return b64.EncodeCrypt(key) +} + +func even(i int) bool { + return i%2 == 0 +} + +var ( + cleanBytes = make([]byte, 64) +) + +func clean(b []byte) { + l := len(b) + + for ; 
l > 64; l -= 64 { + copy(b[l-64:l], cleanBytes) + } + + if l > 0 { + copy(b[0:l], cleanBytes[0:l]) + } +} + +func repeat(input []byte, length int) []byte { + var ( + seq = make([]byte, length) + unit = len(input) + ) + + j := length / unit * unit + for i := 0; i < j; i += unit { + copy(seq[i:length], input) + } + if j < length { + copy(seq[j:length], input[0:length-j]) + } + + return seq +} diff --git a/vendor/github.com/gogs/chardet/2022.go b/vendor/github.com/gogs/chardet/2022.go new file mode 100644 index 0000000000..e667225e5e --- /dev/null +++ b/vendor/github.com/gogs/chardet/2022.go @@ -0,0 +1,102 @@ +package chardet + +import ( + "bytes" +) + +type recognizer2022 struct { + charset string + escapes [][]byte +} + +func (r *recognizer2022) Match(input *recognizerInput) (output recognizerOutput) { + return recognizerOutput{ + Charset: r.charset, + Confidence: r.matchConfidence(input.input), + } +} + +func (r *recognizer2022) matchConfidence(input []byte) int { + var hits, misses, shifts int +input: + for i := 0; i < len(input); i++ { + c := input[i] + if c == 0x1B { + for _, esc := range r.escapes { + if bytes.HasPrefix(input[i+1:], esc) { + hits++ + i += len(esc) + continue input + } + } + misses++ + } else if c == 0x0E || c == 0x0F { + shifts++ + } + } + if hits == 0 { + return 0 + } + quality := (100*hits - 100*misses) / (hits + misses) + if hits+shifts < 5 { + quality -= (5 - (hits + shifts)) * 10 + } + if quality < 0 { + quality = 0 + } + return quality +} + +var escapeSequences_2022JP = [][]byte{ + {0x24, 0x28, 0x43}, // KS X 1001:1992 + {0x24, 0x28, 0x44}, // JIS X 212-1990 + {0x24, 0x40}, // JIS C 6226-1978 + {0x24, 0x41}, // GB 2312-80 + {0x24, 0x42}, // JIS X 208-1983 + {0x26, 0x40}, // JIS X 208 1990, 1997 + {0x28, 0x42}, // ASCII + {0x28, 0x48}, // JIS-Roman + {0x28, 0x49}, // Half-width katakana + {0x28, 0x4a}, // JIS-Roman + {0x2e, 0x41}, // ISO 8859-1 + {0x2e, 0x46}, // ISO 8859-7 +} + +var escapeSequences_2022KR = [][]byte{ + {0x24, 0x29, 0x43}, +} + +var escapeSequences_2022CN = [][]byte{ + {0x24, 0x29, 0x41}, // GB 2312-80 + {0x24, 0x29, 0x47}, // CNS 11643-1992 Plane 1 + {0x24, 0x2A, 0x48}, // CNS 11643-1992 Plane 2 + {0x24, 0x29, 0x45}, // ISO-IR-165 + {0x24, 0x2B, 0x49}, // CNS 11643-1992 Plane 3 + {0x24, 0x2B, 0x4A}, // CNS 11643-1992 Plane 4 + {0x24, 0x2B, 0x4B}, // CNS 11643-1992 Plane 5 + {0x24, 0x2B, 0x4C}, // CNS 11643-1992 Plane 6 + {0x24, 0x2B, 0x4D}, // CNS 11643-1992 Plane 7 + {0x4e}, // SS2 + {0x4f}, // SS3 +} + +func newRecognizer_2022JP() *recognizer2022 { + return &recognizer2022{ + "ISO-2022-JP", + escapeSequences_2022JP, + } +} + +func newRecognizer_2022KR() *recognizer2022 { + return &recognizer2022{ + "ISO-2022-KR", + escapeSequences_2022KR, + } +} + +func newRecognizer_2022CN() *recognizer2022 { + return &recognizer2022{ + "ISO-2022-CN", + escapeSequences_2022CN, + } +} diff --git a/vendor/github.com/gogs/chardet/AUTHORS b/vendor/github.com/gogs/chardet/AUTHORS new file mode 100644 index 0000000000..842d0216db --- /dev/null +++ b/vendor/github.com/gogs/chardet/AUTHORS @@ -0,0 +1 @@ +Sheng Yu (yusheng dot sjtu at gmail dot com) diff --git a/vendor/github.com/gogs/chardet/LICENSE b/vendor/github.com/gogs/chardet/LICENSE new file mode 100644 index 0000000000..35ee796b94 --- /dev/null +++ b/vendor/github.com/gogs/chardet/LICENSE @@ -0,0 +1,22 @@ +Copyright (c) 2012 chardet Authors + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the 
Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +Partial of the Software is derived from ICU project. See icu-license.html for +license of the derivative portions. diff --git a/vendor/github.com/gogs/chardet/README.md b/vendor/github.com/gogs/chardet/README.md new file mode 100644 index 0000000000..f5c7cc2087 --- /dev/null +++ b/vendor/github.com/gogs/chardet/README.md @@ -0,0 +1,12 @@ +# chardet + +chardet is library to automatically detect +[charset](http://en.wikipedia.org/wiki/Character_encoding) of texts for [Go +programming language](http://golang.org/). It's based on the algorithm and data +in [ICU](http://icu-project.org/)'s implementation. + +The project was created by [saintfish](http://github.com/saintfish/chardet). In January 2015 it was forked by the gogits project in order to incorporate bugfixes and new features. + +## Documentation and Usage + +See [pkgdoc](http://godoc.org/github.com/gogs/chardet) diff --git a/vendor/github.com/gogs/chardet/detector.go b/vendor/github.com/gogs/chardet/detector.go new file mode 100644 index 0000000000..027e1c37bb --- /dev/null +++ b/vendor/github.com/gogs/chardet/detector.go @@ -0,0 +1,147 @@ +// Package chardet ports character set detection from ICU. +package chardet + +import ( + "errors" + "sort" +) + +// Result contains all the information that charset detector gives. +type Result struct { + // IANA name of the detected charset. + Charset string + // IANA name of the detected language. It may be empty for some charsets. + Language string + // Confidence of the Result. Scale from 1 to 100. The bigger, the more confident. + Confidence int +} + +// Detector implements charset detection. 
+type Detector struct { + recognizers []recognizer + stripTag bool +} + +// List of charset recognizers +var recognizers = []recognizer{ + newRecognizer_utf8(), + newRecognizer_utf16be(), + newRecognizer_utf16le(), + newRecognizer_utf32be(), + newRecognizer_utf32le(), + newRecognizer_8859_1_en(), + newRecognizer_8859_1_da(), + newRecognizer_8859_1_de(), + newRecognizer_8859_1_es(), + newRecognizer_8859_1_fr(), + newRecognizer_8859_1_it(), + newRecognizer_8859_1_nl(), + newRecognizer_8859_1_no(), + newRecognizer_8859_1_pt(), + newRecognizer_8859_1_sv(), + newRecognizer_8859_2_cs(), + newRecognizer_8859_2_hu(), + newRecognizer_8859_2_pl(), + newRecognizer_8859_2_ro(), + newRecognizer_8859_5_ru(), + newRecognizer_8859_6_ar(), + newRecognizer_8859_7_el(), + newRecognizer_8859_8_I_he(), + newRecognizer_8859_8_he(), + newRecognizer_windows_1251(), + newRecognizer_windows_1256(), + newRecognizer_KOI8_R(), + newRecognizer_8859_9_tr(), + + newRecognizer_sjis(), + newRecognizer_gb_18030(), + newRecognizer_euc_jp(), + newRecognizer_euc_kr(), + newRecognizer_big5(), + + newRecognizer_2022JP(), + newRecognizer_2022KR(), + newRecognizer_2022CN(), + + newRecognizer_IBM424_he_rtl(), + newRecognizer_IBM424_he_ltr(), + newRecognizer_IBM420_ar_rtl(), + newRecognizer_IBM420_ar_ltr(), +} + +// NewTextDetector creates a Detector for plain text. +func NewTextDetector() *Detector { + return &Detector{recognizers, false} +} + +// NewHtmlDetector creates a Detector for Html. +func NewHtmlDetector() *Detector { + return &Detector{recognizers, true} +} + +var ( + NotDetectedError = errors.New("Charset not detected.") +) + +// DetectBest returns the Result with highest Confidence. +func (d *Detector) DetectBest(b []byte) (r *Result, err error) { + input := newRecognizerInput(b, d.stripTag) + outputChan := make(chan recognizerOutput) + for _, r := range d.recognizers { + go matchHelper(r, input, outputChan) + } + var output Result + for i := 0; i < len(d.recognizers); i++ { + o := <-outputChan + if output.Confidence < o.Confidence { + output = Result(o) + } + } + if output.Confidence == 0 { + return nil, NotDetectedError + } + return &output, nil +} + +// DetectAll returns all Results which have non-zero Confidence. The Results are sorted by Confidence in descending order. 
+func (d *Detector) DetectAll(b []byte) ([]Result, error) { + input := newRecognizerInput(b, d.stripTag) + outputChan := make(chan recognizerOutput) + for _, r := range d.recognizers { + go matchHelper(r, input, outputChan) + } + outputs := make(recognizerOutputs, 0, len(d.recognizers)) + for i := 0; i < len(d.recognizers); i++ { + o := <-outputChan + if o.Confidence > 0 { + outputs = append(outputs, o) + } + } + if len(outputs) == 0 { + return nil, NotDetectedError + } + + sort.Sort(outputs) + dedupOutputs := make([]Result, 0, len(outputs)) + foundCharsets := make(map[string]struct{}, len(outputs)) + for _, o := range outputs { + if _, found := foundCharsets[o.Charset]; !found { + dedupOutputs = append(dedupOutputs, Result(o)) + foundCharsets[o.Charset] = struct{}{} + } + } + if len(dedupOutputs) == 0 { + return nil, NotDetectedError + } + return dedupOutputs, nil +} + +func matchHelper(r recognizer, input *recognizerInput, outputChan chan<- recognizerOutput) { + outputChan <- r.Match(input) +} + +type recognizerOutputs []recognizerOutput + +func (r recognizerOutputs) Len() int { return len(r) } +func (r recognizerOutputs) Less(i, j int) bool { return r[i].Confidence > r[j].Confidence } +func (r recognizerOutputs) Swap(i, j int) { r[i], r[j] = r[j], r[i] } diff --git a/vendor/github.com/gogs/chardet/icu-license.html b/vendor/github.com/gogs/chardet/icu-license.html new file mode 100644 index 0000000000..d078d0575b --- /dev/null +++ b/vendor/github.com/gogs/chardet/icu-license.html @@ -0,0 +1,51 @@ + + + + +ICU License - ICU 1.8.1 and later + + + +

+COPYRIGHT AND PERMISSION NOTICE
+
+Copyright (c) 1995-2012 International Business Machines Corporation and others
+
+All rights reserved.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, provided that the above copyright notice(s) and this permission notice appear in all copies of the Software and that both the above copyright notice(s) and this permission notice appear in supporting documentation.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF THIRD PARTY RIGHTS. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS NOTICE BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+Except as contained in this notice, the name of a copyright holder shall not be used in advertising or otherwise to promote the sale, use or other dealings in this Software without prior written authorization of the copyright holder.
+
+All trademarks and registered trademarks mentioned herein are the property of their respective owners.
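As a rough usage sketch (not part of this change set): the detector API vendored above in detector.go — chardet.NewTextDetector, DetectBest and DetectAll — could be exercised as shown below. The sample bytes and the expected detection result are illustrative assumptions only, not something this diff asserts.

package main

import (
	"fmt"
	"log"

	"github.com/gogs/chardet"
)

func main() {
	// Sample bytes in an unknown (here: ISO 8859-1 / Latin-1) encoding.
	// 0xFC is 'ü' and 0xE4 is 'ä' in Latin-1, so this is not valid UTF-8.
	sample := []byte("Der schnelle braune Fuchs springt \xfcber den faulen Hund und \xe4rgert ihn.")

	detector := chardet.NewTextDetector()

	// DetectBest returns the single candidate with the highest confidence,
	// or NotDetectedError if no recognizer scored above zero.
	best, err := detector.DetectBest(sample)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("best guess: %s (language %q, confidence %d)\n",
		best.Charset, best.Language, best.Confidence)

	// DetectAll returns every non-zero-confidence candidate,
	// sorted by confidence in descending order.
	all, err := detector.DetectAll(sample)
	if err != nil {
		log.Fatal(err)
	}
	for _, r := range all {
		fmt.Printf("candidate: %-12s confidence=%d\n", r.Charset, r.Confidence)
	}
}

DetectAll is useful when a caller wants to fall back to lower-confidence candidates, for example when a declared charset on an incoming message turns out to be wrong; NewHtmlDetector works the same way but strips markup before scoring.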
+ + diff --git a/vendor/github.com/gogs/chardet/multi_byte.go b/vendor/github.com/gogs/chardet/multi_byte.go new file mode 100644 index 0000000000..b5cdf3d6de --- /dev/null +++ b/vendor/github.com/gogs/chardet/multi_byte.go @@ -0,0 +1,345 @@ +package chardet + +import ( + "errors" + "math" +) + +type recognizerMultiByte struct { + charset string + language string + decoder charDecoder + commonChars []uint16 +} + +type charDecoder interface { + DecodeOneChar([]byte) (c uint16, remain []byte, err error) +} + +func (r *recognizerMultiByte) Match(input *recognizerInput) (output recognizerOutput) { + return recognizerOutput{ + Charset: r.charset, + Language: r.language, + Confidence: r.matchConfidence(input), + } +} + +func (r *recognizerMultiByte) matchConfidence(input *recognizerInput) int { + raw := input.raw + var c uint16 + var err error + var totalCharCount, badCharCount, singleByteCharCount, doubleByteCharCount, commonCharCount int + for c, raw, err = r.decoder.DecodeOneChar(raw); len(raw) > 0; c, raw, err = r.decoder.DecodeOneChar(raw) { + totalCharCount++ + if err != nil { + badCharCount++ + } else if c <= 0xFF { + singleByteCharCount++ + } else { + doubleByteCharCount++ + if r.commonChars != nil && binarySearch(r.commonChars, c) { + commonCharCount++ + } + } + if badCharCount >= 2 && badCharCount*5 >= doubleByteCharCount { + return 0 + } + } + + if doubleByteCharCount <= 10 && badCharCount == 0 { + if doubleByteCharCount == 0 && totalCharCount < 10 { + return 0 + } else { + return 10 + } + } + + if doubleByteCharCount < 20*badCharCount { + return 0 + } + if r.commonChars == nil { + confidence := 30 + doubleByteCharCount - 20*badCharCount + if confidence > 100 { + confidence = 100 + } + return confidence + } + maxVal := math.Log(float64(doubleByteCharCount) / 4) + scaleFactor := 90 / maxVal + confidence := int(math.Log(float64(commonCharCount)+1)*scaleFactor + 10) + if confidence > 100 { + confidence = 100 + } + if confidence < 0 { + confidence = 0 + } + return confidence +} + +func binarySearch(l []uint16, c uint16) bool { + start := 0 + end := len(l) - 1 + for start <= end { + mid := (start + end) / 2 + if c == l[mid] { + return true + } else if c < l[mid] { + end = mid - 1 + } else { + start = mid + 1 + } + } + return false +} + +var eobError = errors.New("End of input buffer") +var badCharError = errors.New("Decode a bad char") + +type charDecoder_sjis struct { +} + +func (charDecoder_sjis) DecodeOneChar(input []byte) (c uint16, remain []byte, err error) { + if len(input) == 0 { + return 0, nil, eobError + } + first := input[0] + c = uint16(first) + remain = input[1:] + if first <= 0x7F || (first > 0xA0 && first <= 0xDF) { + return + } + if len(remain) == 0 { + return c, remain, badCharError + } + second := remain[0] + remain = remain[1:] + c = c<<8 | uint16(second) + if (second >= 0x40 && second <= 0x7F) || (second >= 0x80 && second <= 0xFE) { + } else { + err = badCharError + } + return +} + +var commonChars_sjis = []uint16{ + 0x8140, 0x8141, 0x8142, 0x8145, 0x815b, 0x8169, 0x816a, 0x8175, 0x8176, 0x82a0, + 0x82a2, 0x82a4, 0x82a9, 0x82aa, 0x82ab, 0x82ad, 0x82af, 0x82b1, 0x82b3, 0x82b5, + 0x82b7, 0x82bd, 0x82be, 0x82c1, 0x82c4, 0x82c5, 0x82c6, 0x82c8, 0x82c9, 0x82cc, + 0x82cd, 0x82dc, 0x82e0, 0x82e7, 0x82e8, 0x82e9, 0x82ea, 0x82f0, 0x82f1, 0x8341, + 0x8343, 0x834e, 0x834f, 0x8358, 0x835e, 0x8362, 0x8367, 0x8375, 0x8376, 0x8389, + 0x838a, 0x838b, 0x838d, 0x8393, 0x8e96, 0x93fa, 0x95aa, +} + +func newRecognizer_sjis() *recognizerMultiByte { + return &recognizerMultiByte{ + 
"Shift_JIS", + "ja", + charDecoder_sjis{}, + commonChars_sjis, + } +} + +type charDecoder_euc struct { +} + +func (charDecoder_euc) DecodeOneChar(input []byte) (c uint16, remain []byte, err error) { + if len(input) == 0 { + return 0, nil, eobError + } + first := input[0] + remain = input[1:] + c = uint16(first) + if first <= 0x8D { + return uint16(first), remain, nil + } + if len(remain) == 0 { + return 0, nil, eobError + } + second := remain[0] + remain = remain[1:] + c = c<<8 | uint16(second) + if first >= 0xA1 && first <= 0xFE { + if second < 0xA1 { + err = badCharError + } + return + } + if first == 0x8E { + if second < 0xA1 { + err = badCharError + } + return + } + if first == 0x8F { + if len(remain) == 0 { + return 0, nil, eobError + } + third := remain[0] + remain = remain[1:] + c = c<<0 | uint16(third) + if third < 0xa1 { + err = badCharError + } + } + return +} + +var commonChars_euc_jp = []uint16{ + 0xa1a1, 0xa1a2, 0xa1a3, 0xa1a6, 0xa1bc, 0xa1ca, 0xa1cb, 0xa1d6, 0xa1d7, 0xa4a2, + 0xa4a4, 0xa4a6, 0xa4a8, 0xa4aa, 0xa4ab, 0xa4ac, 0xa4ad, 0xa4af, 0xa4b1, 0xa4b3, + 0xa4b5, 0xa4b7, 0xa4b9, 0xa4bb, 0xa4bd, 0xa4bf, 0xa4c0, 0xa4c1, 0xa4c3, 0xa4c4, + 0xa4c6, 0xa4c7, 0xa4c8, 0xa4c9, 0xa4ca, 0xa4cb, 0xa4ce, 0xa4cf, 0xa4d0, 0xa4de, + 0xa4df, 0xa4e1, 0xa4e2, 0xa4e4, 0xa4e8, 0xa4e9, 0xa4ea, 0xa4eb, 0xa4ec, 0xa4ef, + 0xa4f2, 0xa4f3, 0xa5a2, 0xa5a3, 0xa5a4, 0xa5a6, 0xa5a7, 0xa5aa, 0xa5ad, 0xa5af, + 0xa5b0, 0xa5b3, 0xa5b5, 0xa5b7, 0xa5b8, 0xa5b9, 0xa5bf, 0xa5c3, 0xa5c6, 0xa5c7, + 0xa5c8, 0xa5c9, 0xa5cb, 0xa5d0, 0xa5d5, 0xa5d6, 0xa5d7, 0xa5de, 0xa5e0, 0xa5e1, + 0xa5e5, 0xa5e9, 0xa5ea, 0xa5eb, 0xa5ec, 0xa5ed, 0xa5f3, 0xb8a9, 0xb9d4, 0xbaee, + 0xbbc8, 0xbef0, 0xbfb7, 0xc4ea, 0xc6fc, 0xc7bd, 0xcab8, 0xcaf3, 0xcbdc, 0xcdd1, +} + +var commonChars_euc_kr = []uint16{ + 0xb0a1, 0xb0b3, 0xb0c5, 0xb0cd, 0xb0d4, 0xb0e6, 0xb0ed, 0xb0f8, 0xb0fa, 0xb0fc, + 0xb1b8, 0xb1b9, 0xb1c7, 0xb1d7, 0xb1e2, 0xb3aa, 0xb3bb, 0xb4c2, 0xb4cf, 0xb4d9, + 0xb4eb, 0xb5a5, 0xb5b5, 0xb5bf, 0xb5c7, 0xb5e9, 0xb6f3, 0xb7af, 0xb7c2, 0xb7ce, + 0xb8a6, 0xb8ae, 0xb8b6, 0xb8b8, 0xb8bb, 0xb8e9, 0xb9ab, 0xb9ae, 0xb9cc, 0xb9ce, + 0xb9fd, 0xbab8, 0xbace, 0xbad0, 0xbaf1, 0xbbe7, 0xbbf3, 0xbbfd, 0xbcad, 0xbcba, + 0xbcd2, 0xbcf6, 0xbdba, 0xbdc0, 0xbdc3, 0xbdc5, 0xbec6, 0xbec8, 0xbedf, 0xbeee, + 0xbef8, 0xbefa, 0xbfa1, 0xbfa9, 0xbfc0, 0xbfe4, 0xbfeb, 0xbfec, 0xbff8, 0xc0a7, + 0xc0af, 0xc0b8, 0xc0ba, 0xc0bb, 0xc0bd, 0xc0c7, 0xc0cc, 0xc0ce, 0xc0cf, 0xc0d6, + 0xc0da, 0xc0e5, 0xc0fb, 0xc0fc, 0xc1a4, 0xc1a6, 0xc1b6, 0xc1d6, 0xc1df, 0xc1f6, + 0xc1f8, 0xc4a1, 0xc5cd, 0xc6ae, 0xc7cf, 0xc7d1, 0xc7d2, 0xc7d8, 0xc7e5, 0xc8ad, +} + +func newRecognizer_euc_jp() *recognizerMultiByte { + return &recognizerMultiByte{ + "EUC-JP", + "ja", + charDecoder_euc{}, + commonChars_euc_jp, + } +} + +func newRecognizer_euc_kr() *recognizerMultiByte { + return &recognizerMultiByte{ + "EUC-KR", + "ko", + charDecoder_euc{}, + commonChars_euc_kr, + } +} + +type charDecoder_big5 struct { +} + +func (charDecoder_big5) DecodeOneChar(input []byte) (c uint16, remain []byte, err error) { + if len(input) == 0 { + return 0, nil, eobError + } + first := input[0] + remain = input[1:] + c = uint16(first) + if first <= 0x7F || first == 0xFF { + return + } + if len(remain) == 0 { + return c, nil, eobError + } + second := remain[0] + remain = remain[1:] + c = c<<8 | uint16(second) + if second < 0x40 || second == 0x7F || second == 0xFF { + err = badCharError + } + return +} + +var commonChars_big5 = []uint16{ + 0xa140, 0xa141, 0xa142, 0xa143, 0xa147, 0xa149, 0xa175, 0xa176, 0xa440, 0xa446, + 
0xa447, 0xa448, 0xa451, 0xa454, 0xa457, 0xa464, 0xa46a, 0xa46c, 0xa477, 0xa4a3, + 0xa4a4, 0xa4a7, 0xa4c1, 0xa4ce, 0xa4d1, 0xa4df, 0xa4e8, 0xa4fd, 0xa540, 0xa548, + 0xa558, 0xa569, 0xa5cd, 0xa5e7, 0xa657, 0xa661, 0xa662, 0xa668, 0xa670, 0xa6a8, + 0xa6b3, 0xa6b9, 0xa6d3, 0xa6db, 0xa6e6, 0xa6f2, 0xa740, 0xa751, 0xa759, 0xa7da, + 0xa8a3, 0xa8a5, 0xa8ad, 0xa8d1, 0xa8d3, 0xa8e4, 0xa8fc, 0xa9c0, 0xa9d2, 0xa9f3, + 0xaa6b, 0xaaba, 0xaabe, 0xaacc, 0xaafc, 0xac47, 0xac4f, 0xacb0, 0xacd2, 0xad59, + 0xaec9, 0xafe0, 0xb0ea, 0xb16f, 0xb2b3, 0xb2c4, 0xb36f, 0xb44c, 0xb44e, 0xb54c, + 0xb5a5, 0xb5bd, 0xb5d0, 0xb5d8, 0xb671, 0xb7ed, 0xb867, 0xb944, 0xbad8, 0xbb44, + 0xbba1, 0xbdd1, 0xc2c4, 0xc3b9, 0xc440, 0xc45f, +} + +func newRecognizer_big5() *recognizerMultiByte { + return &recognizerMultiByte{ + "Big5", + "zh", + charDecoder_big5{}, + commonChars_big5, + } +} + +type charDecoder_gb_18030 struct { +} + +func (charDecoder_gb_18030) DecodeOneChar(input []byte) (c uint16, remain []byte, err error) { + if len(input) == 0 { + return 0, nil, eobError + } + first := input[0] + remain = input[1:] + c = uint16(first) + if first <= 0x80 { + return + } + if len(remain) == 0 { + return 0, nil, eobError + } + second := remain[0] + remain = remain[1:] + c = c<<8 | uint16(second) + if first >= 0x81 && first <= 0xFE { + if (second >= 0x40 && second <= 0x7E) || (second >= 0x80 && second <= 0xFE) { + return + } + + if second >= 0x30 && second <= 0x39 { + if len(remain) == 0 { + return 0, nil, eobError + } + third := remain[0] + remain = remain[1:] + if third >= 0x81 && third <= 0xFE { + if len(remain) == 0 { + return 0, nil, eobError + } + fourth := remain[0] + remain = remain[1:] + if fourth >= 0x30 && fourth <= 0x39 { + c = c<<16 | uint16(third)<<8 | uint16(fourth) + return + } + } + } + err = badCharError + } + return +} + +var commonChars_gb_18030 = []uint16{ + 0xa1a1, 0xa1a2, 0xa1a3, 0xa1a4, 0xa1b0, 0xa1b1, 0xa1f1, 0xa1f3, 0xa3a1, 0xa3ac, + 0xa3ba, 0xb1a8, 0xb1b8, 0xb1be, 0xb2bb, 0xb3c9, 0xb3f6, 0xb4f3, 0xb5bd, 0xb5c4, + 0xb5e3, 0xb6af, 0xb6d4, 0xb6e0, 0xb7a2, 0xb7a8, 0xb7bd, 0xb7d6, 0xb7dd, 0xb8b4, + 0xb8df, 0xb8f6, 0xb9ab, 0xb9c9, 0xb9d8, 0xb9fa, 0xb9fd, 0xbacd, 0xbba7, 0xbbd6, + 0xbbe1, 0xbbfa, 0xbcbc, 0xbcdb, 0xbcfe, 0xbdcc, 0xbecd, 0xbedd, 0xbfb4, 0xbfc6, + 0xbfc9, 0xc0b4, 0xc0ed, 0xc1cb, 0xc2db, 0xc3c7, 0xc4dc, 0xc4ea, 0xc5cc, 0xc6f7, + 0xc7f8, 0xc8ab, 0xc8cb, 0xc8d5, 0xc8e7, 0xc9cf, 0xc9fa, 0xcab1, 0xcab5, 0xcac7, + 0xcad0, 0xcad6, 0xcaf5, 0xcafd, 0xccec, 0xcdf8, 0xceaa, 0xcec4, 0xced2, 0xcee5, + 0xcfb5, 0xcfc2, 0xcfd6, 0xd0c2, 0xd0c5, 0xd0d0, 0xd0d4, 0xd1a7, 0xd2aa, 0xd2b2, + 0xd2b5, 0xd2bb, 0xd2d4, 0xd3c3, 0xd3d0, 0xd3fd, 0xd4c2, 0xd4da, 0xd5e2, 0xd6d0, +} + +func newRecognizer_gb_18030() *recognizerMultiByte { + return &recognizerMultiByte{ + "GB18030", + "zh", + charDecoder_gb_18030{}, + commonChars_gb_18030, + } +} diff --git a/vendor/github.com/gogs/chardet/recognizer.go b/vendor/github.com/gogs/chardet/recognizer.go new file mode 100644 index 0000000000..1bf8461c3e --- /dev/null +++ b/vendor/github.com/gogs/chardet/recognizer.go @@ -0,0 +1,83 @@ +package chardet + +type recognizer interface { + Match(*recognizerInput) recognizerOutput +} + +type recognizerOutput Result + +type recognizerInput struct { + raw []byte + input []byte + tagStripped bool + byteStats []int + hasC1Bytes bool +} + +func newRecognizerInput(raw []byte, stripTag bool) *recognizerInput { + input, stripped := mayStripInput(raw, stripTag) + byteStats := computeByteStats(input) + return &recognizerInput{ + raw: raw, + input: input, + 
tagStripped: stripped, + byteStats: byteStats, + hasC1Bytes: computeHasC1Bytes(byteStats), + } +} + +func mayStripInput(raw []byte, stripTag bool) (out []byte, stripped bool) { + const inputBufferSize = 8192 + out = make([]byte, 0, inputBufferSize) + var badTags, openTags int32 + var inMarkup bool = false + stripped = false + if stripTag { + stripped = true + for _, c := range raw { + if c == '<' { + if inMarkup { + badTags += 1 + } + inMarkup = true + openTags += 1 + } + if !inMarkup { + out = append(out, c) + if len(out) >= inputBufferSize { + break + } + } + if c == '>' { + inMarkup = false + } + } + } + if openTags < 5 || openTags/5 < badTags || (len(out) < 100 && len(raw) > 600) { + limit := len(raw) + if limit > inputBufferSize { + limit = inputBufferSize + } + out = make([]byte, limit) + copy(out, raw[:limit]) + stripped = false + } + return +} + +func computeByteStats(input []byte) []int { + r := make([]int, 256) + for _, c := range input { + r[c] += 1 + } + return r +} + +func computeHasC1Bytes(byteStats []int) bool { + for _, count := range byteStats[0x80 : 0x9F+1] { + if count > 0 { + return true + } + } + return false +} diff --git a/vendor/github.com/gogs/chardet/single_byte.go b/vendor/github.com/gogs/chardet/single_byte.go new file mode 100644 index 0000000000..a7ce39bc2f --- /dev/null +++ b/vendor/github.com/gogs/chardet/single_byte.go @@ -0,0 +1,882 @@ +package chardet + +// Recognizer for single byte charset family +type recognizerSingleByte struct { + charset string + hasC1ByteCharset string + language string + charMap *[256]byte + ngram *[64]uint32 +} + +func (r *recognizerSingleByte) Match(input *recognizerInput) recognizerOutput { + var charset string = r.charset + if input.hasC1Bytes && len(r.hasC1ByteCharset) > 0 { + charset = r.hasC1ByteCharset + } + return recognizerOutput{ + Charset: charset, + Language: r.language, + Confidence: r.parseNgram(input.input), + } +} + +type ngramState struct { + ngram uint32 + ignoreSpace bool + ngramCount, ngramHit uint32 + table *[64]uint32 +} + +func newNgramState(table *[64]uint32) *ngramState { + return &ngramState{ + ngram: 0, + ignoreSpace: false, + ngramCount: 0, + ngramHit: 0, + table: table, + } +} + +func (s *ngramState) AddByte(b byte) { + const ngramMask = 0xFFFFFF + if !(b == 0x20 && s.ignoreSpace) { + s.ngram = ((s.ngram << 8) | uint32(b)) & ngramMask + s.ignoreSpace = (s.ngram == 0x20) + s.ngramCount++ + if s.lookup() { + s.ngramHit++ + } + } + s.ignoreSpace = (b == 0x20) +} + +func (s *ngramState) HitRate() float32 { + if s.ngramCount == 0 { + return 0 + } + return float32(s.ngramHit) / float32(s.ngramCount) +} + +func (s *ngramState) lookup() bool { + var index int + if s.table[index+32] <= s.ngram { + index += 32 + } + if s.table[index+16] <= s.ngram { + index += 16 + } + if s.table[index+8] <= s.ngram { + index += 8 + } + if s.table[index+4] <= s.ngram { + index += 4 + } + if s.table[index+2] <= s.ngram { + index += 2 + } + if s.table[index+1] <= s.ngram { + index += 1 + } + if s.table[index] > s.ngram { + index -= 1 + } + if index < 0 || s.table[index] != s.ngram { + return false + } + return true +} + +func (r *recognizerSingleByte) parseNgram(input []byte) int { + state := newNgramState(r.ngram) + for _, inChar := range input { + c := r.charMap[inChar] + if c != 0 { + state.AddByte(c) + } + } + state.AddByte(0x20) + rate := state.HitRate() + if rate > 0.33 { + return 98 + } + return int(rate * 300) +} + +var charMap_8859_1 = [256]byte{ + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 
0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x00, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66, 0x67, + 0x68, 0x69, 0x6A, 0x6B, 0x6C, 0x6D, 0x6E, 0x6F, + 0x70, 0x71, 0x72, 0x73, 0x74, 0x75, 0x76, 0x77, + 0x78, 0x79, 0x7A, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66, 0x67, + 0x68, 0x69, 0x6A, 0x6B, 0x6C, 0x6D, 0x6E, 0x6F, + 0x70, 0x71, 0x72, 0x73, 0x74, 0x75, 0x76, 0x77, + 0x78, 0x79, 0x7A, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0xAA, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0xB5, 0x20, 0x20, + 0x20, 0x20, 0xBA, 0x20, 0x20, 0x20, 0x20, 0x20, + 0xE0, 0xE1, 0xE2, 0xE3, 0xE4, 0xE5, 0xE6, 0xE7, + 0xE8, 0xE9, 0xEA, 0xEB, 0xEC, 0xED, 0xEE, 0xEF, + 0xF0, 0xF1, 0xF2, 0xF3, 0xF4, 0xF5, 0xF6, 0x20, + 0xF8, 0xF9, 0xFA, 0xFB, 0xFC, 0xFD, 0xFE, 0xDF, + 0xE0, 0xE1, 0xE2, 0xE3, 0xE4, 0xE5, 0xE6, 0xE7, + 0xE8, 0xE9, 0xEA, 0xEB, 0xEC, 0xED, 0xEE, 0xEF, + 0xF0, 0xF1, 0xF2, 0xF3, 0xF4, 0xF5, 0xF6, 0x20, + 0xF8, 0xF9, 0xFA, 0xFB, 0xFC, 0xFD, 0xFE, 0xFF, +} + +var ngrams_8859_1_en = [64]uint32{ + 0x206120, 0x20616E, 0x206265, 0x20636F, 0x20666F, 0x206861, 0x206865, 0x20696E, 0x206D61, 0x206F66, 0x207072, 0x207265, 0x207361, 0x207374, 0x207468, 0x20746F, + 0x207768, 0x616964, 0x616C20, 0x616E20, 0x616E64, 0x617320, 0x617420, 0x617465, 0x617469, 0x642061, 0x642074, 0x652061, 0x652073, 0x652074, 0x656420, 0x656E74, + 0x657220, 0x657320, 0x666F72, 0x686174, 0x686520, 0x686572, 0x696420, 0x696E20, 0x696E67, 0x696F6E, 0x697320, 0x6E2061, 0x6E2074, 0x6E6420, 0x6E6720, 0x6E7420, + 0x6F6620, 0x6F6E20, 0x6F7220, 0x726520, 0x727320, 0x732061, 0x732074, 0x736169, 0x737420, 0x742074, 0x746572, 0x746861, 0x746865, 0x74696F, 0x746F20, 0x747320, +} + +var ngrams_8859_1_da = [64]uint32{ + 0x206166, 0x206174, 0x206465, 0x20656E, 0x206572, 0x20666F, 0x206861, 0x206920, 0x206D65, 0x206F67, 0x2070E5, 0x207369, 0x207374, 0x207469, 0x207669, 0x616620, + 0x616E20, 0x616E64, 0x617220, 0x617420, 0x646520, 0x64656E, 0x646572, 0x646574, 0x652073, 0x656420, 0x656465, 0x656E20, 0x656E64, 0x657220, 0x657265, 0x657320, + 0x657420, 0x666F72, 0x676520, 0x67656E, 0x676572, 0x696765, 0x696C20, 0x696E67, 0x6B6520, 0x6B6B65, 0x6C6572, 0x6C6967, 0x6C6C65, 0x6D6564, 0x6E6465, 0x6E6520, + 0x6E6720, 0x6E6765, 0x6F6720, 0x6F6D20, 0x6F7220, 0x70E520, 0x722064, 0x722065, 0x722073, 0x726520, 0x737465, 0x742073, 0x746520, 0x746572, 0x74696C, 0x766572, +} + +var ngrams_8859_1_de = [64]uint32{ + 0x20616E, 0x206175, 0x206265, 0x206461, 0x206465, 0x206469, 0x206569, 0x206765, 0x206861, 0x20696E, 0x206D69, 0x207363, 0x207365, 0x20756E, 0x207665, 0x20766F, + 0x207765, 0x207A75, 0x626572, 0x636820, 0x636865, 0x636874, 0x646173, 0x64656E, 0x646572, 0x646965, 0x652064, 0x652073, 0x65696E, 0x656974, 0x656E20, 0x657220, + 0x657320, 0x67656E, 0x68656E, 0x687420, 0x696368, 0x696520, 0x696E20, 0x696E65, 0x697420, 0x6C6963, 0x6C6C65, 0x6E2061, 0x6E2064, 0x6E2073, 0x6E6420, 0x6E6465, + 0x6E6520, 0x6E6720, 0x6E6765, 0x6E7465, 0x722064, 0x726465, 0x726569, 0x736368, 0x737465, 0x742064, 0x746520, 0x74656E, 
0x746572, 0x756E64, 0x756E67, 0x766572, +} + +var ngrams_8859_1_es = [64]uint32{ + 0x206120, 0x206361, 0x20636F, 0x206465, 0x20656C, 0x20656E, 0x206573, 0x20696E, 0x206C61, 0x206C6F, 0x207061, 0x20706F, 0x207072, 0x207175, 0x207265, 0x207365, + 0x20756E, 0x207920, 0x612063, 0x612064, 0x612065, 0x61206C, 0x612070, 0x616369, 0x61646F, 0x616C20, 0x617220, 0x617320, 0x6369F3, 0x636F6E, 0x646520, 0x64656C, + 0x646F20, 0x652064, 0x652065, 0x65206C, 0x656C20, 0x656E20, 0x656E74, 0x657320, 0x657374, 0x69656E, 0x69F36E, 0x6C6120, 0x6C6F73, 0x6E2065, 0x6E7465, 0x6F2064, + 0x6F2065, 0x6F6E20, 0x6F7220, 0x6F7320, 0x706172, 0x717565, 0x726120, 0x726573, 0x732064, 0x732065, 0x732070, 0x736520, 0x746520, 0x746F20, 0x756520, 0xF36E20, +} + +var ngrams_8859_1_fr = [64]uint32{ + 0x206175, 0x20636F, 0x206461, 0x206465, 0x206475, 0x20656E, 0x206574, 0x206C61, 0x206C65, 0x207061, 0x20706F, 0x207072, 0x207175, 0x207365, 0x20736F, 0x20756E, + 0x20E020, 0x616E74, 0x617469, 0x636520, 0x636F6E, 0x646520, 0x646573, 0x647520, 0x652061, 0x652063, 0x652064, 0x652065, 0x65206C, 0x652070, 0x652073, 0x656E20, + 0x656E74, 0x657220, 0x657320, 0x657420, 0x657572, 0x696F6E, 0x697320, 0x697420, 0x6C6120, 0x6C6520, 0x6C6573, 0x6D656E, 0x6E2064, 0x6E6520, 0x6E7320, 0x6E7420, + 0x6F6E20, 0x6F6E74, 0x6F7572, 0x717565, 0x72206C, 0x726520, 0x732061, 0x732064, 0x732065, 0x73206C, 0x732070, 0x742064, 0x746520, 0x74696F, 0x756520, 0x757220, +} + +var ngrams_8859_1_it = [64]uint32{ + 0x20616C, 0x206368, 0x20636F, 0x206465, 0x206469, 0x206520, 0x20696C, 0x20696E, 0x206C61, 0x207065, 0x207072, 0x20756E, 0x612063, 0x612064, 0x612070, 0x612073, + 0x61746F, 0x636865, 0x636F6E, 0x64656C, 0x646920, 0x652061, 0x652063, 0x652064, 0x652069, 0x65206C, 0x652070, 0x652073, 0x656C20, 0x656C6C, 0x656E74, 0x657220, + 0x686520, 0x692061, 0x692063, 0x692064, 0x692073, 0x696120, 0x696C20, 0x696E20, 0x696F6E, 0x6C6120, 0x6C6520, 0x6C6920, 0x6C6C61, 0x6E6520, 0x6E6920, 0x6E6F20, + 0x6E7465, 0x6F2061, 0x6F2064, 0x6F2069, 0x6F2073, 0x6F6E20, 0x6F6E65, 0x706572, 0x726120, 0x726520, 0x736920, 0x746120, 0x746520, 0x746920, 0x746F20, 0x7A696F, +} + +var ngrams_8859_1_nl = [64]uint32{ + 0x20616C, 0x206265, 0x206461, 0x206465, 0x206469, 0x206565, 0x20656E, 0x206765, 0x206865, 0x20696E, 0x206D61, 0x206D65, 0x206F70, 0x207465, 0x207661, 0x207665, + 0x20766F, 0x207765, 0x207A69, 0x61616E, 0x616172, 0x616E20, 0x616E64, 0x617220, 0x617420, 0x636874, 0x646520, 0x64656E, 0x646572, 0x652062, 0x652076, 0x65656E, + 0x656572, 0x656E20, 0x657220, 0x657273, 0x657420, 0x67656E, 0x686574, 0x696520, 0x696E20, 0x696E67, 0x697320, 0x6E2062, 0x6E2064, 0x6E2065, 0x6E2068, 0x6E206F, + 0x6E2076, 0x6E6465, 0x6E6720, 0x6F6E64, 0x6F6F72, 0x6F7020, 0x6F7220, 0x736368, 0x737465, 0x742064, 0x746520, 0x74656E, 0x746572, 0x76616E, 0x766572, 0x766F6F, +} + +var ngrams_8859_1_no = [64]uint32{ + 0x206174, 0x206176, 0x206465, 0x20656E, 0x206572, 0x20666F, 0x206861, 0x206920, 0x206D65, 0x206F67, 0x2070E5, 0x207365, 0x20736B, 0x20736F, 0x207374, 0x207469, + 0x207669, 0x20E520, 0x616E64, 0x617220, 0x617420, 0x646520, 0x64656E, 0x646574, 0x652073, 0x656420, 0x656E20, 0x656E65, 0x657220, 0x657265, 0x657420, 0x657474, + 0x666F72, 0x67656E, 0x696B6B, 0x696C20, 0x696E67, 0x6B6520, 0x6B6B65, 0x6C6520, 0x6C6C65, 0x6D6564, 0x6D656E, 0x6E2073, 0x6E6520, 0x6E6720, 0x6E6765, 0x6E6E65, + 0x6F6720, 0x6F6D20, 0x6F7220, 0x70E520, 0x722073, 0x726520, 0x736F6D, 0x737465, 0x742073, 0x746520, 0x74656E, 0x746572, 0x74696C, 0x747420, 0x747465, 0x766572, +} + +var ngrams_8859_1_pt = [64]uint32{ + 0x206120, 0x20636F, 
0x206461, 0x206465, 0x20646F, 0x206520, 0x206573, 0x206D61, 0x206E6F, 0x206F20, 0x207061, 0x20706F, 0x207072, 0x207175, 0x207265, 0x207365, + 0x20756D, 0x612061, 0x612063, 0x612064, 0x612070, 0x616465, 0x61646F, 0x616C20, 0x617220, 0x617261, 0x617320, 0x636F6D, 0x636F6E, 0x646120, 0x646520, 0x646F20, + 0x646F73, 0x652061, 0x652064, 0x656D20, 0x656E74, 0x657320, 0x657374, 0x696120, 0x696361, 0x6D656E, 0x6E7465, 0x6E746F, 0x6F2061, 0x6F2063, 0x6F2064, 0x6F2065, + 0x6F2070, 0x6F7320, 0x706172, 0x717565, 0x726120, 0x726573, 0x732061, 0x732064, 0x732065, 0x732070, 0x737461, 0x746520, 0x746F20, 0x756520, 0xE36F20, 0xE7E36F, +} + +var ngrams_8859_1_sv = [64]uint32{ + 0x206174, 0x206176, 0x206465, 0x20656E, 0x2066F6, 0x206861, 0x206920, 0x20696E, 0x206B6F, 0x206D65, 0x206F63, 0x2070E5, 0x20736B, 0x20736F, 0x207374, 0x207469, + 0x207661, 0x207669, 0x20E472, 0x616465, 0x616E20, 0x616E64, 0x617220, 0x617474, 0x636820, 0x646520, 0x64656E, 0x646572, 0x646574, 0x656420, 0x656E20, 0x657220, + 0x657420, 0x66F672, 0x67656E, 0x696C6C, 0x696E67, 0x6B6120, 0x6C6C20, 0x6D6564, 0x6E2073, 0x6E6120, 0x6E6465, 0x6E6720, 0x6E6765, 0x6E696E, 0x6F6368, 0x6F6D20, + 0x6F6E20, 0x70E520, 0x722061, 0x722073, 0x726120, 0x736B61, 0x736F6D, 0x742073, 0x746120, 0x746520, 0x746572, 0x74696C, 0x747420, 0x766172, 0xE47220, 0xF67220, +} + +func newRecognizer_8859_1(language string, ngram *[64]uint32) *recognizerSingleByte { + return &recognizerSingleByte{ + charset: "ISO-8859-1", + hasC1ByteCharset: "windows-1252", + language: language, + charMap: &charMap_8859_1, + ngram: ngram, + } +} + +func newRecognizer_8859_1_en() *recognizerSingleByte { + return newRecognizer_8859_1("en", &ngrams_8859_1_en) +} +func newRecognizer_8859_1_da() *recognizerSingleByte { + return newRecognizer_8859_1("da", &ngrams_8859_1_da) +} +func newRecognizer_8859_1_de() *recognizerSingleByte { + return newRecognizer_8859_1("de", &ngrams_8859_1_de) +} +func newRecognizer_8859_1_es() *recognizerSingleByte { + return newRecognizer_8859_1("es", &ngrams_8859_1_es) +} +func newRecognizer_8859_1_fr() *recognizerSingleByte { + return newRecognizer_8859_1("fr", &ngrams_8859_1_fr) +} +func newRecognizer_8859_1_it() *recognizerSingleByte { + return newRecognizer_8859_1("it", &ngrams_8859_1_it) +} +func newRecognizer_8859_1_nl() *recognizerSingleByte { + return newRecognizer_8859_1("nl", &ngrams_8859_1_nl) +} +func newRecognizer_8859_1_no() *recognizerSingleByte { + return newRecognizer_8859_1("no", &ngrams_8859_1_no) +} +func newRecognizer_8859_1_pt() *recognizerSingleByte { + return newRecognizer_8859_1("pt", &ngrams_8859_1_pt) +} +func newRecognizer_8859_1_sv() *recognizerSingleByte { + return newRecognizer_8859_1("sv", &ngrams_8859_1_sv) +} + +var charMap_8859_2 = [256]byte{ + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x00, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66, 0x67, + 0x68, 0x69, 0x6A, 0x6B, 0x6C, 0x6D, 0x6E, 0x6F, + 0x70, 0x71, 0x72, 0x73, 0x74, 0x75, 0x76, 0x77, + 0x78, 0x79, 0x7A, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66, 0x67, + 0x68, 0x69, 0x6A, 0x6B, 0x6C, 0x6D, 0x6E, 0x6F, + 0x70, 0x71, 0x72, 0x73, 0x74, 0x75, 0x76, 0x77, + 0x78, 0x79, 0x7A, 0x20, 0x20, 0x20, 0x20, 0x20, + 
0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0xB1, 0x20, 0xB3, 0x20, 0xB5, 0xB6, 0x20, + 0x20, 0xB9, 0xBA, 0xBB, 0xBC, 0x20, 0xBE, 0xBF, + 0x20, 0xB1, 0x20, 0xB3, 0x20, 0xB5, 0xB6, 0xB7, + 0x20, 0xB9, 0xBA, 0xBB, 0xBC, 0x20, 0xBE, 0xBF, + 0xE0, 0xE1, 0xE2, 0xE3, 0xE4, 0xE5, 0xE6, 0xE7, + 0xE8, 0xE9, 0xEA, 0xEB, 0xEC, 0xED, 0xEE, 0xEF, + 0xF0, 0xF1, 0xF2, 0xF3, 0xF4, 0xF5, 0xF6, 0x20, + 0xF8, 0xF9, 0xFA, 0xFB, 0xFC, 0xFD, 0xFE, 0xDF, + 0xE0, 0xE1, 0xE2, 0xE3, 0xE4, 0xE5, 0xE6, 0xE7, + 0xE8, 0xE9, 0xEA, 0xEB, 0xEC, 0xED, 0xEE, 0xEF, + 0xF0, 0xF1, 0xF2, 0xF3, 0xF4, 0xF5, 0xF6, 0x20, + 0xF8, 0xF9, 0xFA, 0xFB, 0xFC, 0xFD, 0xFE, 0x20, +} + +var ngrams_8859_2_cs = [64]uint32{ + 0x206120, 0x206279, 0x20646F, 0x206A65, 0x206E61, 0x206E65, 0x206F20, 0x206F64, 0x20706F, 0x207072, 0x2070F8, 0x20726F, 0x207365, 0x20736F, 0x207374, 0x20746F, + 0x207620, 0x207679, 0x207A61, 0x612070, 0x636520, 0x636820, 0x652070, 0x652073, 0x652076, 0x656D20, 0x656EED, 0x686F20, 0x686F64, 0x697374, 0x6A6520, 0x6B7465, + 0x6C6520, 0x6C6920, 0x6E6120, 0x6EE920, 0x6EEC20, 0x6EED20, 0x6F2070, 0x6F646E, 0x6F6A69, 0x6F7374, 0x6F7520, 0x6F7661, 0x706F64, 0x706F6A, 0x70726F, 0x70F865, + 0x736520, 0x736F75, 0x737461, 0x737469, 0x73746E, 0x746572, 0x746EED, 0x746F20, 0x752070, 0xBE6520, 0xE16EED, 0xE9686F, 0xED2070, 0xED2073, 0xED6D20, 0xF86564, +} + +var ngrams_8859_2_hu = [64]uint32{ + 0x206120, 0x20617A, 0x206265, 0x206567, 0x20656C, 0x206665, 0x206861, 0x20686F, 0x206973, 0x206B65, 0x206B69, 0x206BF6, 0x206C65, 0x206D61, 0x206D65, 0x206D69, + 0x206E65, 0x20737A, 0x207465, 0x20E973, 0x612061, 0x61206B, 0x61206D, 0x612073, 0x616B20, 0x616E20, 0x617A20, 0x62616E, 0x62656E, 0x656779, 0x656B20, 0x656C20, + 0x656C65, 0x656D20, 0x656E20, 0x657265, 0x657420, 0x657465, 0x657474, 0x677920, 0x686F67, 0x696E74, 0x697320, 0x6B2061, 0x6BF67A, 0x6D6567, 0x6D696E, 0x6E2061, + 0x6E616B, 0x6E656B, 0x6E656D, 0x6E7420, 0x6F6779, 0x732061, 0x737A65, 0x737A74, 0x737AE1, 0x73E967, 0x742061, 0x747420, 0x74E173, 0x7A6572, 0xE16E20, 0xE97320, +} + +var ngrams_8859_2_pl = [64]uint32{ + 0x20637A, 0x20646F, 0x206920, 0x206A65, 0x206B6F, 0x206D61, 0x206D69, 0x206E61, 0x206E69, 0x206F64, 0x20706F, 0x207072, 0x207369, 0x207720, 0x207769, 0x207779, + 0x207A20, 0x207A61, 0x612070, 0x612077, 0x616E69, 0x636820, 0x637A65, 0x637A79, 0x646F20, 0x647A69, 0x652070, 0x652073, 0x652077, 0x65207A, 0x65676F, 0x656A20, + 0x656D20, 0x656E69, 0x676F20, 0x696120, 0x696520, 0x69656A, 0x6B6120, 0x6B6920, 0x6B6965, 0x6D6965, 0x6E6120, 0x6E6961, 0x6E6965, 0x6F2070, 0x6F7761, 0x6F7769, + 0x706F6C, 0x707261, 0x70726F, 0x70727A, 0x727A65, 0x727A79, 0x7369EA, 0x736B69, 0x737461, 0x776965, 0x796368, 0x796D20, 0x7A6520, 0x7A6965, 0x7A7920, 0xF37720, +} + +var ngrams_8859_2_ro = [64]uint32{ + 0x206120, 0x206163, 0x206361, 0x206365, 0x20636F, 0x206375, 0x206465, 0x206469, 0x206C61, 0x206D61, 0x207065, 0x207072, 0x207365, 0x2073E3, 0x20756E, 0x20BA69, + 0x20EE6E, 0x612063, 0x612064, 0x617265, 0x617420, 0x617465, 0x617520, 0x636172, 0x636F6E, 0x637520, 0x63E320, 0x646520, 0x652061, 0x652063, 0x652064, 0x652070, + 0x652073, 0x656120, 0x656920, 0x656C65, 0x656E74, 0x657374, 0x692061, 0x692063, 0x692064, 0x692070, 0x696520, 0x696920, 0x696E20, 0x6C6120, 0x6C6520, 0x6C6F72, + 0x6C7569, 0x6E6520, 0x6E7472, 0x6F7220, 0x70656E, 0x726520, 0x726561, 0x727520, 0x73E320, 0x746520, 0x747275, 0x74E320, 0x756920, 0x756C20, 0xBA6920, 0xEE6E20, 
+} + +func newRecognizer_8859_2(language string, ngram *[64]uint32) *recognizerSingleByte { + return &recognizerSingleByte{ + charset: "ISO-8859-2", + hasC1ByteCharset: "windows-1250", + language: language, + charMap: &charMap_8859_2, + ngram: ngram, + } +} + +func newRecognizer_8859_2_cs() *recognizerSingleByte { + return newRecognizer_8859_2("cs", &ngrams_8859_2_cs) +} +func newRecognizer_8859_2_hu() *recognizerSingleByte { + return newRecognizer_8859_2("hu", &ngrams_8859_2_hu) +} +func newRecognizer_8859_2_pl() *recognizerSingleByte { + return newRecognizer_8859_2("pl", &ngrams_8859_2_pl) +} +func newRecognizer_8859_2_ro() *recognizerSingleByte { + return newRecognizer_8859_2("ro", &ngrams_8859_2_ro) +} + +var charMap_8859_5 = [256]byte{ + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x00, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66, 0x67, + 0x68, 0x69, 0x6A, 0x6B, 0x6C, 0x6D, 0x6E, 0x6F, + 0x70, 0x71, 0x72, 0x73, 0x74, 0x75, 0x76, 0x77, + 0x78, 0x79, 0x7A, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66, 0x67, + 0x68, 0x69, 0x6A, 0x6B, 0x6C, 0x6D, 0x6E, 0x6F, + 0x70, 0x71, 0x72, 0x73, 0x74, 0x75, 0x76, 0x77, + 0x78, 0x79, 0x7A, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0xF1, 0xF2, 0xF3, 0xF4, 0xF5, 0xF6, 0xF7, + 0xF8, 0xF9, 0xFA, 0xFB, 0xFC, 0x20, 0xFE, 0xFF, + 0xD0, 0xD1, 0xD2, 0xD3, 0xD4, 0xD5, 0xD6, 0xD7, + 0xD8, 0xD9, 0xDA, 0xDB, 0xDC, 0xDD, 0xDE, 0xDF, + 0xE0, 0xE1, 0xE2, 0xE3, 0xE4, 0xE5, 0xE6, 0xE7, + 0xE8, 0xE9, 0xEA, 0xEB, 0xEC, 0xED, 0xEE, 0xEF, + 0xD0, 0xD1, 0xD2, 0xD3, 0xD4, 0xD5, 0xD6, 0xD7, + 0xD8, 0xD9, 0xDA, 0xDB, 0xDC, 0xDD, 0xDE, 0xDF, + 0xE0, 0xE1, 0xE2, 0xE3, 0xE4, 0xE5, 0xE6, 0xE7, + 0xE8, 0xE9, 0xEA, 0xEB, 0xEC, 0xED, 0xEE, 0xEF, + 0x20, 0xF1, 0xF2, 0xF3, 0xF4, 0xF5, 0xF6, 0xF7, + 0xF8, 0xF9, 0xFA, 0xFB, 0xFC, 0x20, 0xFE, 0xFF, +} + +var ngrams_8859_5_ru = [64]uint32{ + 0x20D220, 0x20D2DE, 0x20D4DE, 0x20D7D0, 0x20D820, 0x20DAD0, 0x20DADE, 0x20DDD0, 0x20DDD5, 0x20DED1, 0x20DFDE, 0x20DFE0, 0x20E0D0, 0x20E1DE, 0x20E1E2, 0x20E2DE, + 0x20E7E2, 0x20EDE2, 0xD0DDD8, 0xD0E2EC, 0xD3DE20, 0xD5DBEC, 0xD5DDD8, 0xD5E1E2, 0xD5E220, 0xD820DF, 0xD8D520, 0xD8D820, 0xD8EF20, 0xDBD5DD, 0xDBD820, 0xDBECDD, + 0xDDD020, 0xDDD520, 0xDDD8D5, 0xDDD8EF, 0xDDDE20, 0xDDDED2, 0xDE20D2, 0xDE20DF, 0xDE20E1, 0xDED220, 0xDED2D0, 0xDED3DE, 0xDED920, 0xDEDBEC, 0xDEDC20, 0xDEE1E2, + 0xDFDEDB, 0xDFE0D5, 0xDFE0D8, 0xDFE0DE, 0xE0D0D2, 0xE0D5D4, 0xE1E2D0, 0xE1E2D2, 0xE1E2D8, 0xE1EF20, 0xE2D5DB, 0xE2DE20, 0xE2DEE0, 0xE2EC20, 0xE7E2DE, 0xEBE520, +} + +func newRecognizer_8859_5(language string, ngram *[64]uint32) *recognizerSingleByte { + return &recognizerSingleByte{ + charset: "ISO-8859-5", + language: language, + charMap: &charMap_8859_5, + ngram: ngram, + } +} + +func newRecognizer_8859_5_ru() *recognizerSingleByte { + return newRecognizer_8859_5("ru", &ngrams_8859_5_ru) +} + +var charMap_8859_6 = [256]byte{ + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 
0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x00, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66, 0x67, + 0x68, 0x69, 0x6A, 0x6B, 0x6C, 0x6D, 0x6E, 0x6F, + 0x70, 0x71, 0x72, 0x73, 0x74, 0x75, 0x76, 0x77, + 0x78, 0x79, 0x7A, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66, 0x67, + 0x68, 0x69, 0x6A, 0x6B, 0x6C, 0x6D, 0x6E, 0x6F, + 0x70, 0x71, 0x72, 0x73, 0x74, 0x75, 0x76, 0x77, + 0x78, 0x79, 0x7A, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0xC1, 0xC2, 0xC3, 0xC4, 0xC5, 0xC6, 0xC7, + 0xC8, 0xC9, 0xCA, 0xCB, 0xCC, 0xCD, 0xCE, 0xCF, + 0xD0, 0xD1, 0xD2, 0xD3, 0xD4, 0xD5, 0xD6, 0xD7, + 0xD8, 0xD9, 0xDA, 0x20, 0x20, 0x20, 0x20, 0x20, + 0xE0, 0xE1, 0xE2, 0xE3, 0xE4, 0xE5, 0xE6, 0xE7, + 0xE8, 0xE9, 0xEA, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, +} + +var ngrams_8859_6_ar = [64]uint32{ + 0x20C7E4, 0x20C7E6, 0x20C8C7, 0x20D9E4, 0x20E1EA, 0x20E4E4, 0x20E5E6, 0x20E8C7, 0xC720C7, 0xC7C120, 0xC7CA20, 0xC7D120, 0xC7E420, 0xC7E4C3, 0xC7E4C7, 0xC7E4C8, + 0xC7E4CA, 0xC7E4CC, 0xC7E4CD, 0xC7E4CF, 0xC7E4D3, 0xC7E4D9, 0xC7E4E2, 0xC7E4E5, 0xC7E4E8, 0xC7E4EA, 0xC7E520, 0xC7E620, 0xC7E6CA, 0xC820C7, 0xC920C7, 0xC920E1, + 0xC920E4, 0xC920E5, 0xC920E8, 0xCA20C7, 0xCF20C7, 0xCFC920, 0xD120C7, 0xD1C920, 0xD320C7, 0xD920C7, 0xD9E4E9, 0xE1EA20, 0xE420C7, 0xE4C920, 0xE4E920, 0xE4EA20, + 0xE520C7, 0xE5C720, 0xE5C920, 0xE5E620, 0xE620C7, 0xE720C7, 0xE7C720, 0xE8C7E4, 0xE8E620, 0xE920C7, 0xEA20C7, 0xEA20E5, 0xEA20E8, 0xEAC920, 0xEAD120, 0xEAE620, +} + +func newRecognizer_8859_6(language string, ngram *[64]uint32) *recognizerSingleByte { + return &recognizerSingleByte{ + charset: "ISO-8859-6", + language: language, + charMap: &charMap_8859_6, + ngram: ngram, + } +} + +func newRecognizer_8859_6_ar() *recognizerSingleByte { + return newRecognizer_8859_6("ar", &ngrams_8859_6_ar) +} + +var charMap_8859_7 = [256]byte{ + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x00, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66, 0x67, + 0x68, 0x69, 0x6A, 0x6B, 0x6C, 0x6D, 0x6E, 0x6F, + 0x70, 0x71, 0x72, 0x73, 0x74, 0x75, 0x76, 0x77, + 0x78, 0x79, 0x7A, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66, 0x67, + 0x68, 0x69, 0x6A, 0x6B, 0x6C, 0x6D, 0x6E, 0x6F, + 0x70, 0x71, 0x72, 0x73, 0x74, 0x75, 0x76, 0x77, + 0x78, 0x79, 0x7A, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0xA1, 0xA2, 0x20, 
0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0xDC, 0x20, + 0xDD, 0xDE, 0xDF, 0x20, 0xFC, 0x20, 0xFD, 0xFE, + 0xC0, 0xE1, 0xE2, 0xE3, 0xE4, 0xE5, 0xE6, 0xE7, + 0xE8, 0xE9, 0xEA, 0xEB, 0xEC, 0xED, 0xEE, 0xEF, + 0xF0, 0xF1, 0x20, 0xF3, 0xF4, 0xF5, 0xF6, 0xF7, + 0xF8, 0xF9, 0xFA, 0xFB, 0xDC, 0xDD, 0xDE, 0xDF, + 0xE0, 0xE1, 0xE2, 0xE3, 0xE4, 0xE5, 0xE6, 0xE7, + 0xE8, 0xE9, 0xEA, 0xEB, 0xEC, 0xED, 0xEE, 0xEF, + 0xF0, 0xF1, 0xF2, 0xF3, 0xF4, 0xF5, 0xF6, 0xF7, + 0xF8, 0xF9, 0xFA, 0xFB, 0xFC, 0xFD, 0xFE, 0x20, +} + +var ngrams_8859_7_el = [64]uint32{ + 0x20E1ED, 0x20E1F0, 0x20E3E9, 0x20E4E9, 0x20E5F0, 0x20E720, 0x20EAE1, 0x20ECE5, 0x20EDE1, 0x20EF20, 0x20F0E1, 0x20F0EF, 0x20F0F1, 0x20F3F4, 0x20F3F5, 0x20F4E7, + 0x20F4EF, 0xDFE120, 0xE120E1, 0xE120F4, 0xE1E920, 0xE1ED20, 0xE1F0FC, 0xE1F220, 0xE3E9E1, 0xE5E920, 0xE5F220, 0xE720F4, 0xE7ED20, 0xE7F220, 0xE920F4, 0xE9E120, + 0xE9EADE, 0xE9F220, 0xEAE1E9, 0xEAE1F4, 0xECE520, 0xED20E1, 0xED20E5, 0xED20F0, 0xEDE120, 0xEFF220, 0xEFF520, 0xF0EFF5, 0xF0F1EF, 0xF0FC20, 0xF220E1, 0xF220E5, + 0xF220EA, 0xF220F0, 0xF220F4, 0xF3E520, 0xF3E720, 0xF3F4EF, 0xF4E120, 0xF4E1E9, 0xF4E7ED, 0xF4E7F2, 0xF4E9EA, 0xF4EF20, 0xF4EFF5, 0xF4F9ED, 0xF9ED20, 0xFEED20, +} + +func newRecognizer_8859_7(language string, ngram *[64]uint32) *recognizerSingleByte { + return &recognizerSingleByte{ + charset: "ISO-8859-7", + hasC1ByteCharset: "windows-1253", + language: language, + charMap: &charMap_8859_7, + ngram: ngram, + } +} + +func newRecognizer_8859_7_el() *recognizerSingleByte { + return newRecognizer_8859_7("el", &ngrams_8859_7_el) +} + +var charMap_8859_8 = [256]byte{ + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x00, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66, 0x67, + 0x68, 0x69, 0x6A, 0x6B, 0x6C, 0x6D, 0x6E, 0x6F, + 0x70, 0x71, 0x72, 0x73, 0x74, 0x75, 0x76, 0x77, + 0x78, 0x79, 0x7A, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66, 0x67, + 0x68, 0x69, 0x6A, 0x6B, 0x6C, 0x6D, 0x6E, 0x6F, + 0x70, 0x71, 0x72, 0x73, 0x74, 0x75, 0x76, 0x77, + 0x78, 0x79, 0x7A, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0xB5, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0xE0, 0xE1, 0xE2, 0xE3, 0xE4, 0xE5, 0xE6, 0xE7, + 0xE8, 0xE9, 0xEA, 0xEB, 0xEC, 0xED, 0xEE, 0xEF, + 0xF0, 0xF1, 0xF2, 0xF3, 0xF4, 0xF5, 0xF6, 0xF7, + 0xF8, 0xF9, 0xFA, 0x20, 0x20, 0x20, 0x20, 0x20, +} + +var ngrams_8859_8_I_he = [64]uint32{ + 0x20E0E5, 0x20E0E7, 0x20E0E9, 0x20E0FA, 0x20E1E9, 0x20E1EE, 0x20E4E0, 0x20E4E5, 0x20E4E9, 0x20E4EE, 0x20E4F2, 0x20E4F9, 0x20E4FA, 0x20ECE0, 0x20ECE4, 0x20EEE0, + 0x20F2EC, 0x20F9EC, 0xE0FA20, 0xE420E0, 0xE420E1, 0xE420E4, 0xE420EC, 
0xE420EE, 0xE420F9, 0xE4E5E0, 0xE5E020, 0xE5ED20, 0xE5EF20, 0xE5F820, 0xE5FA20, 0xE920E4, + 0xE9E420, 0xE9E5FA, 0xE9E9ED, 0xE9ED20, 0xE9EF20, 0xE9F820, 0xE9FA20, 0xEC20E0, 0xEC20E4, 0xECE020, 0xECE420, 0xED20E0, 0xED20E1, 0xED20E4, 0xED20EC, 0xED20EE, + 0xED20F9, 0xEEE420, 0xEF20E4, 0xF0E420, 0xF0E920, 0xF0E9ED, 0xF2EC20, 0xF820E4, 0xF8E9ED, 0xF9EC20, 0xFA20E0, 0xFA20E1, 0xFA20E4, 0xFA20EC, 0xFA20EE, 0xFA20F9, +} + +var ngrams_8859_8_he = [64]uint32{ + 0x20E0E5, 0x20E0EC, 0x20E4E9, 0x20E4EC, 0x20E4EE, 0x20E4F0, 0x20E9F0, 0x20ECF2, 0x20ECF9, 0x20EDE5, 0x20EDE9, 0x20EFE5, 0x20EFE9, 0x20F8E5, 0x20F8E9, 0x20FAE0, + 0x20FAE5, 0x20FAE9, 0xE020E4, 0xE020EC, 0xE020ED, 0xE020FA, 0xE0E420, 0xE0E5E4, 0xE0EC20, 0xE0EE20, 0xE120E4, 0xE120ED, 0xE120FA, 0xE420E4, 0xE420E9, 0xE420EC, + 0xE420ED, 0xE420EF, 0xE420F8, 0xE420FA, 0xE4EC20, 0xE5E020, 0xE5E420, 0xE7E020, 0xE9E020, 0xE9E120, 0xE9E420, 0xEC20E4, 0xEC20ED, 0xEC20FA, 0xECF220, 0xECF920, + 0xEDE9E9, 0xEDE9F0, 0xEDE9F8, 0xEE20E4, 0xEE20ED, 0xEE20FA, 0xEEE120, 0xEEE420, 0xF2E420, 0xF920E4, 0xF920ED, 0xF920FA, 0xF9E420, 0xFAE020, 0xFAE420, 0xFAE5E9, +} + +func newRecognizer_8859_8(language string, ngram *[64]uint32) *recognizerSingleByte { + return &recognizerSingleByte{ + charset: "ISO-8859-8", + hasC1ByteCharset: "windows-1255", + language: language, + charMap: &charMap_8859_8, + ngram: ngram, + } +} + +func newRecognizer_8859_8_I_he() *recognizerSingleByte { + r := newRecognizer_8859_8("he", &ngrams_8859_8_I_he) + r.charset = "ISO-8859-8-I" + return r +} + +func newRecognizer_8859_8_he() *recognizerSingleByte { + return newRecognizer_8859_8("he", &ngrams_8859_8_he) +} + +var charMap_8859_9 = [256]byte{ + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x00, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66, 0x67, + 0x68, 0x69, 0x6A, 0x6B, 0x6C, 0x6D, 0x6E, 0x6F, + 0x70, 0x71, 0x72, 0x73, 0x74, 0x75, 0x76, 0x77, + 0x78, 0x79, 0x7A, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66, 0x67, + 0x68, 0x69, 0x6A, 0x6B, 0x6C, 0x6D, 0x6E, 0x6F, + 0x70, 0x71, 0x72, 0x73, 0x74, 0x75, 0x76, 0x77, + 0x78, 0x79, 0x7A, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0xAA, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0xB5, 0x20, 0x20, + 0x20, 0x20, 0xBA, 0x20, 0x20, 0x20, 0x20, 0x20, + 0xE0, 0xE1, 0xE2, 0xE3, 0xE4, 0xE5, 0xE6, 0xE7, + 0xE8, 0xE9, 0xEA, 0xEB, 0xEC, 0xED, 0xEE, 0xEF, + 0xF0, 0xF1, 0xF2, 0xF3, 0xF4, 0xF5, 0xF6, 0x20, + 0xF8, 0xF9, 0xFA, 0xFB, 0xFC, 0x69, 0xFE, 0xDF, + 0xE0, 0xE1, 0xE2, 0xE3, 0xE4, 0xE5, 0xE6, 0xE7, + 0xE8, 0xE9, 0xEA, 0xEB, 0xEC, 0xED, 0xEE, 0xEF, + 0xF0, 0xF1, 0xF2, 0xF3, 0xF4, 0xF5, 0xF6, 0x20, + 0xF8, 0xF9, 0xFA, 0xFB, 0xFC, 0xFD, 0xFE, 0xFF, +} + +var ngrams_8859_9_tr = [64]uint32{ + 0x206261, 0x206269, 0x206275, 0x206461, 0x206465, 0x206765, 0x206861, 0x20696C, 0x206B61, 0x206B6F, 0x206D61, 0x206F6C, 0x207361, 0x207461, 0x207665, 0x207961, + 0x612062, 0x616B20, 0x616C61, 0x616D61, 0x616E20, 0x616EFD, 0x617220, 0x617261, 
0x6172FD, 0x6173FD, 0x617961, 0x626972, 0x646120, 0x646520, 0x646920, 0x652062, + 0x65206B, 0x656469, 0x656E20, 0x657220, 0x657269, 0x657369, 0x696C65, 0x696E20, 0x696E69, 0x697220, 0x6C616E, 0x6C6172, 0x6C6520, 0x6C6572, 0x6E2061, 0x6E2062, + 0x6E206B, 0x6E6461, 0x6E6465, 0x6E6520, 0x6E6920, 0x6E696E, 0x6EFD20, 0x72696E, 0x72FD6E, 0x766520, 0x796120, 0x796F72, 0xFD6E20, 0xFD6E64, 0xFD6EFD, 0xFDF0FD, +} + +func newRecognizer_8859_9(language string, ngram *[64]uint32) *recognizerSingleByte { + return &recognizerSingleByte{ + charset: "ISO-8859-9", + hasC1ByteCharset: "windows-1254", + language: language, + charMap: &charMap_8859_9, + ngram: ngram, + } +} + +func newRecognizer_8859_9_tr() *recognizerSingleByte { + return newRecognizer_8859_9("tr", &ngrams_8859_9_tr) +} + +var charMap_windows_1256 = [256]byte{ + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x00, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66, 0x67, + 0x68, 0x69, 0x6A, 0x6B, 0x6C, 0x6D, 0x6E, 0x6F, + 0x70, 0x71, 0x72, 0x73, 0x74, 0x75, 0x76, 0x77, + 0x78, 0x79, 0x7A, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66, 0x67, + 0x68, 0x69, 0x6A, 0x6B, 0x6C, 0x6D, 0x6E, 0x6F, + 0x70, 0x71, 0x72, 0x73, 0x74, 0x75, 0x76, 0x77, + 0x78, 0x79, 0x7A, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x81, 0x20, 0x83, 0x20, 0x20, 0x20, 0x20, + 0x88, 0x20, 0x8A, 0x20, 0x9C, 0x8D, 0x8E, 0x8F, + 0x90, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x98, 0x20, 0x9A, 0x20, 0x9C, 0x20, 0x20, 0x9F, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0xAA, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0xB5, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0xC0, 0xC1, 0xC2, 0xC3, 0xC4, 0xC5, 0xC6, 0xC7, + 0xC8, 0xC9, 0xCA, 0xCB, 0xCC, 0xCD, 0xCE, 0xCF, + 0xD0, 0xD1, 0xD2, 0xD3, 0xD4, 0xD5, 0xD6, 0x20, + 0xD8, 0xD9, 0xDA, 0xDB, 0xDC, 0xDD, 0xDE, 0xDF, + 0xE0, 0xE1, 0xE2, 0xE3, 0xE4, 0xE5, 0xE6, 0xE7, + 0xE8, 0xE9, 0xEA, 0xEB, 0xEC, 0xED, 0xEE, 0xEF, + 0x20, 0x20, 0x20, 0x20, 0xF4, 0x20, 0x20, 0x20, + 0x20, 0xF9, 0x20, 0xFB, 0xFC, 0x20, 0x20, 0xFF, +} + +var ngrams_windows_1256 = [64]uint32{ + 0x20C7E1, 0x20C7E4, 0x20C8C7, 0x20DAE1, 0x20DDED, 0x20E1E1, 0x20E3E4, 0x20E6C7, 0xC720C7, 0xC7C120, 0xC7CA20, 0xC7D120, 0xC7E120, 0xC7E1C3, 0xC7E1C7, 0xC7E1C8, + 0xC7E1CA, 0xC7E1CC, 0xC7E1CD, 0xC7E1CF, 0xC7E1D3, 0xC7E1DA, 0xC7E1DE, 0xC7E1E3, 0xC7E1E6, 0xC7E1ED, 0xC7E320, 0xC7E420, 0xC7E4CA, 0xC820C7, 0xC920C7, 0xC920DD, + 0xC920E1, 0xC920E3, 0xC920E6, 0xCA20C7, 0xCF20C7, 0xCFC920, 0xD120C7, 0xD1C920, 0xD320C7, 0xDA20C7, 0xDAE1EC, 0xDDED20, 0xE120C7, 0xE1C920, 0xE1EC20, 0xE1ED20, + 0xE320C7, 0xE3C720, 0xE3C920, 0xE3E420, 0xE420C7, 0xE520C7, 0xE5C720, 0xE6C7E1, 0xE6E420, 0xEC20C7, 0xED20C7, 0xED20E3, 0xED20E6, 0xEDC920, 0xEDD120, 0xEDE420, +} + +func newRecognizer_windows_1256() *recognizerSingleByte { + return &recognizerSingleByte{ + charset: "windows-1256", + language: "ar", + charMap: &charMap_windows_1256, + ngram: &ngrams_windows_1256, + } +} + +var charMap_windows_1251 = [256]byte{ + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 
0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x00, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66, 0x67, + 0x68, 0x69, 0x6A, 0x6B, 0x6C, 0x6D, 0x6E, 0x6F, + 0x70, 0x71, 0x72, 0x73, 0x74, 0x75, 0x76, 0x77, + 0x78, 0x79, 0x7A, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66, 0x67, + 0x68, 0x69, 0x6A, 0x6B, 0x6C, 0x6D, 0x6E, 0x6F, + 0x70, 0x71, 0x72, 0x73, 0x74, 0x75, 0x76, 0x77, + 0x78, 0x79, 0x7A, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x90, 0x83, 0x20, 0x83, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x9A, 0x20, 0x9C, 0x9D, 0x9E, 0x9F, + 0x90, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x9A, 0x20, 0x9C, 0x9D, 0x9E, 0x9F, + 0x20, 0xA2, 0xA2, 0xBC, 0x20, 0xB4, 0x20, 0x20, + 0xB8, 0x20, 0xBA, 0x20, 0x20, 0x20, 0x20, 0xBF, + 0x20, 0x20, 0xB3, 0xB3, 0xB4, 0xB5, 0x20, 0x20, + 0xB8, 0x20, 0xBA, 0x20, 0xBC, 0xBE, 0xBE, 0xBF, + 0xE0, 0xE1, 0xE2, 0xE3, 0xE4, 0xE5, 0xE6, 0xE7, + 0xE8, 0xE9, 0xEA, 0xEB, 0xEC, 0xED, 0xEE, 0xEF, + 0xF0, 0xF1, 0xF2, 0xF3, 0xF4, 0xF5, 0xF6, 0xF7, + 0xF8, 0xF9, 0xFA, 0xFB, 0xFC, 0xFD, 0xFE, 0xFF, + 0xE0, 0xE1, 0xE2, 0xE3, 0xE4, 0xE5, 0xE6, 0xE7, + 0xE8, 0xE9, 0xEA, 0xEB, 0xEC, 0xED, 0xEE, 0xEF, + 0xF0, 0xF1, 0xF2, 0xF3, 0xF4, 0xF5, 0xF6, 0xF7, + 0xF8, 0xF9, 0xFA, 0xFB, 0xFC, 0xFD, 0xFE, 0xFF, +} + +var ngrams_windows_1251 = [64]uint32{ + 0x20E220, 0x20E2EE, 0x20E4EE, 0x20E7E0, 0x20E820, 0x20EAE0, 0x20EAEE, 0x20EDE0, 0x20EDE5, 0x20EEE1, 0x20EFEE, 0x20EFF0, 0x20F0E0, 0x20F1EE, 0x20F1F2, 0x20F2EE, + 0x20F7F2, 0x20FDF2, 0xE0EDE8, 0xE0F2FC, 0xE3EE20, 0xE5EBFC, 0xE5EDE8, 0xE5F1F2, 0xE5F220, 0xE820EF, 0xE8E520, 0xE8E820, 0xE8FF20, 0xEBE5ED, 0xEBE820, 0xEBFCED, + 0xEDE020, 0xEDE520, 0xEDE8E5, 0xEDE8FF, 0xEDEE20, 0xEDEEE2, 0xEE20E2, 0xEE20EF, 0xEE20F1, 0xEEE220, 0xEEE2E0, 0xEEE3EE, 0xEEE920, 0xEEEBFC, 0xEEEC20, 0xEEF1F2, + 0xEFEEEB, 0xEFF0E5, 0xEFF0E8, 0xEFF0EE, 0xF0E0E2, 0xF0E5E4, 0xF1F2E0, 0xF1F2E2, 0xF1F2E8, 0xF1FF20, 0xF2E5EB, 0xF2EE20, 0xF2EEF0, 0xF2FC20, 0xF7F2EE, 0xFBF520, +} + +func newRecognizer_windows_1251() *recognizerSingleByte { + return &recognizerSingleByte{ + charset: "windows-1251", + language: "ar", + charMap: &charMap_windows_1251, + ngram: &ngrams_windows_1251, + } +} + +var charMap_KOI8_R = [256]byte{ + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x00, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66, 0x67, + 0x68, 0x69, 0x6A, 0x6B, 0x6C, 0x6D, 0x6E, 0x6F, + 0x70, 0x71, 0x72, 0x73, 0x74, 0x75, 0x76, 0x77, + 0x78, 0x79, 0x7A, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66, 0x67, + 0x68, 0x69, 0x6A, 0x6B, 0x6C, 0x6D, 0x6E, 0x6F, + 0x70, 0x71, 0x72, 0x73, 0x74, 0x75, 0x76, 0x77, + 0x78, 0x79, 0x7A, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0xA3, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0xA3, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 
0xC0, 0xC1, 0xC2, 0xC3, 0xC4, 0xC5, 0xC6, 0xC7, + 0xC8, 0xC9, 0xCA, 0xCB, 0xCC, 0xCD, 0xCE, 0xCF, + 0xD0, 0xD1, 0xD2, 0xD3, 0xD4, 0xD5, 0xD6, 0xD7, + 0xD8, 0xD9, 0xDA, 0xDB, 0xDC, 0xDD, 0xDE, 0xDF, + 0xC0, 0xC1, 0xC2, 0xC3, 0xC4, 0xC5, 0xC6, 0xC7, + 0xC8, 0xC9, 0xCA, 0xCB, 0xCC, 0xCD, 0xCE, 0xCF, + 0xD0, 0xD1, 0xD2, 0xD3, 0xD4, 0xD5, 0xD6, 0xD7, + 0xD8, 0xD9, 0xDA, 0xDB, 0xDC, 0xDD, 0xDE, 0xDF, +} + +var ngrams_KOI8_R = [64]uint32{ + 0x20C4CF, 0x20C920, 0x20CBC1, 0x20CBCF, 0x20CEC1, 0x20CEC5, 0x20CFC2, 0x20D0CF, 0x20D0D2, 0x20D2C1, 0x20D3CF, 0x20D3D4, 0x20D4CF, 0x20D720, 0x20D7CF, 0x20DAC1, + 0x20DCD4, 0x20DED4, 0xC1CEC9, 0xC1D4D8, 0xC5CCD8, 0xC5CEC9, 0xC5D3D4, 0xC5D420, 0xC7CF20, 0xC920D0, 0xC9C520, 0xC9C920, 0xC9D120, 0xCCC5CE, 0xCCC920, 0xCCD8CE, + 0xCEC120, 0xCEC520, 0xCEC9C5, 0xCEC9D1, 0xCECF20, 0xCECFD7, 0xCF20D0, 0xCF20D3, 0xCF20D7, 0xCFC7CF, 0xCFCA20, 0xCFCCD8, 0xCFCD20, 0xCFD3D4, 0xCFD720, 0xCFD7C1, + 0xD0CFCC, 0xD0D2C5, 0xD0D2C9, 0xD0D2CF, 0xD2C1D7, 0xD2C5C4, 0xD3D120, 0xD3D4C1, 0xD3D4C9, 0xD3D4D7, 0xD4C5CC, 0xD4CF20, 0xD4CFD2, 0xD4D820, 0xD9C820, 0xDED4CF, +} + +func newRecognizer_KOI8_R() *recognizerSingleByte { + return &recognizerSingleByte{ + charset: "KOI8-R", + language: "ru", + charMap: &charMap_KOI8_R, + ngram: &ngrams_KOI8_R, + } +} + +var charMap_IBM424_he = [256]byte{ + /* -0 -1 -2 -3 -4 -5 -6 -7 -8 -9 -A -B -C -D -E -F */ + /* 0- */ 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, + /* 1- */ 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, + /* 2- */ 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, + /* 3- */ 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, + /* 4- */ 0x40, 0x41, 0x42, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48, 0x49, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, + /* 5- */ 0x40, 0x51, 0x52, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58, 0x59, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, + /* 6- */ 0x40, 0x40, 0x62, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68, 0x69, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, + /* 7- */ 0x40, 0x71, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x00, 0x40, 0x40, + /* 8- */ 0x40, 0x81, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87, 0x88, 0x89, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, + /* 9- */ 0x40, 0x91, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97, 0x98, 0x99, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, + /* A- */ 0xA0, 0x40, 0xA2, 0xA3, 0xA4, 0xA5, 0xA6, 0xA7, 0xA8, 0xA9, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, + /* B- */ 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, + /* C- */ 0x40, 0x81, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87, 0x88, 0x89, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, + /* D- */ 0x40, 0x91, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97, 0x98, 0x99, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, + /* E- */ 0x40, 0x40, 0xA2, 0xA3, 0xA4, 0xA5, 0xA6, 0xA7, 0xA8, 0xA9, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, + /* F- */ 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, +} + +var ngrams_IBM424_he_rtl = [64]uint32{ + 0x404146, 0x404148, 0x404151, 0x404171, 0x404251, 0x404256, 0x404541, 0x404546, 0x404551, 0x404556, 0x404562, 0x404569, 0x404571, 0x405441, 0x405445, 0x405641, + 0x406254, 0x406954, 0x417140, 0x454041, 0x454042, 0x454045, 0x454054, 0x454056, 0x454069, 0x454641, 0x464140, 0x465540, 0x465740, 0x466840, 0x467140, 0x514045, + 0x514540, 0x514671, 0x515155, 0x515540, 0x515740, 0x516840, 0x517140, 0x544041, 0x544045, 0x544140, 
0x544540, 0x554041, 0x554042, 0x554045, 0x554054, 0x554056, + 0x554069, 0x564540, 0x574045, 0x584540, 0x585140, 0x585155, 0x625440, 0x684045, 0x685155, 0x695440, 0x714041, 0x714042, 0x714045, 0x714054, 0x714056, 0x714069, +} + +var ngrams_IBM424_he_ltr = [64]uint32{ + 0x404146, 0x404154, 0x404551, 0x404554, 0x404556, 0x404558, 0x405158, 0x405462, 0x405469, 0x405546, 0x405551, 0x405746, 0x405751, 0x406846, 0x406851, 0x407141, + 0x407146, 0x407151, 0x414045, 0x414054, 0x414055, 0x414071, 0x414540, 0x414645, 0x415440, 0x415640, 0x424045, 0x424055, 0x424071, 0x454045, 0x454051, 0x454054, + 0x454055, 0x454057, 0x454068, 0x454071, 0x455440, 0x464140, 0x464540, 0x484140, 0x514140, 0x514240, 0x514540, 0x544045, 0x544055, 0x544071, 0x546240, 0x546940, + 0x555151, 0x555158, 0x555168, 0x564045, 0x564055, 0x564071, 0x564240, 0x564540, 0x624540, 0x694045, 0x694055, 0x694071, 0x694540, 0x714140, 0x714540, 0x714651, +} + +func newRecognizer_IBM424_he(charset string, ngram *[64]uint32) *recognizerSingleByte { + return &recognizerSingleByte{ + charset: charset, + language: "he", + charMap: &charMap_IBM424_he, + ngram: ngram, + } +} + +func newRecognizer_IBM424_he_rtl() *recognizerSingleByte { + return newRecognizer_IBM424_he("IBM424_rtl", &ngrams_IBM424_he_rtl) +} + +func newRecognizer_IBM424_he_ltr() *recognizerSingleByte { + return newRecognizer_IBM424_he("IBM424_ltr", &ngrams_IBM424_he_ltr) +} + +var charMap_IBM420_ar = [256]byte{ + /* -0 -1 -2 -3 -4 -5 -6 -7 -8 -9 -A -B -C -D -E -F */ + /* 0- */ 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, + /* 1- */ 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, + /* 2- */ 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, + /* 3- */ 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, + /* 4- */ 0x40, 0x40, 0x42, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48, 0x49, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, + /* 5- */ 0x40, 0x51, 0x52, 0x40, 0x40, 0x55, 0x56, 0x57, 0x58, 0x59, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, + /* 6- */ 0x40, 0x40, 0x62, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68, 0x69, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, + /* 7- */ 0x70, 0x71, 0x72, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78, 0x79, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, + /* 8- */ 0x80, 0x81, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87, 0x88, 0x89, 0x8A, 0x8B, 0x8C, 0x8D, 0x8E, 0x8F, + /* 9- */ 0x90, 0x91, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97, 0x98, 0x99, 0x9A, 0x9B, 0x9C, 0x9D, 0x9E, 0x9F, + /* A- */ 0xA0, 0x40, 0xA2, 0xA3, 0xA4, 0xA5, 0xA6, 0xA7, 0xA8, 0xA9, 0xAA, 0xAB, 0xAC, 0xAD, 0xAE, 0xAF, + /* B- */ 0xB0, 0xB1, 0xB2, 0xB3, 0xB4, 0xB5, 0x40, 0x40, 0xB8, 0xB9, 0xBA, 0xBB, 0xBC, 0xBD, 0xBE, 0xBF, + /* C- */ 0x40, 0x81, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87, 0x88, 0x89, 0x40, 0xCB, 0x40, 0xCD, 0x40, 0xCF, + /* D- */ 0x40, 0x91, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97, 0x98, 0x99, 0xDA, 0xDB, 0xDC, 0xDD, 0xDE, 0xDF, + /* E- */ 0x40, 0x40, 0xA2, 0xA3, 0xA4, 0xA5, 0xA6, 0xA7, 0xA8, 0xA9, 0xEA, 0xEB, 0x40, 0xED, 0xEE, 0xEF, + /* F- */ 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0x40, 0xFB, 0xFC, 0xFD, 0xFE, 0x40, +} + +var ngrams_IBM420_ar_rtl = [64]uint32{ + 0x4056B1, 0x4056BD, 0x405856, 0x409AB1, 0x40ABDC, 0x40B1B1, 0x40BBBD, 0x40CF56, 0x564056, 0x564640, 0x566340, 0x567540, 0x56B140, 0x56B149, 0x56B156, 0x56B158, + 0x56B163, 0x56B167, 0x56B169, 0x56B173, 0x56B178, 0x56B19A, 0x56B1AD, 0x56B1BB, 0x56B1CF, 0x56B1DC, 0x56BB40, 0x56BD40, 0x56BD63, 
0x584056, 0x624056, 0x6240AB, + 0x6240B1, 0x6240BB, 0x6240CF, 0x634056, 0x734056, 0x736240, 0x754056, 0x756240, 0x784056, 0x9A4056, 0x9AB1DA, 0xABDC40, 0xB14056, 0xB16240, 0xB1DA40, 0xB1DC40, + 0xBB4056, 0xBB5640, 0xBB6240, 0xBBBD40, 0xBD4056, 0xBF4056, 0xBF5640, 0xCF56B1, 0xCFBD40, 0xDA4056, 0xDC4056, 0xDC40BB, 0xDC40CF, 0xDC6240, 0xDC7540, 0xDCBD40, +} + +var ngrams_IBM420_ar_ltr = [64]uint32{ + 0x404656, 0x4056BB, 0x4056BF, 0x406273, 0x406275, 0x4062B1, 0x4062BB, 0x4062DC, 0x406356, 0x407556, 0x4075DC, 0x40B156, 0x40BB56, 0x40BD56, 0x40BDBB, 0x40BDCF, + 0x40BDDC, 0x40DAB1, 0x40DCAB, 0x40DCB1, 0x49B156, 0x564056, 0x564058, 0x564062, 0x564063, 0x564073, 0x564075, 0x564078, 0x56409A, 0x5640B1, 0x5640BB, 0x5640BD, + 0x5640BF, 0x5640DA, 0x5640DC, 0x565840, 0x56B156, 0x56CF40, 0x58B156, 0x63B156, 0x63BD56, 0x67B156, 0x69B156, 0x73B156, 0x78B156, 0x9AB156, 0xAB4062, 0xADB156, + 0xB14062, 0xB15640, 0xB156CF, 0xB19A40, 0xB1B140, 0xBB4062, 0xBB40DC, 0xBBB156, 0xBD5640, 0xBDBB40, 0xCF4062, 0xCF40DC, 0xCFB156, 0xDAB19A, 0xDCAB40, 0xDCB156, +} + +func newRecognizer_IBM420_ar(charset string, ngram *[64]uint32) *recognizerSingleByte { + return &recognizerSingleByte{ + charset: charset, + language: "ar", + charMap: &charMap_IBM420_ar, + ngram: ngram, + } +} + +func newRecognizer_IBM420_ar_rtl() *recognizerSingleByte { + return newRecognizer_IBM420_ar("IBM420_rtl", &ngrams_IBM420_ar_rtl) +} + +func newRecognizer_IBM420_ar_ltr() *recognizerSingleByte { + return newRecognizer_IBM420_ar("IBM420_ltr", &ngrams_IBM420_ar_ltr) +} diff --git a/vendor/github.com/gogs/chardet/unicode.go b/vendor/github.com/gogs/chardet/unicode.go new file mode 100644 index 0000000000..6f9fa9e67f --- /dev/null +++ b/vendor/github.com/gogs/chardet/unicode.go @@ -0,0 +1,103 @@ +package chardet + +import ( + "bytes" +) + +var ( + utf16beBom = []byte{0xFE, 0xFF} + utf16leBom = []byte{0xFF, 0xFE} + utf32beBom = []byte{0x00, 0x00, 0xFE, 0xFF} + utf32leBom = []byte{0xFF, 0xFE, 0x00, 0x00} +) + +type recognizerUtf16be struct { +} + +func newRecognizer_utf16be() *recognizerUtf16be { + return &recognizerUtf16be{} +} + +func (*recognizerUtf16be) Match(input *recognizerInput) (output recognizerOutput) { + output = recognizerOutput{ + Charset: "UTF-16BE", + } + if bytes.HasPrefix(input.raw, utf16beBom) { + output.Confidence = 100 + } + return +} + +type recognizerUtf16le struct { +} + +func newRecognizer_utf16le() *recognizerUtf16le { + return &recognizerUtf16le{} +} + +func (*recognizerUtf16le) Match(input *recognizerInput) (output recognizerOutput) { + output = recognizerOutput{ + Charset: "UTF-16LE", + } + if bytes.HasPrefix(input.raw, utf16leBom) && !bytes.HasPrefix(input.raw, utf32leBom) { + output.Confidence = 100 + } + return +} + +type recognizerUtf32 struct { + name string + bom []byte + decodeChar func(input []byte) uint32 +} + +func decodeUtf32be(input []byte) uint32 { + return uint32(input[0])<<24 | uint32(input[1])<<16 | uint32(input[2])<<8 | uint32(input[3]) +} + +func decodeUtf32le(input []byte) uint32 { + return uint32(input[3])<<24 | uint32(input[2])<<16 | uint32(input[1])<<8 | uint32(input[0]) +} + +func newRecognizer_utf32be() *recognizerUtf32 { + return &recognizerUtf32{ + "UTF-32BE", + utf32beBom, + decodeUtf32be, + } +} + +func newRecognizer_utf32le() *recognizerUtf32 { + return &recognizerUtf32{ + "UTF-32LE", + utf32leBom, + decodeUtf32le, + } +} + +func (r *recognizerUtf32) Match(input *recognizerInput) (output recognizerOutput) { + output = recognizerOutput{ + Charset: r.name, + } + hasBom := bytes.HasPrefix(input.raw, 
r.bom) + var numValid, numInvalid uint32 + for b := input.raw; len(b) >= 4; b = b[4:] { + if c := r.decodeChar(b); c >= 0x10FFFF || (c >= 0xD800 && c <= 0xDFFF) { + numInvalid++ + } else { + numValid++ + } + } + if hasBom && numInvalid == 0 { + output.Confidence = 100 + } else if hasBom && numValid > numInvalid*10 { + output.Confidence = 80 + } else if numValid > 3 && numInvalid == 0 { + output.Confidence = 100 + } else if numValid > 0 && numInvalid == 0 { + output.Confidence = 80 + } else if numValid > numInvalid*10 { + output.Confidence = 25 + } + return +} diff --git a/vendor/github.com/gogs/chardet/utf8.go b/vendor/github.com/gogs/chardet/utf8.go new file mode 100644 index 0000000000..ae036ad9bc --- /dev/null +++ b/vendor/github.com/gogs/chardet/utf8.go @@ -0,0 +1,71 @@ +package chardet + +import ( + "bytes" +) + +var utf8Bom = []byte{0xEF, 0xBB, 0xBF} + +type recognizerUtf8 struct { +} + +func newRecognizer_utf8() *recognizerUtf8 { + return &recognizerUtf8{} +} + +func (*recognizerUtf8) Match(input *recognizerInput) (output recognizerOutput) { + output = recognizerOutput{ + Charset: "UTF-8", + } + hasBom := bytes.HasPrefix(input.raw, utf8Bom) + inputLen := len(input.raw) + var numValid, numInvalid uint32 + var trailBytes uint8 + for i := 0; i < inputLen; i++ { + c := input.raw[i] + if c&0x80 == 0 { + continue + } + if c&0xE0 == 0xC0 { + trailBytes = 1 + } else if c&0xF0 == 0xE0 { + trailBytes = 2 + } else if c&0xF8 == 0xF0 { + trailBytes = 3 + } else { + numInvalid++ + if numInvalid > 5 { + break + } + trailBytes = 0 + } + + for i++; i < inputLen; i++ { + c = input.raw[i] + if c&0xC0 != 0x80 { + numInvalid++ + break + } + if trailBytes--; trailBytes == 0 { + numValid++ + break + } + } + } + + if hasBom && numInvalid == 0 { + output.Confidence = 100 + } else if hasBom && numValid > numInvalid*10 { + output.Confidence = 80 + } else if numValid > 3 && numInvalid == 0 { + output.Confidence = 100 + } else if numValid > 0 && numInvalid == 0 { + output.Confidence = 80 + } else if numValid == 0 && numInvalid == 0 { + // Plain ASCII + output.Confidence = 10 + } else if numValid > numInvalid*10 { + output.Confidence = 25 + } + return +} diff --git a/vendor/github.com/gorilla/css/LICENSE b/vendor/github.com/gorilla/css/LICENSE new file mode 100644 index 0000000000..ee0d53ceff --- /dev/null +++ b/vendor/github.com/gorilla/css/LICENSE @@ -0,0 +1,28 @@ +Copyright (c) 2023 The Gorilla Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + diff --git a/vendor/github.com/gorilla/css/scanner/doc.go b/vendor/github.com/gorilla/css/scanner/doc.go new file mode 100644 index 0000000000..f19850e15f --- /dev/null +++ b/vendor/github.com/gorilla/css/scanner/doc.go @@ -0,0 +1,33 @@ +// Copyright 2012 The Gorilla Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +/* +Package gorilla/css/scanner generates tokens for a CSS3 input. + +It follows the CSS3 specification located at: + + http://www.w3.org/TR/css3-syntax/ + +To use it, create a new scanner for a given CSS string and call Next() until +the token returned has type TokenEOF or TokenError: + + s := scanner.New(myCSS) + for { + token := s.Next() + if token.Type == scanner.TokenEOF || token.Type == scanner.TokenError { + break + } + // Do something with the token... + } + +Following the CSS3 specification, an error can only occur when the scanner +finds an unclosed quote or unclosed comment. In these cases the text becomes +"untokenizable". Everything else is tokenizable and it is up to a parser +to make sense of the token stream (or ignore nonsensical token sequences). + +Note: the scanner doesn't perform lexical analysis or, in other words, it +doesn't care about the token context. It is intended to be used by a +lexer or parser. +*/ +package scanner diff --git a/vendor/github.com/gorilla/css/scanner/scanner.go b/vendor/github.com/gorilla/css/scanner/scanner.go new file mode 100644 index 0000000000..25a7c6576e --- /dev/null +++ b/vendor/github.com/gorilla/css/scanner/scanner.go @@ -0,0 +1,360 @@ +// Copyright 2012 The Gorilla Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package scanner + +import ( + "fmt" + "regexp" + "strings" + "unicode" + "unicode/utf8" +) + +// tokenType identifies the type of lexical tokens. +type tokenType int + +// String returns a string representation of the token type. +func (t tokenType) String() string { + return tokenNames[t] +} + +// Token represents a token and the corresponding string. +type Token struct { + Type tokenType + Value string + Line int + Column int +} + +// String returns a string representation of the token. +func (t *Token) String() string { + if len(t.Value) > 10 { + return fmt.Sprintf("%s (line: %d, column: %d): %.10q...", + t.Type, t.Line, t.Column, t.Value) + } + return fmt.Sprintf("%s (line: %d, column: %d): %q", + t.Type, t.Line, t.Column, t.Value) +} + +// All tokens ----------------------------------------------------------------- + +// The complete list of tokens in CSS3. +const ( + // Scanner flags. + TokenError tokenType = iota + TokenEOF + // From now on, only tokens from the CSS specification. 
+ TokenIdent + TokenAtKeyword + TokenString + TokenHash + TokenNumber + TokenPercentage + TokenDimension + TokenURI + TokenUnicodeRange + TokenCDO + TokenCDC + TokenS + TokenComment + TokenFunction + TokenIncludes + TokenDashMatch + TokenPrefixMatch + TokenSuffixMatch + TokenSubstringMatch + TokenChar + TokenBOM +) + +// tokenNames maps tokenType's to their names. Used for conversion to string. +var tokenNames = map[tokenType]string{ + TokenError: "error", + TokenEOF: "EOF", + TokenIdent: "IDENT", + TokenAtKeyword: "ATKEYWORD", + TokenString: "STRING", + TokenHash: "HASH", + TokenNumber: "NUMBER", + TokenPercentage: "PERCENTAGE", + TokenDimension: "DIMENSION", + TokenURI: "URI", + TokenUnicodeRange: "UNICODE-RANGE", + TokenCDO: "CDO", + TokenCDC: "CDC", + TokenS: "S", + TokenComment: "COMMENT", + TokenFunction: "FUNCTION", + TokenIncludes: "INCLUDES", + TokenDashMatch: "DASHMATCH", + TokenPrefixMatch: "PREFIXMATCH", + TokenSuffixMatch: "SUFFIXMATCH", + TokenSubstringMatch: "SUBSTRINGMATCH", + TokenChar: "CHAR", + TokenBOM: "BOM", +} + +// Macros and productions ------------------------------------------------------ +// http://www.w3.org/TR/css3-syntax/#tokenization + +var macroRegexp = regexp.MustCompile(`\{[a-z]+\}`) + +// macros maps macro names to patterns to be expanded. +var macros = map[string]string{ + // must be escaped: `\.+*?()|[]{}^$` + "ident": `-?{nmstart}{nmchar}*`, + "name": `{nmchar}+`, + "nmstart": `[a-zA-Z_]|{nonascii}|{escape}`, + "nonascii": "[\u0080-\uD7FF\uE000-\uFFFD\U00010000-\U0010FFFF]", + "unicode": `\\[0-9a-fA-F]{1,6}{wc}?`, + "escape": "{unicode}|\\\\[\u0020-\u007E\u0080-\uD7FF\uE000-\uFFFD\U00010000-\U0010FFFF]", + "nmchar": `[a-zA-Z0-9_-]|{nonascii}|{escape}`, + "num": `[0-9]*\.[0-9]+|[0-9]+`, + "string": `"(?:{stringchar}|')*"|'(?:{stringchar}|")*'`, + "stringchar": `{urlchar}|[ ]|\\{nl}`, + "nl": `[\n\r\f]|\r\n`, + "w": `{wc}*`, + "wc": `[\t\n\f\r ]`, + + // urlchar should accept [(ascii characters minus those that need escaping)|{nonascii}|{escape}] + // ASCII characters range = `[\u0020-\u007e]` + // Skip space \u0020 = `[\u0021-\u007e]` + // Skip quotation mark \0022 = `[\u0021\u0023-\u007e]` + // Skip apostrophe \u0027 = `[\u0021\u0023-\u0026\u0028-\u007e]` + // Skip reverse solidus \u005c = `[\u0021\u0023-\u0026\u0028-\u005b\u005d\u007e]` + // Finally, the left square bracket (\u005b) and right (\u005d) needs escaping themselves + "urlchar": "[\u0021\u0023-\u0026\u0028-\\\u005b\\\u005d-\u007E]|{nonascii}|{escape}", +} + +// productions maps the list of tokens to patterns to be expanded. +var productions = map[tokenType]string{ + // Unused regexps (matched using other methods) are commented out. + TokenIdent: `{ident}`, + TokenAtKeyword: `@{ident}`, + TokenString: `{string}`, + TokenHash: `#{name}`, + TokenNumber: `{num}`, + TokenPercentage: `{num}%`, + TokenDimension: `{num}{ident}`, + TokenURI: `url\({w}(?:{string}|{urlchar}*?){w}\)`, + TokenUnicodeRange: `U\+[0-9A-F\?]{1,6}(?:-[0-9A-F]{1,6})?`, + //TokenCDO: `<!--`, + //TokenCDC: `-->`, + TokenS: `{wc}+`, + TokenComment: `/\*[^\*]*[\*]+(?:[^/][^\*]*[\*]+)*/`, + TokenFunction: `{ident}\(`, + //TokenIncludes: `~=`, + //TokenDashMatch: `\|=`, + //TokenPrefixMatch: `\^=`, + //TokenSuffixMatch: `\$=`, + //TokenSubstringMatch: `\*=`, + //TokenChar: `[^"']`, + //TokenBOM: "\uFEFF", +} + +// matchers maps the list of tokens to compiled regular expressions. +// +// The map is filled on init() using the macros and productions defined in +// the CSS specification.
+var matchers = map[tokenType]*regexp.Regexp{} + +// matchOrder is the order to test regexps when first-char shortcuts +// can't be used. +var matchOrder = []tokenType{ + TokenURI, + TokenFunction, + TokenUnicodeRange, + TokenIdent, + TokenDimension, + TokenPercentage, + TokenNumber, + TokenCDC, +} + +func init() { + // replace macros and compile regexps for productions. + replaceMacro := func(s string) string { + return "(?:" + macros[s[1:len(s)-1]] + ")" + } + for t, s := range productions { + for macroRegexp.MatchString(s) { + s = macroRegexp.ReplaceAllStringFunc(s, replaceMacro) + } + matchers[t] = regexp.MustCompile("^(?:" + s + ")") + } +} + +// Scanner -------------------------------------------------------------------- + +// New returns a new CSS scanner for the given input. +func New(input string) *Scanner { + // Normalize newlines. + // https://www.w3.org/TR/css-syntax-3/#input-preprocessing + input = strings.Replace(input, "\r\n", "\n", -1) + input = strings.Replace(input, "\r", "\n", -1) + input = strings.Replace(input, "\f", "\n", -1) + input = strings.Replace(input, "\u0000", "\ufffd", -1) + return &Scanner{ + input: input, + row: 1, + col: 1, + } +} + +// Scanner scans an input and emits tokens following the CSS3 specification. +type Scanner struct { + input string + pos int + row int + col int + err *Token +} + +// Next returns the next token from the input. +// +// At the end of the input the token type is TokenEOF. +// +// If the input can't be tokenized the token type is TokenError. This occurs +// in case of unclosed quotation marks or comments. +func (s *Scanner) Next() *Token { + if s.err != nil { + return s.err + } + if s.pos >= len(s.input) { + s.err = &Token{TokenEOF, "", s.row, s.col} + return s.err + } + if s.pos == 0 { + // Test BOM only once, at the beginning of the file. + if strings.HasPrefix(s.input, "\uFEFF") { + return s.emitSimple(TokenBOM, "\uFEFF") + } + } + // There's a lot we can guess based on the first byte so we'll take a + // shortcut before testing multiple regexps. + input := s.input[s.pos:] + switch input[0] { + case '\t', '\n', ' ': + // Whitespace. + return s.emitToken(TokenS, matchers[TokenS].FindString(input)) + case '.': + // Dot is too common to not have a quick check. + // We'll test if this is a Char; if it is followed by a number it is a + // dimension/percentage/number, and this will be matched later. + if len(input) > 1 && !unicode.IsDigit(rune(input[1])) { + return s.emitSimple(TokenChar, ".") + } + case '#': + // Another common one: Hash or Char. + if match := matchers[TokenHash].FindString(input); match != "" { + return s.emitToken(TokenHash, match) + } + return s.emitSimple(TokenChar, "#") + case '@': + // Another common one: AtKeyword or Char. + if match := matchers[TokenAtKeyword].FindString(input); match != "" { + return s.emitSimple(TokenAtKeyword, match) + } + return s.emitSimple(TokenChar, "@") + case ':', ',', ';', '%', '&', '+', '=', '>', '(', ')', '[', ']', '{', '}': + // More common chars. + return s.emitSimple(TokenChar, string(input[0])) + case '"', '\'': + // String or error. + match := matchers[TokenString].FindString(input) + if match != "" { + return s.emitToken(TokenString, match) + } + + s.err = &Token{TokenError, "unclosed quotation mark", s.row, s.col} + return s.err + case '/': + // Comment, error or Char. 
+ if len(input) > 1 && input[1] == '*' { + match := matchers[TokenComment].FindString(input) + if match != "" { + return s.emitToken(TokenComment, match) + } else { + s.err = &Token{TokenError, "unclosed comment", s.row, s.col} + return s.err + } + } + return s.emitSimple(TokenChar, "/") + case '~': + // Includes or Char. + return s.emitPrefixOrChar(TokenIncludes, "~=") + case '|': + // DashMatch or Char. + return s.emitPrefixOrChar(TokenDashMatch, "|=") + case '^': + // PrefixMatch or Char. + return s.emitPrefixOrChar(TokenPrefixMatch, "^=") + case '$': + // SuffixMatch or Char. + return s.emitPrefixOrChar(TokenSuffixMatch, "$=") + case '*': + // SubstringMatch or Char. + return s.emitPrefixOrChar(TokenSubstringMatch, "*=") + case '<': + // CDO or Char. + return s.emitPrefixOrChar(TokenCDO, " which includes the use of that to permit +// conditionals as per https://docs.microsoft.com/en-us/previous-versions/windows/internet-explorer/ie-developer/compatibility/ms537512(v=vs.85)?redirectedfrom=MSDN +// +// What is not permitted are CDATA XML comments, as the x/net/html package we depend +// on does not handle this fully and we are not choosing to take on that work: +// https://pkg.go.dev/golang.org/x/net/html#Tokenizer.AllowCDATA . If the x/net/html +// package changes this then these will be considered, otherwise if you AllowComments +// but provide a CDATA comment, then as per the documentation in x/net/html this will +// be treated as a plain HTML comment. +func (p *Policy) AllowComments() { + p.allowComments = true +} + +// AllowNoAttrs says that attributes on element are optional. +// +// The attribute policy is only added to the core policy when OnElements(...) +// are called. +func (p *Policy) AllowNoAttrs() *attrPolicyBuilder { + + p.init() + + abp := attrPolicyBuilder{ + p: p, + allowEmpty: true, + } + return &abp +} + +// AllowNoAttrs says that attributes on element are optional. +// +// The attribute policy is only added to the core policy when OnElements(...) +// are called. +func (abp *attrPolicyBuilder) AllowNoAttrs() *attrPolicyBuilder { + + abp.allowEmpty = true + + return abp +} + +// Matching allows a regular expression to be applied to a nascent attribute +// policy, and returns the attribute policy. 
+func (abp *attrPolicyBuilder) Matching(regex *regexp.Regexp) *attrPolicyBuilder { + + abp.regexp = regex + + return abp +} + +// OnElements will bind an attribute policy to a given range of HTML elements +// and return the updated policy +func (abp *attrPolicyBuilder) OnElements(elements ...string) *Policy { + + for _, element := range elements { + element = strings.ToLower(element) + + for _, attr := range abp.attrNames { + + if _, ok := abp.p.elsAndAttrs[element]; !ok { + abp.p.elsAndAttrs[element] = make(map[string][]attrPolicy) + } + + ap := attrPolicy{} + if abp.regexp != nil { + ap.regexp = abp.regexp + } + + abp.p.elsAndAttrs[element][attr] = append(abp.p.elsAndAttrs[element][attr], ap) + } + + if abp.allowEmpty { + abp.p.setOfElementsAllowedWithoutAttrs[element] = struct{}{} + + if _, ok := abp.p.elsAndAttrs[element]; !ok { + abp.p.elsAndAttrs[element] = make(map[string][]attrPolicy) + } + } + } + + return abp.p +} + +// OnElementsMatching will bind an attribute policy to all elements matching a given regex +// and return the updated policy +func (abp *attrPolicyBuilder) OnElementsMatching(regex *regexp.Regexp) *Policy { + for _, attr := range abp.attrNames { + if _, ok := abp.p.elsMatchingAndAttrs[regex]; !ok { + abp.p.elsMatchingAndAttrs[regex] = make(map[string][]attrPolicy) + } + ap := attrPolicy{} + if abp.regexp != nil { + ap.regexp = abp.regexp + } + abp.p.elsMatchingAndAttrs[regex][attr] = append(abp.p.elsMatchingAndAttrs[regex][attr], ap) + } + + if abp.allowEmpty { + abp.p.setOfElementsMatchingAllowedWithoutAttrs = append(abp.p.setOfElementsMatchingAllowedWithoutAttrs, regex) + if _, ok := abp.p.elsMatchingAndAttrs[regex]; !ok { + abp.p.elsMatchingAndAttrs[regex] = make(map[string][]attrPolicy) + } + } + + return abp.p +} + +// Globally will bind an attribute policy to all HTML elements and return the +// updated policy +func (abp *attrPolicyBuilder) Globally() *Policy { + + for _, attr := range abp.attrNames { + if _, ok := abp.p.globalAttrs[attr]; !ok { + abp.p.globalAttrs[attr] = []attrPolicy{} + } + + ap := attrPolicy{} + if abp.regexp != nil { + ap.regexp = abp.regexp + } + + abp.p.globalAttrs[attr] = append(abp.p.globalAttrs[attr], ap) + } + + return abp.p +} + +// AllowStyles takes a range of CSS property names and returns a +// style policy builder that allows you to specify the pattern and scope of +// the allowed property. +// +// The style policy is only added to the core policy when either Globally() +// or OnElements(...) are called. +func (p *Policy) AllowStyles(propertyNames ...string) *stylePolicyBuilder { + + p.init() + + abp := stylePolicyBuilder{ + p: p, + } + + for _, propertyName := range propertyNames { + abp.propertyNames = append(abp.propertyNames, strings.ToLower(propertyName)) + } + + return &abp +} + +// Matching allows a regular expression to be applied to a nascent style +// policy, and returns the style policy. +func (spb *stylePolicyBuilder) Matching(regex *regexp.Regexp) *stylePolicyBuilder { + + spb.regexp = regex + + return spb +} + +// MatchingEnum allows a list of allowed values to be applied to a nascent style +// policy, and returns the style policy. +func (spb *stylePolicyBuilder) MatchingEnum(enum ...string) *stylePolicyBuilder { + + spb.enum = enum + + return spb +} + +// MatchingHandler allows a handler to be applied to a nascent style +// policy, and returns the style policy. 
+func (spb *stylePolicyBuilder) MatchingHandler(handler func(string) bool) *stylePolicyBuilder { + + spb.handler = handler + + return spb +} + +// OnElements will bind a style policy to a given range of HTML elements +// and return the updated policy +func (spb *stylePolicyBuilder) OnElements(elements ...string) *Policy { + + for _, element := range elements { + element = strings.ToLower(element) + + for _, attr := range spb.propertyNames { + + if _, ok := spb.p.elsAndStyles[element]; !ok { + spb.p.elsAndStyles[element] = make(map[string][]stylePolicy) + } + + sp := stylePolicy{} + if spb.handler != nil { + sp.handler = spb.handler + } else if len(spb.enum) > 0 { + sp.enum = spb.enum + } else if spb.regexp != nil { + sp.regexp = spb.regexp + } else { + sp.handler = css.GetDefaultHandler(attr) + } + spb.p.elsAndStyles[element][attr] = append(spb.p.elsAndStyles[element][attr], sp) + } + } + + return spb.p +} + +// OnElementsMatching will bind a style policy to any HTML elements matching the pattern +// and return the updated policy +func (spb *stylePolicyBuilder) OnElementsMatching(regex *regexp.Regexp) *Policy { + + for _, attr := range spb.propertyNames { + + if _, ok := spb.p.elsMatchingAndStyles[regex]; !ok { + spb.p.elsMatchingAndStyles[regex] = make(map[string][]stylePolicy) + } + + sp := stylePolicy{} + if spb.handler != nil { + sp.handler = spb.handler + } else if len(spb.enum) > 0 { + sp.enum = spb.enum + } else if spb.regexp != nil { + sp.regexp = spb.regexp + } else { + sp.handler = css.GetDefaultHandler(attr) + } + spb.p.elsMatchingAndStyles[regex][attr] = append(spb.p.elsMatchingAndStyles[regex][attr], sp) + } + + return spb.p +} + +// Globally will bind a style policy to all HTML elements and return the +// updated policy +func (spb *stylePolicyBuilder) Globally() *Policy { + + for _, attr := range spb.propertyNames { + if _, ok := spb.p.globalStyles[attr]; !ok { + spb.p.globalStyles[attr] = []stylePolicy{} + } + + // Use only one strategy for validating styles, fallback to default + sp := stylePolicy{} + if spb.handler != nil { + sp.handler = spb.handler + } else if len(spb.enum) > 0 { + sp.enum = spb.enum + } else if spb.regexp != nil { + sp.regexp = spb.regexp + } else { + sp.handler = css.GetDefaultHandler(attr) + } + spb.p.globalStyles[attr] = append(spb.p.globalStyles[attr], sp) + } + + return spb.p +} + +// AllowElements will append HTML elements to the allowlist without applying an +// attribute policy to those elements (the elements are permitted +// sans-attributes) +func (p *Policy) AllowElements(names ...string) *Policy { + p.init() + + for _, element := range names { + element = strings.ToLower(element) + + if _, ok := p.elsAndAttrs[element]; !ok { + p.elsAndAttrs[element] = make(map[string][]attrPolicy) + } + } + + return p +} + +// AllowElementsMatching will append HTML elements to the allowlist if they +// match a regexp. +func (p *Policy) AllowElementsMatching(regex *regexp.Regexp) *Policy { + p.init() + if _, ok := p.elsMatchingAndAttrs[regex]; !ok { + p.elsMatchingAndAttrs[regex] = make(map[string][]attrPolicy) + } + return p +} + +// AllowURLSchemesMatching will append URL schemes to the allowlist if they +// match a regexp. +func (p *Policy) AllowURLSchemesMatching(r *regexp.Regexp) *Policy { + p.allowURLSchemeRegexps = append(p.allowURLSchemeRegexps, r) + return p +} + +// RewriteSrc will rewrite the src attribute of a resource downloading tag +// (e.g. , tag. 
+func (p *Policy) addDefaultSkipElementContent() { + p.init() + + p.setOfElementsToSkipContent["frame"] = struct{}{} + p.setOfElementsToSkipContent["frameset"] = struct{}{} + p.setOfElementsToSkipContent["iframe"] = struct{}{} + p.setOfElementsToSkipContent["noembed"] = struct{}{} + p.setOfElementsToSkipContent["noframes"] = struct{}{} + p.setOfElementsToSkipContent["noscript"] = struct{}{} + p.setOfElementsToSkipContent["nostyle"] = struct{}{} + p.setOfElementsToSkipContent["object"] = struct{}{} + p.setOfElementsToSkipContent["script"] = struct{}{} + p.setOfElementsToSkipContent["style"] = struct{}{} + p.setOfElementsToSkipContent["title"] = struct{}{} +} diff --git a/vendor/github.com/microcosm-cc/bluemonday/sanitize.go b/vendor/github.com/microcosm-cc/bluemonday/sanitize.go new file mode 100644 index 0000000000..47c31f7daf --- /dev/null +++ b/vendor/github.com/microcosm-cc/bluemonday/sanitize.go @@ -0,0 +1,1096 @@ +// Copyright (c) 2014, David Kitchen +// +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are met: +// +// * Redistributions of source code must retain the above copyright notice, this +// list of conditions and the following disclaimer. +// +// * Redistributions in binary form must reproduce the above copyright notice, +// this list of conditions and the following disclaimer in the documentation +// and/or other materials provided with the distribution. +// +// * Neither the name of the organisation (Microcosm) nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +// FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +package bluemonday + +import ( + "bytes" + "fmt" + "io" + "net/url" + "regexp" + "strconv" + "strings" + + "golang.org/x/net/html" + + "github.com/aymerick/douceur/parser" +) + +var ( + dataAttribute = regexp.MustCompile("^data-.+") + dataAttributeXMLPrefix = regexp.MustCompile("^xml.+") + dataAttributeInvalidChars = regexp.MustCompile("[A-Z;]+") + cssUnicodeChar = regexp.MustCompile(`\\[0-9a-f]{1,6} ?`) + dataURIbase64Prefix = regexp.MustCompile(`^data:[^,]*;base64,`) +) + +// Sanitize takes a string that contains a HTML fragment or document and applies +// the given policy allowlist. 
+// +// It returns a HTML string that has been sanitized by the policy or an empty +// string if an error has occurred (most likely as a consequence of extremely +// malformed input) +func (p *Policy) Sanitize(s string) string { + if strings.TrimSpace(s) == "" { + return s + } + + return p.sanitizeWithBuff(strings.NewReader(s)).String() +} + +// SanitizeBytes takes a []byte that contains a HTML fragment or document and applies +// the given policy allowlist. +// +// It returns a []byte containing the HTML that has been sanitized by the policy +// or an empty []byte if an error has occurred (most likely as a consequence of +// extremely malformed input) +func (p *Policy) SanitizeBytes(b []byte) []byte { + if len(bytes.TrimSpace(b)) == 0 { + return b + } + + return p.sanitizeWithBuff(bytes.NewReader(b)).Bytes() +} + +// SanitizeReader takes an io.Reader that contains a HTML fragment or document +// and applies the given policy allowlist. +// +// It returns a bytes.Buffer containing the HTML that has been sanitized by the +// policy. Errors during sanitization will merely return an empty result. +func (p *Policy) SanitizeReader(r io.Reader) *bytes.Buffer { + return p.sanitizeWithBuff(r) +} + +// SanitizeReaderToWriter takes an io.Reader that contains a HTML fragment or document +// and applies the given policy allowlist and writes to the provided writer returning +// an error if there is one. +func (p *Policy) SanitizeReaderToWriter(r io.Reader, w io.Writer) error { + return p.sanitize(r, w) +} + +// Query represents a single part of the query string, a query param +type Query struct { + Key string + Value string + HasValue bool +} + +func parseQuery(query string) (values []Query, err error) { + // This is essentially a copy of parseQuery from + // https://golang.org/src/net/url/url.go but adjusted to build our values + // based on our type, which we need to preserve the ordering of the query + // string + for query != "" { + key := query + if i := strings.IndexAny(key, "&;"); i >= 0 { + key, query = key[:i], key[i+1:] + } else { + query = "" + } + if key == "" { + continue + } + value := "" + hasValue := false + if i := strings.Index(key, "="); i >= 0 { + key, value = key[:i], key[i+1:] + hasValue = true + } + key, err1 := url.QueryUnescape(key) + if err1 != nil { + if err == nil { + err = err1 + } + continue + } + value, err1 = url.QueryUnescape(value) + if err1 != nil { + if err == nil { + err = err1 + } + continue + } + values = append(values, Query{ + Key: key, + Value: value, + HasValue: hasValue, + }) + } + return values, err +} + +func encodeQueries(queries []Query) string { + var buff bytes.Buffer + for i, query := range queries { + buff.WriteString(url.QueryEscape(query.Key)) + if query.HasValue { + buff.WriteString("=") + buff.WriteString(url.QueryEscape(query.Value)) + } + if i < len(queries)-1 { + buff.WriteString("&") + } + } + return buff.String() +} + +func sanitizedURL(val string) (string, error) { + u, err := url.Parse(val) + if err != nil { + return "", err + } + + // we use parseQuery but not u.Query to keep the order not change because + // url.Values is a map which has a random order. + queryValues, err := parseQuery(u.RawQuery) + if err != nil { + return "", err + } + // sanitize the url query params + for i, query := range queryValues { + queryValues[i].Key = html.EscapeString(query.Key) + } + u.RawQuery = encodeQueries(queryValues) + // u.String() will also sanitize host/scheme/user/pass + return u.String(), nil +} + +// Performs the actual sanitization process. 
+func (p *Policy) sanitizeWithBuff(r io.Reader) *bytes.Buffer { + var buff bytes.Buffer + if err := p.sanitize(r, &buff); err != nil { + return &bytes.Buffer{} + } + return &buff +} + +type asStringWriter struct { + io.Writer +} + +func (a *asStringWriter) WriteString(s string) (int, error) { + return a.Write([]byte(s)) +} + +func (p *Policy) sanitize(r io.Reader, w io.Writer) error { + // It is possible that the developer has created the policy via: + // p := bluemonday.Policy{} + // rather than: + // p := bluemonday.NewPolicy() + // If this is the case, and if they haven't yet triggered an action that + // would initialize the maps, then we need to do that. + p.init() + + buff, ok := w.(stringWriterWriter) + if !ok { + buff = &asStringWriter{w} + } + + var ( + skipElementContent bool + skippingElementsCount int64 + skipClosingTag bool + closingTagToSkipStack []string + mostRecentlyStartedToken string + ) + + tokenizer := html.NewTokenizer(r) + for { + if tokenizer.Next() == html.ErrorToken { + err := tokenizer.Err() + if err == io.EOF { + // End of input means end of processing + return nil + } + + // Raw tokenizer error + return err + } + + token := tokenizer.Token() + switch token.Type { + case html.DoctypeToken: + + // DocType is not handled as there is no safe parsing mechanism + // provided by golang.org/x/net/html for the content, and this can + // be misused to insert HTML tags that are not then sanitized + // + // One might wish to recursively sanitize here using the same policy + // but I will need to do some further testing before considering + // this. + + case html.CommentToken: + + // Comments are ignored by default + if p.allowComments { + // But if allowed then write the comment out as-is + buff.WriteString(token.String()) + } + + case html.StartTagToken: + + mostRecentlyStartedToken = normaliseElementName(token.Data) + + switch normaliseElementName(token.Data) { + case `script`: + if !p.allowUnsafe { + continue + } + case `style`: + if !p.allowUnsafe { + continue + } + } + + aps, ok := p.elsAndAttrs[token.Data] + if !ok { + aa, matched := p.matchRegex(token.Data) + if !matched { + if _, ok := p.setOfElementsToSkipContent[token.Data]; ok { + skipElementContent = true + skippingElementsCount++ + } + if p.addSpaces { + if _, err := buff.WriteString(" "); err != nil { + return err + } + } + break + } + aps = aa + } + if len(token.Attr) != 0 { + token.Attr = p.sanitizeAttrs(token.Data, token.Attr, aps) + } + + if len(token.Attr) == 0 { + if !p.allowNoAttrs(token.Data) { + skipClosingTag = true + closingTagToSkipStack = append(closingTagToSkipStack, token.Data) + if p.addSpaces { + if _, err := buff.WriteString(" "); err != nil { + return err + } + } + break + } + } + + if !skipElementContent { + if _, err := buff.WriteString(token.String()); err != nil { + return err + } + } + + case html.EndTagToken: + + if mostRecentlyStartedToken == normaliseElementName(token.Data) { + mostRecentlyStartedToken = "" + } + + switch normaliseElementName(token.Data) { + case `script`: + if !p.allowUnsafe { + continue + } + case `style`: + if !p.allowUnsafe { + continue + } + } + + if skipClosingTag && closingTagToSkipStack[len(closingTagToSkipStack)-1] == token.Data { + closingTagToSkipStack = closingTagToSkipStack[:len(closingTagToSkipStack)-1] + if len(closingTagToSkipStack) == 0 { + skipClosingTag = false + } + if p.addSpaces { + if _, err := buff.WriteString(" "); err != nil { + return err + } + } + break + } + if _, ok := p.elsAndAttrs[token.Data]; !ok { + match := false + for regex := 
range p.elsMatchingAndAttrs { + if regex.MatchString(token.Data) { + skipElementContent = false + match = true + break + } + } + if _, ok := p.setOfElementsToSkipContent[token.Data]; ok && !match { + skippingElementsCount-- + if skippingElementsCount == 0 { + skipElementContent = false + } + } + if !match { + if p.addSpaces { + if _, err := buff.WriteString(" "); err != nil { + return err + } + } + break + } + } + + if !skipElementContent { + if _, err := buff.WriteString(token.String()); err != nil { + return err + } + } + + case html.SelfClosingTagToken: + + switch normaliseElementName(token.Data) { + case `script`: + if !p.allowUnsafe { + continue + } + case `style`: + if !p.allowUnsafe { + continue + } + } + + aps, ok := p.elsAndAttrs[token.Data] + if !ok { + aa, matched := p.matchRegex(token.Data) + if !matched { + if p.addSpaces && !matched { + if _, err := buff.WriteString(" "); err != nil { + return err + } + } + break + } + aps = aa + } + + if len(token.Attr) != 0 { + token.Attr = p.sanitizeAttrs(token.Data, token.Attr, aps) + } + + if len(token.Attr) == 0 && !p.allowNoAttrs(token.Data) { + if p.addSpaces { + if _, err := buff.WriteString(" "); err != nil { + return err + } + } + break + } + if !skipElementContent { + if _, err := buff.WriteString(token.String()); err != nil { + return err + } + } + + case html.TextToken: + + if !skipElementContent { + switch mostRecentlyStartedToken { + case `script`: + // not encouraged, but if a policy allows JavaScript we + // should not HTML escape it as that would break the output + // + // requires p.AllowUnsafe() + if p.allowUnsafe { + if _, err := buff.WriteString(token.Data); err != nil { + return err + } + } + case "style": + // not encouraged, but if a policy allows CSS styles we + // should not HTML escape it as that would break the output + // + // requires p.AllowUnsafe() + if p.allowUnsafe { + if _, err := buff.WriteString(token.Data); err != nil { + return err + } + } + default: + // HTML escape the text + if _, err := buff.WriteString(token.String()); err != nil { + return err + } + } + } + + default: + // A token that didn't exist in the html package when we wrote this + return fmt.Errorf("unknown token: %v", token) + } + } +} + +// sanitizeAttrs takes a set of element attribute policies and the global +// attribute policies and applies them to the []html.Attribute returning a set +// of html.Attributes that match the policies +func (p *Policy) sanitizeAttrs( + elementName string, + attrs []html.Attribute, + aps map[string][]attrPolicy, +) []html.Attribute { + + if len(attrs) == 0 { + return attrs + } + + hasStylePolicies := false + sps, elementHasStylePolicies := p.elsAndStyles[elementName] + if len(p.globalStyles) > 0 || (elementHasStylePolicies && len(sps) > 0) { + hasStylePolicies = true + } + // no specific element policy found, look for a pattern match + if !hasStylePolicies { + for k, v := range p.elsMatchingAndStyles { + if k.MatchString(elementName) { + if len(v) > 0 { + hasStylePolicies = true + break + } + } + } + } + + // Builds a new attribute slice based on the whether the attribute has been + // allowed explicitly or globally. + cleanAttrs := []html.Attribute{} +attrsLoop: + for _, htmlAttr := range attrs { + if p.allowDataAttributes { + // If we see a data attribute, let it through. + if isDataAttribute(htmlAttr.Key) { + cleanAttrs = append(cleanAttrs, htmlAttr) + continue + } + } + // Is this a "style" attribute, and if so, do we need to sanitize it? 
+ if htmlAttr.Key == "style" && hasStylePolicies { + htmlAttr = p.sanitizeStyles(htmlAttr, elementName) + if htmlAttr.Val == "" { + // We've sanitized away any and all styles; don't bother to + // output the style attribute (even if it's allowed) + continue + } else { + cleanAttrs = append(cleanAttrs, htmlAttr) + continue + } + } + + // Is there an element specific attribute policy that applies? + if apl, ok := aps[htmlAttr.Key]; ok { + for _, ap := range apl { + if ap.regexp != nil { + if ap.regexp.MatchString(htmlAttr.Val) { + cleanAttrs = append(cleanAttrs, htmlAttr) + continue attrsLoop + } + } else { + cleanAttrs = append(cleanAttrs, htmlAttr) + continue attrsLoop + } + } + } + + // Is there a global attribute policy that applies? + if apl, ok := p.globalAttrs[htmlAttr.Key]; ok { + for _, ap := range apl { + if ap.regexp != nil { + if ap.regexp.MatchString(htmlAttr.Val) { + cleanAttrs = append(cleanAttrs, htmlAttr) + continue attrsLoop + } + } else { + cleanAttrs = append(cleanAttrs, htmlAttr) + continue attrsLoop + } + } + } + } + + if len(cleanAttrs) == 0 { + // If nothing was allowed, let's get out of here + return cleanAttrs + } + // cleanAttrs now contains the attributes that are permitted + + if linkable(elementName) { + if p.requireParseableURLs { + // Ensure URLs are parseable: + // - a.href + // - area.href + // - link.href + // - blockquote.cite + // - q.cite + // - img.src + // - script.src + tmpAttrs := []html.Attribute{} + for _, htmlAttr := range cleanAttrs { + switch elementName { + case "a", "area", "base", "link": + if htmlAttr.Key == "href" { + if u, ok := p.validURL(htmlAttr.Val); ok { + htmlAttr.Val = u + tmpAttrs = append(tmpAttrs, htmlAttr) + } + break + } + tmpAttrs = append(tmpAttrs, htmlAttr) + case "blockquote", "del", "ins", "q": + if htmlAttr.Key == "cite" { + if u, ok := p.validURL(htmlAttr.Val); ok { + htmlAttr.Val = u + tmpAttrs = append(tmpAttrs, htmlAttr) + } + break + } + tmpAttrs = append(tmpAttrs, htmlAttr) + case "audio", "embed", "iframe", "img", "script", "source", "track", "video": + if htmlAttr.Key == "src" { + if u, ok := p.validURL(htmlAttr.Val); ok { + if p.srcRewriter != nil { + parsedURL, err := url.Parse(u) + if err != nil { + fmt.Println(err) + } + p.srcRewriter(parsedURL) + u = parsedURL.String() + } + htmlAttr.Val = u + tmpAttrs = append(tmpAttrs, htmlAttr) + } + break + } + tmpAttrs = append(tmpAttrs, htmlAttr) + default: + tmpAttrs = append(tmpAttrs, htmlAttr) + } + } + cleanAttrs = tmpAttrs + } + + if (p.requireNoFollow || + p.requireNoFollowFullyQualifiedLinks || + p.requireNoReferrer || + p.requireNoReferrerFullyQualifiedLinks || + p.addTargetBlankToFullyQualifiedLinks) && + len(cleanAttrs) > 0 { + + // Add rel="nofollow" if a "href" exists + switch elementName { + case "a", "area", "base", "link": + var hrefFound bool + var externalLink bool + for _, htmlAttr := range cleanAttrs { + if htmlAttr.Key == "href" { + hrefFound = true + + u, err := url.Parse(htmlAttr.Val) + if err != nil { + continue + } + if u.Host != "" { + externalLink = true + } + + continue + } + } + + if hrefFound { + var ( + noFollowFound bool + noReferrerFound bool + targetBlankFound bool + ) + + addNoFollow := (p.requireNoFollow || + externalLink && p.requireNoFollowFullyQualifiedLinks) + + addNoReferrer := (p.requireNoReferrer || + externalLink && p.requireNoReferrerFullyQualifiedLinks) + + addTargetBlank := (externalLink && + p.addTargetBlankToFullyQualifiedLinks) + + tmpAttrs := []html.Attribute{} + for _, htmlAttr := range cleanAttrs { + + var appended 
+						if htmlAttr.Key == "rel" && (addNoFollow || addNoReferrer) {
+
+							if addNoFollow && !strings.Contains(htmlAttr.Val, "nofollow") {
+								htmlAttr.Val += " nofollow"
+							}
+							if addNoReferrer && !strings.Contains(htmlAttr.Val, "noreferrer") {
+								htmlAttr.Val += " noreferrer"
+							}
+							noFollowFound = addNoFollow
+							noReferrerFound = addNoReferrer
+							tmpAttrs = append(tmpAttrs, htmlAttr)
+							appended = true
+						}
+
+						if elementName == "a" && htmlAttr.Key == "target" {
+							if htmlAttr.Val == "_blank" {
+								targetBlankFound = true
+							}
+							if addTargetBlank && !targetBlankFound {
+								htmlAttr.Val = "_blank"
+								targetBlankFound = true
+								tmpAttrs = append(tmpAttrs, htmlAttr)
+								appended = true
+							}
+						}
+
+						if !appended {
+							tmpAttrs = append(tmpAttrs, htmlAttr)
+						}
+					}
+					if noFollowFound || noReferrerFound || targetBlankFound {
+						cleanAttrs = tmpAttrs
+					}
+
+					if (addNoFollow && !noFollowFound) || (addNoReferrer && !noReferrerFound) {
+						rel := html.Attribute{}
+						rel.Key = "rel"
+						if addNoFollow {
+							rel.Val = "nofollow"
+						}
+						if addNoReferrer {
+							if rel.Val != "" {
+								rel.Val += " "
+							}
+							rel.Val += "noreferrer"
+						}
+						cleanAttrs = append(cleanAttrs, rel)
+					}
+
+					if elementName == "a" && addTargetBlank && !targetBlankFound {
+						rel := html.Attribute{}
+						rel.Key = "target"
+						rel.Val = "_blank"
+						targetBlankFound = true
+						cleanAttrs = append(cleanAttrs, rel)
+					}
+
+					if targetBlankFound {
+						// target="_blank" has a security risk that allows the
+						// opened window/tab to issue JavaScript calls against
+						// window.opener, which in effect allow the destination
+						// of the link to control the source:
+						// https://dev.to/ben/the-targetblank-vulnerability-by-example
+						//
+						// To mitigate this risk, we need to add a specific rel
+						// attribute if it is not already present.
+						// rel="noopener"
+						//
+						// Unfortunately this is processing the rel twice (we
+						// already looked at it earlier ^^) as we cannot be sure
+						// of the ordering of the href and rel, and whether we
+						// have fully satisfied that we need to do this. This
+						// double processing only happens *if* target="_blank"
+						// is true.
+						var noOpenerAdded bool
+						tmpAttrs := []html.Attribute{}
+						for _, htmlAttr := range cleanAttrs {
+							var appended bool
+							if htmlAttr.Key == "rel" {
+								if strings.Contains(htmlAttr.Val, "noopener") {
+									noOpenerAdded = true
+									tmpAttrs = append(tmpAttrs, htmlAttr)
+								} else {
+									htmlAttr.Val += " noopener"
+									noOpenerAdded = true
+									tmpAttrs = append(tmpAttrs, htmlAttr)
+								}
+
+								appended = true
+							}
+							if !appended {
+								tmpAttrs = append(tmpAttrs, htmlAttr)
+							}
+						}
+						if noOpenerAdded {
+							cleanAttrs = tmpAttrs
+						} else {
+							// rel attr was not found, or else noopener would
+							// have been added already
+							rel := html.Attribute{}
+							rel.Key = "rel"
+							rel.Val = "noopener"
+							cleanAttrs = append(cleanAttrs, rel)
+						}
+
+					}
+				}
+			default:
+			}
+		}
+	}
+
+	if p.requireCrossOriginAnonymous && len(cleanAttrs) > 0 {
+		switch elementName {
+		case "audio", "img", "link", "script", "video":
+			var crossOriginFound bool
+			for i, htmlAttr := range cleanAttrs {
+				if htmlAttr.Key == "crossorigin" {
+					crossOriginFound = true
+					cleanAttrs[i].Val = "anonymous"
+				}
+			}
+
+			if !crossOriginFound {
+				crossOrigin := html.Attribute{}
+				crossOrigin.Key = "crossorigin"
+				crossOrigin.Val = "anonymous"
+				cleanAttrs = append(cleanAttrs, crossOrigin)
+			}
+		}
+	}
+
+	if p.requireSandboxOnIFrame != nil && elementName == "iframe" {
+		var sandboxFound bool
+		for i, htmlAttr := range cleanAttrs {
+			if htmlAttr.Key == "sandbox" {
+				sandboxFound = true
+				var cleanVals []string
+				cleanValsSet := make(map[string]bool)
+				for _, val := range strings.Fields(htmlAttr.Val) {
+					if p.requireSandboxOnIFrame[val] {
+						if !cleanValsSet[val] {
+							cleanVals = append(cleanVals, val)
+							cleanValsSet[val] = true
+						}
+					}
+				}
+				cleanAttrs[i].Val = strings.Join(cleanVals, " ")
+			}
+		}
+
+		if !sandboxFound {
+			sandbox := html.Attribute{}
+			sandbox.Key = "sandbox"
+			sandbox.Val = ""
+			cleanAttrs = append(cleanAttrs, sandbox)
+		}
+	}
+
+	return cleanAttrs
+}
+
+func (p *Policy) sanitizeStyles(attr html.Attribute, elementName string) html.Attribute {
+	sps := p.elsAndStyles[elementName]
+	if len(sps) == 0 {
+		sps = map[string][]stylePolicy{}
+		// check for any matching elements, if we don't already have a policy found
+		// if multiple matches are found they will be overwritten, it's best
+		// to not have overlapping matchers
+		for regex, policies := range p.elsMatchingAndStyles {
+			if regex.MatchString(elementName) {
+				for k, v := range policies {
+					sps[k] = append(sps[k], v...)
+				}
+			}
+		}
+	}
+
+	//Add semi-colon to end to fix parsing issue
+	attr.Val = strings.TrimRight(attr.Val, " ")
+	if len(attr.Val) > 0 && attr.Val[len(attr.Val)-1] != ';' {
+		attr.Val = attr.Val + ";"
+	}
+	decs, err := parser.ParseDeclarations(attr.Val)
+	if err != nil {
+		attr.Val = ""
+		return attr
+	}
+	clean := []string{}
+	prefixes := []string{"-webkit-", "-moz-", "-ms-", "-o-", "mso-", "-xv-", "-atsc-", "-wap-", "-khtml-", "prince-", "-ah-", "-hp-", "-ro-", "-rim-", "-tc-"}
+
+decLoop:
+	for _, dec := range decs {
+		tempProperty := strings.ToLower(dec.Property)
+		tempValue := removeUnicode(strings.ToLower(dec.Value))
+		for _, i := range prefixes {
+			tempProperty = strings.TrimPrefix(tempProperty, i)
+		}
+		if spl, ok := sps[tempProperty]; ok {
+			for _, sp := range spl {
+				if sp.handler != nil {
+					if sp.handler(tempValue) {
+						clean = append(clean, dec.Property+": "+dec.Value)
+						continue decLoop
+					}
+				} else if len(sp.enum) > 0 {
+					if stringInSlice(tempValue, sp.enum) {
+						clean = append(clean, dec.Property+": "+dec.Value)
+						continue decLoop
+					}
+				} else if sp.regexp != nil {
+					if sp.regexp.MatchString(tempValue) {
+						clean = append(clean, dec.Property+": "+dec.Value)
+						continue decLoop
+					}
+				}
+			}
+		}
+		if spl, ok := p.globalStyles[tempProperty]; ok {
+			for _, sp := range spl {
+				if sp.handler != nil {
+					if sp.handler(tempValue) {
+						clean = append(clean, dec.Property+": "+dec.Value)
+						continue decLoop
+					}
+				} else if len(sp.enum) > 0 {
+					if stringInSlice(tempValue, sp.enum) {
+						clean = append(clean, dec.Property+": "+dec.Value)
+						continue decLoop
+					}
+				} else if sp.regexp != nil {
+					if sp.regexp.MatchString(tempValue) {
+						clean = append(clean, dec.Property+": "+dec.Value)
+						continue decLoop
+					}
+				}
+			}
+		}
+	}
+	if len(clean) > 0 {
+		attr.Val = strings.Join(clean, "; ")
+	} else {
+		attr.Val = ""
+	}
+	return attr
+}
+
+func (p *Policy) allowNoAttrs(elementName string) bool {
+	_, ok := p.setOfElementsAllowedWithoutAttrs[elementName]
+	if !ok {
+		for _, r := range p.setOfElementsMatchingAllowedWithoutAttrs {
+			if r.MatchString(elementName) {
+				ok = true
+				break
+			}
+		}
+	}
+	return ok
+}
+
+func (p *Policy) validURL(rawurl string) (string, bool) {
+	if p.requireParseableURLs {
+		// URLs are valid if when space is trimmed the URL is valid
+		rawurl = strings.TrimSpace(rawurl)
+
+		// URLs cannot contain whitespace, unless it is a data-uri
+		if strings.Contains(rawurl, " ") ||
+			strings.Contains(rawurl, "\t") ||
+			strings.Contains(rawurl, "\n") {
+			if !strings.HasPrefix(rawurl, `data:`) {
+				return "", false
+			}
+
+			// Remove \r and \n from base64 encoded data to pass url.Parse.
+			matched := dataURIbase64Prefix.FindString(rawurl)
+			if matched != "" {
+				rawurl = matched + strings.Replace(
+					strings.Replace(
+						rawurl[len(matched):],
+						"\r",
+						"",
+						-1,
+					),
+					"\n",
+					"",
+					-1,
+				)
+			}
+		}
+
+		// URLs are valid if they parse
+		u, err := url.Parse(rawurl)
+		if err != nil {
+			return "", false
+		}
+
+		if u.Scheme != "" {
+			urlPolicies, ok := p.allowURLSchemes[u.Scheme]
+			if !ok {
+				for _, r := range p.allowURLSchemeRegexps {
+					if r.MatchString(u.Scheme) {
+						return u.String(), true
+					}
+				}
+
+				return "", false
+			}
+
+			if len(urlPolicies) == 0 {
+				return u.String(), true
+			}
+
+			for _, urlPolicy := range urlPolicies {
+				if urlPolicy(u) {
+					return u.String(), true
+				}
+			}
+
+			return "", false
+		}
+
+		if p.allowRelativeURLs {
+			if u.String() != "" {
+				return u.String(), true
+			}
+		}
+
+		return "", false
+	}
+
+	return rawurl, true
+}
+
+func linkable(elementName string) bool {
+	switch elementName {
+	case "a", "area", "base", "link":
+		// elements that allow .href
+		return true
+	case "blockquote", "del", "ins", "q":
+		// elements that allow .cite
+		return true
+	case "audio", "embed", "iframe", "img", "input", "script", "track", "video":
+		// elements that allow .src
+		return true
+	default:
+		return false
+	}
+}
+
+// stringInSlice returns true if needle exists in haystack
+func stringInSlice(needle string, haystack []string) bool {
+	for _, straw := range haystack {
+		if strings.EqualFold(straw, needle) {
+			return true
+		}
+	}
+	return false
+}
+
+func isDataAttribute(val string) bool {
+	if !dataAttribute.MatchString(val) {
+		return false
+	}
+	rest := strings.Split(val, "data-")
+	if len(rest) == 1 {
+		return false
+	}
+	// data-xml* is invalid.
+	if dataAttributeXMLPrefix.MatchString(rest[1]) {
+		return false
+	}
+	// no uppercase or semi-colons allowed.
+	if dataAttributeInvalidChars.MatchString(rest[1]) {
+		return false
+	}
+	return true
+}
+
+func removeUnicode(value string) string {
+	substitutedValue := value
+	currentLoc := cssUnicodeChar.FindStringIndex(substitutedValue)
+	for currentLoc != nil {
+
+		character := substitutedValue[currentLoc[0]+1 : currentLoc[1]]
+		character = strings.TrimSpace(character)
+		if len(character) < 4 {
+			character = strings.Repeat("0", 4-len(character)) + character
+		} else {
+			for len(character) > 4 {
+				if character[0] != '0' {
+					character = ""
+					break
+				} else {
+					character = character[1:]
+				}
+			}
+		}
+		character = "\\u" + character
+		translatedChar, err := strconv.Unquote(`"` + character + `"`)
+		translatedChar = strings.TrimSpace(translatedChar)
+		if err != nil {
+			return ""
+		}
+		substitutedValue = substitutedValue[0:currentLoc[0]] + translatedChar + substitutedValue[currentLoc[1]:]
+		currentLoc = cssUnicodeChar.FindStringIndex(substitutedValue)
+	}
+	return substitutedValue
+}
+
+func (p *Policy) matchRegex(elementName string) (map[string][]attrPolicy, bool) {
+	aps := make(map[string][]attrPolicy, 0)
+	matched := false
+	for regex, attrs := range p.elsMatchingAndAttrs {
+		if regex.MatchString(elementName) {
+			matched = true
+			for k, v := range attrs {
+				aps[k] = append(aps[k], v...)
+			}
+		}
+	}
+	return aps, matched
+}
+
+// normaliseElementName takes a HTML element like