@@ -57,6 +57,16 @@ config ZRAM_BACKEND_LZO
select LZO_COMPRESS
select LZO_DECOMPRESS
+config ZRAM_BACKEND_CRYPTO_API
+ bool "Enable support for compression algorithms available in Crypto API"
+ depends on ZRAM
+ default y
+ select CRYPTO
+ help
+ Enable this option to keep using the Crypto API as a zram compression
+ backend. Every compression algorithm registered with the Crypto API on
+ your system then becomes selectable in zram. This is useful if you rely
+ on hardware compression provided by a crypto driver.
+
choice
prompt "Default zram compressor"
default ZRAM_DEF_COMP_LZORLE
@@ -8,5 +8,6 @@ zram-$(CONFIG_ZRAM_BACKEND_LZ4HC) += backend_lz4hc.o
zram-$(CONFIG_ZRAM_BACKEND_ZSTD) += backend_zstd.o
zram-$(CONFIG_ZRAM_BACKEND_DEFLATE) += backend_deflate.o
zram-$(CONFIG_ZRAM_BACKEND_842) += backend_842.o
+zram-$(CONFIG_ZRAM_BACKEND_CRYPTO_API) += backend_crypto_api.o
obj-$(CONFIG_ZRAM) += zram.o
new file mode 100644
@@ -0,0 +1,117 @@
+// SPDX-License-Identifier: GPL-2.0-or-later
+
+#include <linux/kernel.h>
+#include <linux/slab.h>
+#include <linux/crypto.h>
+
+#include "backend_crypto_api.h"
+
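+/* Per-context state: the legacy Crypto API compression transform. */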
+struct crypto_api_ctx {
+ struct crypto_comp *tfm;
+};
+
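+/*
+ * Crypto API compression algorithms take no tunable parameters, so the
+ * setup/release hooks are intentionally empty.
+ */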
+static void crypto_api_release_params(struct zcomp_params *params)
+{
+}
+
+static int crypto_api_setup_params(struct zcomp_params *params)
+{
+ return 0;
+}
+
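+/* Allocate the backend context and its Crypto API transform. */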
+static int crypto_api_create(struct zcomp *zcomp, struct zcomp_ctx *ctx)
+{
+ struct crypto_api_ctx *crypto_ctx;
+ const char *algname = zcomp->ops->name;
+
+ crypto_ctx = kzalloc(sizeof(*crypto_ctx), GFP_KERNEL);
+ if (!crypto_ctx)
+ return -ENOMEM;
+
+ crypto_ctx->tfm = crypto_alloc_comp(algname, 0, 0);
+ if (IS_ERR_OR_NULL(crypto_ctx->tfm)) {
+ kfree(crypto_ctx);
+ return -ENOMEM;
+ }
+
+ ctx->context = crypto_ctx;
+
+ return 0;
+}
+
+static void crypto_api_destroy(struct zcomp_ctx *ctx)
+{
+ struct crypto_api_ctx *crypto_ctx = ctx->context;
+
+ if (!IS_ERR_OR_NULL(crypto_ctx->tfm))
+ crypto_free_comp(crypto_ctx->tfm);
+
+ kfree(crypto_ctx);
+}
+
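+/* Hand the buffers to the Crypto API transform and report the resulting length. */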
+static int crypto_api_compress(struct zcomp_params *params, struct zcomp_ctx *ctx,
+ struct zcomp_req *req)
+{
+ struct crypto_api_ctx *crypto_ctx = ctx->context;
+ unsigned int dst_len = req->dst_len;
+ int ret;
+
+ ret = crypto_comp_compress(crypto_ctx->tfm,
+ req->src, req->src_len,
+ req->dst, &dst_len);
+
+ req->dst_len = dst_len;
+
+ return ret;
+}
+
+static int crypto_api_decompress(struct zcomp_params *params, struct zcomp_ctx *ctx,
+ struct zcomp_req *req)
+{
+ struct crypto_api_ctx *crypto_ctx = ctx->context;
+ unsigned int dst_len = req->dst_len;
+ int ret;
+
+ ret = crypto_comp_decompress(crypto_ctx->tfm,
+ req->src, req->src_len,
+ req->dst, &dst_len);
+
+ req->dst_len = dst_len;
+
+ return ret;
+}
+
+static void crypto_api_destroy_ops(struct zcomp_ops *ops)
+{
+ kfree(ops->name);
+ kfree(ops);
+}
+
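+/*
+ * Build zcomp_ops for the Crypto API algorithm @name. The ops struct and
+ * the duplicated name string are released through ->destroy().
+ */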
+struct zcomp_ops *get_backend_crypto_api(const char *name)
+{
+ struct zcomp_ops *ops;
+ char *algname;
+
+ ops = kmalloc(sizeof(*ops), GFP_KERNEL);
+ if (!ops)
+ return ERR_PTR(-ENOMEM);
+
+ algname = kstrdup(name, GFP_KERNEL);
+ if (!algname) {
+ kfree(ops);
+ return ERR_PTR(-ENOMEM);
+ }
+
+ ops->compress = crypto_api_compress;
+ ops->decompress = crypto_api_decompress;
+ ops->create_ctx = crypto_api_create;
+ ops->destroy_ctx = crypto_api_destroy;
+ ops->setup_params = crypto_api_setup_params;
+ ops->release_params = crypto_api_release_params;
+ ops->destroy = crypto_api_destroy_ops;
+ ops->name = algname;
+
+ return ops;
+}
new file mode 100644
@@ -0,0 +1,10 @@
+/* SPDX-License-Identifier: GPL-2.0-or-later */
+
+#ifndef __BACKEND_CRYPTO_API_H__
+#define __BACKEND_CRYPTO_API_H__
+
+#include "zcomp.h"
+
+struct zcomp_ops *get_backend_crypto_api(const char *name);
+
+#endif /* __BACKEND_CRYPTO_API_H__ */
@@ -20,6 +20,9 @@
#include "backend_zstd.h"
#include "backend_deflate.h"
#include "backend_842.h"
+#include "backend_crypto_api.h"
+
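+/* The crypto core's global list of registered algorithms (crypto/api.c). */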
+extern struct list_head crypto_alg_list;
static LIST_HEAD(backends);
@@ -216,63 +219,117 @@ void clean_zcomp_backends(void)
backend->destroy(backend);
}
+#if IS_ENABLED(CONFIG_ZRAM_BACKEND_CRYPTO_API)
+static bool backend_enabled(const char *name)
+{
+ struct zcomp_ops *backend;
+
+ list_for_each_entry(backend, &backends, list)
+ if (!strcmp(backend->name, name))
+ return true;
+
+ return false;
+}
+
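+/*
+ * Walk the crypto core's algorithm list (normally protected by
+ * crypto_alg_sem) and register a zcomp backend for every compression
+ * algorithm that is not already covered by a built-in zram backend.
+ */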
+static int init_crypto_api_backends(void)
+{
+ struct crypto_alg *alg;
+ struct zcomp_ops *ops;
+
+ list_for_each_entry(alg, &crypto_alg_list, cra_list) {
+ if (!crypto_has_comp(alg->cra_name, 0, 0))
+ continue;
+
+ if (backend_enabled(alg->cra_name))
+ continue;
+
+ ops = get_backend_crypto_api(alg->cra_name);
+ if (IS_ERR_OR_NULL(ops))
+ return PTR_ERR(ops);
+
+ list_add(&ops->list, &backends);
+ }
+
+ return 0;
+}
+#endif
+
int init_zcomp_backends(void)
{
struct zcomp_ops *ops;
+ int ret;
#if IS_ENABLED(CONFIG_ZRAM_BACKEND_LZO)
ops = get_backend_lzorle();
- if (IS_ERR_OR_NULL(ops))
+ if (IS_ERR_OR_NULL(ops)) {
+ ret = PTR_ERR(ops);
goto err;
+ }
list_add(&ops->list, &backends);
ops = get_backend_lzo();
- if (IS_ERR_OR_NULL(ops))
+ if (IS_ERR_OR_NULL(ops)) {
+ ret = PTR_ERR(ops);
goto err;
+ }
list_add(&ops->list, &backends);
#endif
#if IS_ENABLED(CONFIG_ZRAM_BACKEND_LZ4)
ops = get_backend_lz4();
- if (IS_ERR_OR_NULL(ops))
+ if (IS_ERR_OR_NULL(ops)) {
+ ret = PTR_ERR(ops);
goto err;
+ }
list_add(&ops->list, &backends);
#endif
#if IS_ENABLED(CONFIG_ZRAM_BACKEND_LZ4HC)
ops = get_backend_lz4hc();
- if (IS_ERR_OR_NULL(ops))
+ if (IS_ERR_OR_NULL(ops)) {
+ ret = PTR_ERR(ops);
goto err;
+ }
list_add(&ops->list, &backends);
#endif
#if IS_ENABLED(CONFIG_ZRAM_BACKEND_ZSTD)
ops = get_backend_zstd();
- if (IS_ERR_OR_NULL(ops))
+ if (IS_ERR_OR_NULL(ops)) {
+ ret = PTR_ERR(ops);
goto err;
+ }
list_add(&ops->list, &backends);
#endif
#if IS_ENABLED(CONFIG_ZRAM_BACKEND_DEFLATE)
ops = get_backend_deflate();
- if (IS_ERR_OR_NULL(ops))
+ if (IS_ERR_OR_NULL(ops)) {
+ ret = PTR_ERR(ops);
goto err;
+ }
list_add(&ops->list, &backends);
#endif
#if IS_ENABLED(CONFIG_ZRAM_BACKEND_842)
ops = get_backend_842();
- if (IS_ERR_OR_NULL(ops))
+ if (IS_ERR_OR_NULL(ops)) {
+ ret = PTR_ERR(ops);
goto err;
+ }
list_add(&ops->list, &backends);
#endif
+#if IS_ENABLED(CONFIG_ZRAM_BACKEND_CRYPTO_API)
+ ret = init_crypto_api_backends();
+ if (ret)
+ goto err;
+#endif
+
return 0;
err:
clean_zcomp_backends();
- return PTR_ERR(ops);
+ return ret;
}
Since zram now uses its own backend implementations, users can no longer
select algorithms that are only available through the Crypto API. This
breaks backward compatibility: a user is not necessarily limited to the
"custom" backends in the zram directory and may, for example, rely on a
driver that provides hardware compression support.

This patch adds an option, ZRAM_BACKEND_CRYPTO_API, that re-enables the
Crypto API as a backend. The option is enabled by default because previous
versions of zram allowed choosing any algorithm through the Crypto API;
keeping it on by default preserves that behaviour.

Signed-off-by: Alexey Romanov <avromanov@salutedevices.com>
---
 drivers/block/zram/Kconfig              |  10 ++
 drivers/block/zram/Makefile             |   1 +
 drivers/block/zram/backend_crypto_api.c | 117 ++++++++++++++++++++++++
 drivers/block/zram/backend_crypto_api.h |  10 ++
 drivers/block/zram/zcomp.c              |  73 +++++++++++++--
 5 files changed, 203 insertions(+), 8 deletions(-)
 create mode 100644 drivers/block/zram/backend_crypto_api.c
 create mode 100644 drivers/block/zram/backend_crypto_api.h
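
Illustration only, not part of the patch: below is a minimal sketch of how a
hypothetical hardware compression driver could expose its engine through the
legacy Crypto API compression interface that this series relies on
(crypto_alloc_comp() and friends). With ZRAM_BACKEND_CRYPTO_API=y,
init_crypto_api_backends() would find the registered "hw-comp-example"
algorithm on crypto_alg_list and make it selectable via zram's comp_algorithm
attribute. All names and callbacks here are invented for illustration; a real
driver would talk to its accelerator instead of returning -EOPNOTSUPP.

#include <linux/module.h>
#include <linux/errno.h>
#include <linux/crypto.h>
#include <crypto/algapi.h>

static int hw_comp_compress(struct crypto_tfm *tfm, const u8 *src,
			    unsigned int slen, u8 *dst, unsigned int *dlen)
{
	/* A real driver would hand src/dst to its compression engine here. */
	return -EOPNOTSUPP;
}

static int hw_comp_decompress(struct crypto_tfm *tfm, const u8 *src,
			      unsigned int slen, u8 *dst, unsigned int *dlen)
{
	return -EOPNOTSUPP;
}

static struct crypto_alg hw_comp_alg = {
	.cra_name		= "hw-comp-example",
	.cra_driver_name	= "hw-comp-example-accel",
	.cra_priority		= 300,
	.cra_flags		= CRYPTO_ALG_TYPE_COMPRESS,
	.cra_module		= THIS_MODULE,
	.cra_u			= {
		.compress = {
			.coa_compress	= hw_comp_compress,
			.coa_decompress	= hw_comp_decompress,
		},
	},
};

static int __init hw_comp_init(void)
{
	/* Registration puts the algorithm on crypto_alg_list. */
	return crypto_register_alg(&hw_comp_alg);
}

static void __exit hw_comp_exit(void)
{
	crypto_unregister_alg(&hw_comp_alg);
}

module_init(hw_comp_init);
module_exit(hw_comp_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Illustrative hardware compression stub for zram's Crypto API backend");

The point of the sketch is only that the new Kconfig option keeps this path
usable; nothing in it is required by the patch itself.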