/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Crypto engine API
 *
 * Copyright (c) 2016 Baolin Wang <baolin.wang@linaro.org>
 */
#ifndef _CRYPTO_ENGINE_H
#define _CRYPTO_ENGINE_H

#include <linux/crypto.h>
#include <linux/list.h>
#include <linux/kernel.h>
#include <linux/kthread.h>
#include <crypto/algapi.h>
#include <crypto/aead.h>
#include <crypto/akcipher.h>
#include <crypto/hash.h>
#include <crypto/skcipher.h>

#define ENGINE_NAME_LEN	30
/*
 * struct crypto_engine - crypto hardware engine
 * @name: the engine name
 * @idling: the engine is entering idle state
 * @busy: request pump is busy
 * @running: the engine is running
 * @cur_req_prepared: current request is prepared
 * @list: link with the global crypto engine list
 * @queue_lock: spinlock to synchronise access to request queue
 * @queue: the crypto queue of the engine
 * @dev: the device attached to this engine
 * @rt: whether this queue is set to run as a realtime task
 * @prepare_crypt_hardware: a request will soon arrive from the queue
 * so the subsystem requests the driver to prepare the hardware
 * by issuing this call
 * @unprepare_crypt_hardware: there are currently no more requests on the
 * queue so the subsystem notifies the driver that it may relax the
 * hardware by issuing this call
 * @kworker: kthread worker struct for request pump
 * @pump_requests: work struct for scheduling work to the request pump
 * @priv_data: the engine private data
 * @cur_req: the current request being processed
 */
struct crypto_engine {
	char			name[ENGINE_NAME_LEN];
	bool			idling;
	bool			busy;
	bool			running;
	bool			cur_req_prepared;

	struct list_head	list;
	spinlock_t		queue_lock;
	struct crypto_queue	queue;
	struct device		*dev;

	bool			rt;

	int (*prepare_crypt_hardware)(struct crypto_engine *engine);
	int (*unprepare_crypt_hardware)(struct crypto_engine *engine);

	struct kthread_worker	*kworker;
	struct kthread_work	pump_requests;

	void			*priv_data;
	struct crypto_async_request	*cur_req;
};

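/*
 * Example (illustrative sketch, not compiled into this header): a driver-side
 * pair of prepare/unprepare_crypt_hardware callbacks as described above.
 * struct my_dev, my_hw_power_on() and my_hw_power_off() are hypothetical
 * driver helpers; priv_data is assumed to have been set by the driver after
 * crypto_engine_alloc_init().
 */
#if 0
static int my_prepare_crypt_hardware(struct crypto_engine *engine)
{
	struct my_dev *dd = engine->priv_data;

	/* A request is about to be pumped: power the accelerator up. */
	return my_hw_power_on(dd);
}

static int my_unprepare_crypt_hardware(struct crypto_engine *engine)
{
	struct my_dev *dd = engine->priv_data;

	/* The queue has drained: the accelerator may be relaxed again. */
	return my_hw_power_off(dd);
}
#endif
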
/*
 * struct crypto_engine_op - crypto hardware engine operations
 * @prepare_request: do any preparation, if needed, before handling the current request
 * @unprepare_request: undo any work done by prepare_request()
 * @do_one_request: do encryption for the current request
 */
struct crypto_engine_op {
	int (*prepare_request)(struct crypto_engine *engine,
			       void *areq);
	int (*unprepare_request)(struct crypto_engine *engine,
				 void *areq);
	int (*do_one_request)(struct crypto_engine *engine,
			      void *areq);
};

struct crypto_engine_ctx {
	struct crypto_engine_op op;
};

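/*
 * Example (illustrative sketch, not compiled into this header): the engine
 * looks the ops up in the transform context, so drivers typically embed
 * struct crypto_engine_ctx as the first member of their own context and fill
 * in the callbacks at init time. struct my_tfm_ctx and the my_cipher_*()
 * callbacks are hypothetical; crypto_skcipher_ctx() comes from
 * <crypto/internal/skcipher.h>.
 */
#if 0
struct my_tfm_ctx {
	struct crypto_engine_ctx enginectx;	/* assumed to be placed first */
	/* driver-specific state (keys, fallback tfm, ...) follows */
};

static int my_skcipher_init_tfm(struct crypto_skcipher *tfm)
{
	struct my_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);

	ctx->enginectx.op.prepare_request = my_cipher_prepare_req;
	ctx->enginectx.op.unprepare_request = my_cipher_unprepare_req;
	ctx->enginectx.op.do_one_request = my_cipher_do_one_req;

	return 0;
}
#endif
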
int crypto_transfer_ablkcipher_request_to_engine(struct crypto_engine *engine,
						 struct ablkcipher_request *req);
int crypto_transfer_aead_request_to_engine(struct crypto_engine *engine,
					   struct aead_request *req);
int crypto_transfer_akcipher_request_to_engine(struct crypto_engine *engine,
					       struct akcipher_request *req);
int crypto_transfer_hash_request_to_engine(struct crypto_engine *engine,
					   struct ahash_request *req);
int crypto_transfer_skcipher_request_to_engine(struct crypto_engine *engine,
					       struct skcipher_request *req);
void crypto_finalize_ablkcipher_request(struct crypto_engine *engine,
					struct ablkcipher_request *req, int err);
void crypto_finalize_aead_request(struct crypto_engine *engine,
				  struct aead_request *req, int err);
void crypto_finalize_akcipher_request(struct crypto_engine *engine,
				      struct akcipher_request *req, int err);
void crypto_finalize_hash_request(struct crypto_engine *engine,
				  struct ahash_request *req, int err);
void crypto_finalize_skcipher_request(struct crypto_engine *engine,
				      struct skcipher_request *req, int err);
int crypto_engine_start(struct crypto_engine *engine);
int crypto_engine_stop(struct crypto_engine *engine);
struct crypto_engine *crypto_engine_alloc_init(struct device *dev, bool rt);
int crypto_engine_exit(struct crypto_engine *engine);

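/*
 * Example (illustrative sketch, not compiled into this header): typical engine
 * life cycle in a driver, with error handling trimmed. struct my_dev and the
 * my_*() functions are hypothetical.
 */
#if 0
static int my_probe_engine(struct device *dev, struct my_dev *dd)
{
	/* Create the engine; true requests a realtime task for the pump. */
	dd->engine = crypto_engine_alloc_init(dev, true);
	if (!dd->engine)
		return -ENOMEM;

	/* Let the engine start pumping requests off its queue. */
	return crypto_engine_start(dd->engine);
}

/* .encrypt()/.decrypt() handler: queue the request on the engine */
static int my_skcipher_crypt(struct my_dev *dd, struct skcipher_request *req)
{
	return crypto_transfer_skcipher_request_to_engine(dd->engine, req);
}

/* completion path (e.g. from the interrupt handler or a DMA callback) */
static void my_req_done(struct my_dev *dd, struct skcipher_request *req, int err)
{
	crypto_finalize_skcipher_request(dd->engine, req, err);
}

static void my_remove_engine(struct my_dev *dd)
{
	/* Stops the pump and frees the engine. */
	crypto_engine_exit(dd->engine);
}
#endif
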
#endif /* _CRYPTO_ENGINE_H */