diff --git a/use_case_examples/deployment/breast_cancer/README.md b/use_case_examples/deployment/breast_cancer/README.md
index a909ee3fe2..0e6c2110bf 100644
--- a/use_case_examples/deployment/breast_cancer/README.md
+++ b/use_case_examples/deployment/breast_cancer/README.md
@@ -8,14 +8,16 @@
 To run this example on AWS you will also need to have the AWS CLI properly setup
 To do so please refer to [AWS documentation](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-quickstart.html).
 One can also run this example locally using Docker, or just by running the scripts locally.
 
-1. To train your model you can use `train.py`, or `train_with_docker.sh` to use Docker (recommended way).
+1. To train your model you can:
+   - use `train_with_docker.sh` to train with Docker (recommended way),
+   - or, if you know what you are doing and can manage Python versions yourself, use `train.py`.
    This will train a model and [serialize the FHE circuit](../../../docs/guides/client_server.md) in a new folder called `./dev`.
 1. Once that's done you can use the script provided in Concrete ML in `use_case_examples/deployment/server/`, use `deploy_to_docker.py`.
    - `python use_case_examples/deployment/server/deploy_to_docker.py --path-to-model ./dev`
-3. Once that's done you can launch the `build_docker_client_image.py` script to build a client Docker image.
+1. Once that's done you can launch the `build_docker_client_image.py` script to build a client Docker image.
 1. You can then run the client by using the `client.sh` script. This will run the container in interactive mode.
-   To interact with the server you can launch the `client.py` script using `URL="<my_url>" python client.py` where `<my_url>` is the content of the `url.txt` file (default is `0.0.0.0`, ip to use when running server in Docker on localhost).
+1. Then, in the client container from the previous step, you can launch the `client.py` script to interact with the server, using `URL="<my_url>" python client.py` where `<my_url>` is the content of the `url.txt` file (if you do not set `URL`, it defaults to `0.0.0.0`, the IP to use when running the server in Docker on localhost).
 
-And here it is you deployed a Concrete ML model and ran an inference using Fully Homormophic Encryption.
+And there you have it: you deployed a Concrete ML model and ran an inference using Fully Homomorphic Encryption. In particular, you will see that the FHE predictions are correct.
 
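For reference, here is a minimal sketch of what the training step described in this README boils down to, in the spirit of `train.py` as modified below. The model class (`XGBClassifier`), its hyper-parameters, and the train/test split are assumptions for illustration; the example's actual script may differ.

```python
# Hypothetical sketch of the training step (see train.py below); the model
# choice and hyper-parameters are assumptions, not the example's exact ones.
from sklearn.datasets import load_breast_cancer
from sklearn.model_selection import train_test_split

from concrete.ml.deployment import FHEModelDev
from concrete.ml.sklearn import XGBClassifier

X, y = load_breast_cancer(return_X_y=True)
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=42)

# Train a quantized model, then compile it to an FHE circuit
model = XGBClassifier(n_bits=3)
model.fit(X_train, y_train)
model.compile(X_train)

# Serialize the client/server deployment artifacts into ./dev
dev = FHEModelDev("./dev", model)
dev.save(via_mlir=True)
```

The resulting `./dev` folder is what `deploy_to_docker.py --path-to-model ./dev` then packages into the server image.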
diff --git a/use_case_examples/deployment/breast_cancer/client.py b/use_case_examples/deployment/breast_cancer/client.py
index a1b8d225bb..4b396fdb99 100644
--- a/use_case_examples/deployment/breast_cancer/client.py
+++ b/use_case_examples/deployment/breast_cancer/client.py
@@ -21,7 +21,7 @@
 
 from concrete.ml.deployment import FHEModelClient
 
-URL = os.environ.get("URL", f"http://localhost:5000")
+URL = os.environ.get("URL", f"http://localhost:8888")
 STATUS_OK = 200
 ROOT = Path(__file__).parent / "client"
 ROOT.mkdir(exist_ok=True)
@@ -105,4 +105,13 @@
         encrypted_result = result.content
         decrypted_prediction = client.deserialize_decrypt_dequantize(encrypted_result)[0]
         decrypted_predictions.append(decrypted_prediction)
-    print(decrypted_predictions)
+    print(f"Decrypted predictions are: {decrypted_predictions}")
+
+    decrypted_predictions_classes = numpy.array(decrypted_predictions).argmax(axis=1)
+    print(f"Decrypted prediction classes are: {decrypted_predictions_classes}")
+
+    # Check the FHE predictions against the expected labels
+    expected_classes = y[0:10]
+    accuracy = (expected_classes == decrypted_predictions_classes).mean()
+    print(f"Accuracy between FHE predictions and expected results is: {accuracy * 100:.0f}%")
+
diff --git a/use_case_examples/deployment/breast_cancer/client.sh b/use_case_examples/deployment/breast_cancer/client.sh
old mode 100644
new mode 100755
diff --git a/use_case_examples/deployment/breast_cancer/client_requirements.txt b/use_case_examples/deployment/breast_cancer/client_requirements.txt
index fc58e57cf6..d1faae9a46 100644
--- a/use_case_examples/deployment/breast_cancer/client_requirements.txt
+++ b/use_case_examples/deployment/breast_cancer/client_requirements.txt
@@ -1,3 +1,6 @@
 grequests
 requests
 tqdm
+numpy
+scikit-learn
+concrete-ml
\ No newline at end of file
diff --git a/use_case_examples/deployment/breast_cancer/train.py b/use_case_examples/deployment/breast_cancer/train.py
index 494354a2dd..da4c5d23d5 100644
--- a/use_case_examples/deployment/breast_cancer/train.py
+++ b/use_case_examples/deployment/breast_cancer/train.py
@@ -20,4 +20,4 @@
     model.fit(X_train, y_train)
     model.compile(X_train)
     dev = FHEModelDev("./dev", model)
-    dev.save()
+    dev.save(via_mlir=True)
diff --git a/use_case_examples/deployment/server/deploy_to_docker.py b/use_case_examples/deployment/server/deploy_to_docker.py
index 881a3b9bcd..9dd1a93146 100644
--- a/use_case_examples/deployment/server/deploy_to_docker.py
+++ b/use_case_examples/deployment/server/deploy_to_docker.py
@@ -97,11 +97,13 @@ def main(path_to_model: Path, image_name: str):
     if args.only_build:
         return
 
+    PORT_TO_CHOOSE = 8888
+
     # Run newly created Docker server
     try:
         with open("./url.txt", mode="w", encoding="utf-8") as file:
-            file.write("http://localhost:5000")
-        subprocess.check_output(f"docker run -p 5000:5000 {image_name}", shell=True)
+            file.write(f"http://localhost:{PORT_TO_CHOOSE}")
+        subprocess.check_output(f"docker run -p {PORT_TO_CHOOSE}:5000 {image_name}", shell=True)
     except KeyboardInterrupt:
         message = "Terminate container? (y/n) "
         shutdown_instance = input(message).lower()
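To make the client-side flow in `client.py` easier to follow, here is a self-contained sketch of the round trip it implements. The endpoint names (`/add_key`, `/compute`) and their payload fields are assumptions based on this example's deployment server, and the client artifacts are assumed to have already been fetched into `./client`.

```python
# Hypothetical sketch of the client round trip (see client.py). Endpoint
# names and payload fields are assumptions based on this example's server.
import io

import numpy
import requests
from sklearn.datasets import load_breast_cancer

from concrete.ml.deployment import FHEModelClient

URL = "http://localhost:8888"  # matches PORT_TO_CHOOSE in deploy_to_docker.py

# Assumes client.zip was already downloaded from the server into ./client
client = FHEModelClient(path_dir="./client", key_dir="./client/keys")
client.generate_private_and_evaluation_keys()

# Register the (public) evaluation keys with the server
serialized_evaluation_keys = client.get_serialized_evaluation_keys()
response = requests.post(
    f"{URL}/add_key", files={"key": io.BytesIO(serialized_evaluation_keys)}
)
uid = response.json()["uid"]

X, y = load_breast_cancer(return_X_y=True)
decrypted_predictions = []
for i in range(10):
    # Quantize, encrypt and serialize one sample, then run FHE inference remotely
    encrypted_input = client.quantize_encrypt_serialize(X[[i]])
    result = requests.post(
        f"{URL}/compute",
        files={"model_input": io.BytesIO(encrypted_input)},
        data={"uid": uid},
    )
    # Decrypt and de-quantize the encrypted prediction
    decrypted_predictions.append(client.deserialize_decrypt_dequantize(result.content)[0])

predicted_classes = numpy.array(decrypted_predictions).argmax(axis=1)
accuracy = (predicted_classes == y[:10]).mean()
print(f"Accuracy between FHE predictions and expected results is: {accuracy * 100:.0f}%")
```

The final comparison mirrors the accuracy check this diff adds to `client.py`.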