Upload 2 files
app.py CHANGED
@@ -24,7 +24,7 @@ REPO_DIR = Path(__file__).parent
 subprocess.Popen(["uvicorn", "server:app"], cwd=REPO_DIR)
 
 
-#subprocess.Popen(["uvicorn", "server:app", "--port", "3000"], cwd=REPO_DIR)
+# subprocess.Popen(["uvicorn", "server:app", "--port", "3000"], cwd=REPO_DIR)
 
 # if not exists, create a directory for the FHE keys called .fhe_keys
 if not os.path.exists(".fhe_keys"):

@@ -190,30 +190,25 @@ def process_pipeline(test_file):
     return eval_key, encodings, encrypted_quantized_encoding, encrypted_prediction
 
 if __name__ == "__main__":
-
+
     with gr.Blocks() as demo:
         print("Starting the FHE Model")
 
+        fn = (process_pipeline,)
+        inputs = (
+            [
+                gr.File(label="Test File"),
+            ],
+        )
+        outputs = (
+            [
+                gr.Textbox(label="Evaluation Key"),
+                gr.Textbox(label="Encodings"),
+                gr.Textbox(label="Encrypted Quantized Encoding"),
+                gr.Textbox(label="Encrypted Prediction"),
+            ],
+        )
+        title = ("FHE Model",)
+        description = ("This is a FHE Model",)
 
-
-
     demo.launch() #share=True)
-
-
-"""
-app = gr.Interface(
-
-    fn=process_pipeline,
-    inputs=[
-        gr.File(label="Test File"),
-    ],
-    outputs=[
-        gr.Textbox(label="Evaluation Key"),
-        gr.Textbox(label="Encodings"),
-        gr.Textbox(label="Encrypted Quantized Encoding"),
-        gr.Textbox(label="Encrypted Prediction"),
-    ],
-    title="FHE Model",
-    description="This is a FHE Model",
-)
-"""
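Note that the tuple assignments added inside the gr.Blocks() context (fn = (process_pipeline,), inputs = (...), outputs = (...), title, description) create plain Python tuples rather than Gradio components, so on their own they do not register process_pipeline as an event handler. Below is a minimal sketch of how the same file input and four textbox outputs could be wired up in Blocks; the "Run" button and the component variable names are illustrative assumptions, not part of this commit.

import gradio as gr

# Sketch: wiring process_pipeline (defined earlier in app.py) into a Blocks UI.
# The "Run" button and component variable names are illustrative assumptions.
with gr.Blocks() as demo:
    gr.Markdown("# FHE Model")
    gr.Markdown("This is a FHE Model")

    test_file = gr.File(label="Test File")
    run_button = gr.Button("Run")

    eval_key_box = gr.Textbox(label="Evaluation Key")
    encodings_box = gr.Textbox(label="Encodings")
    encrypted_encoding_box = gr.Textbox(label="Encrypted Quantized Encoding")
    encrypted_prediction_box = gr.Textbox(label="Encrypted Prediction")

    # process_pipeline returns four values, one per output component.
    run_button.click(
        fn=process_pipeline,
        inputs=[test_file],
        outputs=[eval_key_box, encodings_box, encrypted_encoding_box, encrypted_prediction_box],
    )

demo.launch()

The removed gr.Interface(...) block in the old version expressed the same mapping of one file input to four textbox outputs; Blocks simply makes the components and the click event explicit.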
server.py CHANGED
@@ -9,11 +9,13 @@ import uvicorn
 
 
 current_dir = Path(__file__).parent
-
+print('1111', current_dir)
 # Load the model
 fhe_model = FHEModelServer(
     Path.joinpath(current_dir, "fhe_model")
 )
+print(Path.joinpath(current_dir, "fhe_model"))
+print(fhe_model)
 class PredictRequest(BaseModel):
     evaluation_key: str
     encrypted_encoding: str

@@ -39,5 +41,5 @@ def predict(query: PredictRequest):
     encoded_prediction = base64.b64encode(prediction).decode()
     return {"encrypted_prediction": encoded_prediction}
 
-#if __name__ == "__main__":
-#    uvicorn.run(app, host="0.0.0.0", port=3000)
+# if __name__ == "__main__":
+#    uvicorn.run(app, host="0.0.0.0", port=3000)