diff --git a/config.py b/config.py
index 268dfc9..411aa1d 100644
--- a/config.py
+++ b/config.py
@@ -13,7 +13,9 @@
     "loras": [],
     "inverters": [],
     "scheduler": "pndm",
-    "controlnet": "lllyasviel/sd-controlnet-canny"
+    "controlnet": "lllyasviel/sd-controlnet-canny",
+    "sd_turbo_model_name": "stabilityai/sdxl-turbo",
+    "model_urls": {"segment-anything": "https://dl.fbaipublicfiles.com/segment_anything/sam_vit_h_4b8939.pth"}
 }
 endpoints = {
     "generate_image": {
diff --git a/plugin.py b/plugin.py
index 5f96941..6904077 100644
--- a/plugin.py
+++ b/plugin.py
@@ -100,6 +100,16 @@ def shutdown():
     threading.Thread(target=self_terminate, daemon=True).start()
     return {"success": True}
 
+@app.post("/plugins/install")
+def install_plugin():
+    sd_plugin.on_install(sd_plugin.config.get('model_urls', {}), sd_plugin.progress_callback)
+    return {"message": "Plugin installation started"}
+
+@app.post("/plugins/uninstall")
+def uninstall_plugin():
+    sd_plugin.on_uninstall(sd_plugin.progress_callback)
+    return {"message": "Plugin uninstallation started"}
+
 class SD(Plugin):
     """
     Prediction inference.
@@ -290,4 +300,4 @@ def load_textual_inversion(self):
     def controlnet_predict(self, prompt: str, image, seed):
         embed_prompt, generator = self.prep_inputs(seed, prompt)
         output_img = self.controlpipe(prompt_embeds=embed_prompt, generator=generator, image = image, num_inference_steps=25).images[0]
-        return output_img
\ No newline at end of file
+        return output_img
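
For context, the two new routes can be smoke-tested with any HTTP client once the plugin server is running. A minimal sketch, assuming the server is reachable at http://127.0.0.1:8000 (the host/port is not fixed by this diff) and that the requests package is installed:

# Illustrative only: exercises the new /plugins/install and /plugins/uninstall
# routes added in plugin.py. The base URL is an assumption for local testing.
import requests

BASE_URL = "http://127.0.0.1:8000"  # assumed dev address; adjust to your setup

# Triggers install_plugin(), which calls on_install() with config["model_urls"]
# (e.g. the segment-anything checkpoint added to config.py above).
resp = requests.post(f"{BASE_URL}/plugins/install")
print(resp.json())  # {"message": "Plugin installation started"}

# Triggers uninstall_plugin(), which calls on_uninstall() with the progress callback.
resp = requests.post(f"{BASE_URL}/plugins/uninstall")
print(resp.json())  # {"message": "Plugin uninstallation started"}

Both routes return immediately with a confirmation message; progress is reported through sd_plugin.progress_callback rather than in the HTTP response.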