diff --git a/PaliGemma/[PaliGemma_1]Finetune_with_image_description.ipynb b/PaliGemma/[PaliGemma_1]Finetune_with_image_description.ipynb
index f547106f..dee4c08e 100644
--- a/PaliGemma/[PaliGemma_1]Finetune_with_image_description.ipynb
+++ b/PaliGemma/[PaliGemma_1]Finetune_with_image_description.ipynb
@@ -292,7 +292,8 @@
 "TOKENIZER_PATH = \"./paligemma_tokenizer.model\"\n",
 "if not os.path.exists(TOKENIZER_PATH):\n",
 " print(\"Downloading the model tokenizer...\")\n",
- " !gcloud storage cp gs://big_vision/paligemma_tokenizer.model {TOKENIZER_PATH}\n", " print(f\"Tokenizer path: {TOKENIZER_PATH}\")"
+ " !gcloud storage cp gs://big_vision/paligemma_tokenizer.model {TOKENIZER_PATH}\n",
+ " print(f\"Tokenizer path: {TOKENIZER_PATH}\")"
 ]
 },
 {
diff --git a/PaliGemma/[PaliGemma_1]Finetune_with_object_detection.ipynb b/PaliGemma/[PaliGemma_1]Finetune_with_object_detection.ipynb
index 5f34ebbc..14c44bca 100644
--- a/PaliGemma/[PaliGemma_1]Finetune_with_object_detection.ipynb
+++ b/PaliGemma/[PaliGemma_1]Finetune_with_object_detection.ipynb
@@ -591,7 +591,8 @@
 "TOKENIZER_PATH = \"./paligemma_tokenizer.model\"\n",
 "if not os.path.exists(TOKENIZER_PATH):\n",
 " print(\"Downloading the model tokenizer...\")\n",
- " !gcloud storage cp gs://big_vision/paligemma_tokenizer.model {TOKENIZER_PATH}\n", " print(f\"Tokenizer path: {TOKENIZER_PATH}\")"
+ " !gcloud storage cp gs://big_vision/paligemma_tokenizer.model {TOKENIZER_PATH}\n",
+ " print(f\"Tokenizer path: {TOKENIZER_PATH}\")"
 ]
 },
 {
diff --git a/PaliGemma/[PaliGemma_2]Finetune_with_JAX.ipynb b/PaliGemma/[PaliGemma_2]Finetune_with_JAX.ipynb
index 099ddf58..d7239dff 100644
--- a/PaliGemma/[PaliGemma_2]Finetune_with_JAX.ipynb
+++ b/PaliGemma/[PaliGemma_2]Finetune_with_JAX.ipynb
@@ -424,12 +424,14 @@
 "TOKENIZER_PATH = \"./paligemma_tokenizer.model\"\n",
 "if not os.path.exists(TOKENIZER_PATH):\n",
 " print(\"Downloading the model tokenizer...\")\n",
- " !gcloud storage cp gs://big_vision/paligemma_tokenizer.model {TOKENIZER_PATH}\n", " print(f\"Tokenizer path: {TOKENIZER_PATH}\")\n",
+ " !gcloud storage cp gs://big_vision/paligemma_tokenizer.model {TOKENIZER_PATH}\n",
+ " print(f\"Tokenizer path: {TOKENIZER_PATH}\")\n",
 "\n",
 "DATA_DIR=\"./longcap100\"\n",
 "if not os.path.exists(DATA_DIR):\n",
 " print(\"Downloading the dataset...\")\n",
- " !gcloud storage cp --no-clobber --recursive gs://longcap100/ .\n", " print(f\"Data path: {DATA_DIR}\")"
+ " !gcloud storage cp --no-clobber --recursive gs://longcap100/ .\n",
+ " print(f\"Data path: {DATA_DIR}\")"
 ]
 },
 {