Browse source

Changed download links, changed the random seed from NumPy to TensorFlow, and added support for the A100 GPU

Aswinkumar 3 years ago
parent
commit
6ca5fc72e3

+ 1 - 1
hpc_ai/ai_science_cfd/Dockerfile

@@ -5,7 +5,7 @@
 # Finally, open http://127.0.0.1:8888/
 
 # Select Base Image 
-FROM nvcr.io/nvidia/tensorflow:20.01-tf2-py3
+FROM nvcr.io/nvidia/tensorflow:21.05-tf2-py3
 # Update the repo
 RUN apt-get update
 # Install required dependencies
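The base-image bump from 20.01 to 21.05 is what provides the A100 support noted in the commit message: the 21.05 NGC container is built on a CUDA 11.x toolchain with Ampere (compute capability 8.0) kernels, which the CUDA 10.x-based 20.01 image lacks. The same change is applied to the ai_science_climate Dockerfile below. A minimal sketch for checking this inside the running container, assuming the image's TensorFlow 2.x build (tf.config.experimental.get_device_details is available from TF 2.3 on):

# Sketch: print each visible GPU's name and compute capability; an A100
# should report (8, 0). Assumes TF >= 2.3 for get_device_details.
import tensorflow as tf

for gpu in tf.config.list_physical_devices('GPU'):
    details = tf.config.experimental.get_device_details(gpu)
    print(gpu.name, details.get('device_name'), details.get('compute_capability'))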

+ 2 - 4
hpc_ai/ai_science_cfd/English/python/jupyter_notebook/CFD/Part3.ipynb

@@ -493,8 +493,7 @@
    "source": [
     "#But Training our model from scratch will take a long time\n",
     "#So we will load a partially trained model to speedup the process \n",
-    "K.clear_session()\n",
-    "conv_model = tf.keras.models.load_model(\"conv_model.h5\",custom_objects={'loss_image': loss_image})\n",
+    "conv_model.load_weights(\"conv_model.h5\")\n",
     "\n",
     "history = conv_model.fit(training_dataset, epochs=5, steps_per_epoch=train_batches,\n",
     "          validation_data=validation_dataset, validation_steps=validation_batches, \n",
@@ -726,8 +725,7 @@
    "source": [
     "#But Training our model from scratch will take a long time\n",
     "#So we will load a partially trained model to speedup the process \n",
-    "K.clear_session()\n",
-    "conv_sdf_model = tf.keras.models.load_model(\"conv_sdf_model.h5\",custom_objects={'loss_image': loss_image})\n",
+    "conv_sdf_model.load_weights(\"conv_sdf_model.h5\")\n",
     "\n",
     "history = conv_sdf_model.fit(sdf_training_dataset, epochs=5, steps_per_epoch=train_batches,\n",
     "          validation_data=sdf_validation_dataset, validation_steps=validation_batches)\n",

+ 5 - 5
hpc_ai/ai_science_cfd/English/python/source_code/dataset.py

@@ -27,27 +27,27 @@
 import gdown
 import os
 ## CFD TRAIN DATASET
-url = 'https://drive.google.com/uc?id=0BzsbU65NgrSuZDBMOW93OWpsMHM&export=download'
+url = 'https://drive.google.com/uc?id=1VZOPUG6mHsRYG58H_l3_LOPM4N4f9LiZ&export=download'
 output = '/workspace/python/jupyter_notebook/CFD/data/train.tfrecords'
 gdown.download(url, output, quiet=False,proxy=None)
 
 ## CFD TEST DATASET
-url = 'https://drive.google.com/uc?id=1WSJLK0cOQehixJ6Tf5k0eYDcb4RJ5mXv&export=download'
+url = 'https://drive.google.com/uc?id=1fTo0L0ckqGEeZjLwefBc4S5e28psixle&export=download'
 output = '/workspace/python/jupyter_notebook/CFD/data/test.tfrecords'
 gdown.download(url, output, quiet=False,proxy=None)
 
 ## CFD CONV_SDF MODEL
-url = 'https://drive.google.com/uc?id=1pfR0io1CZKvXArGk-nt2wciUoAN_6Z08&export=download'
+url = 'https://drive.google.com/uc?id=1ObX4jjhv2wkaTfI-ai09SyoOqVP20jAU&export=download'
 output = '/workspace/python/jupyter_notebook/CFD/conv_sdf_model.h5'
 gdown.download(url, output, quiet=False,proxy=None)
 
 ## CFD CONV MODEL
-url = 'https://drive.google.com/uc?id=1rFhqlQnTkzIyZocjAxMffucmS3FDI0_j&export=download'
+url = 'https://drive.google.com/uc?id=1xfw9C7PFrd3e_ef92ZZbRuK__ak7mo0f&export=download'
 output = '/workspace/python/jupyter_notebook/CFD/conv_model.h5'
 gdown.download(url, output, quiet=False,proxy=None)
 
 
 ## CFD TEST Dataset
-url = 'https://drive.google.com/uc?id=0BzsbU65NgrSuR2NRRjBRMDVHaDQ&export=download'
+url = 'https://drive.google.com/uc?id=1VG9jCTBcERytV7w5bHoaVIZSQOa-AlmU&export=download'
 output = '/workspace/python/jupyter_notebook/CFD/data/computed_car_flow.zip'
 gdown.cached_download(url, output, quiet=False,proxy=None,postprocess=gdown.extractall)
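All four Drive IDs are replaced here, presumably because the old links had expired or hit download quota. Depending on the gdown version, a dead or quota-limited link can fail silently or leave a stub file behind, so an existence check after the downloads is a cheap safeguard. A hypothetical addition, not part of this commit:

# Hypothetical sanity check (not in the commit): fail fast if any of the
# refreshed Drive IDs did not produce a usable file on disk.
import os

for path in ('/workspace/python/jupyter_notebook/CFD/data/train.tfrecords',
             '/workspace/python/jupyter_notebook/CFD/data/test.tfrecords',
             '/workspace/python/jupyter_notebook/CFD/conv_sdf_model.h5',
             '/workspace/python/jupyter_notebook/CFD/conv_model.h5'):
    assert os.path.isfile(path) and os.path.getsize(path) > 0, f"download failed: {path}"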

+ 1 - 1
hpc_ai/ai_science_climate/Dockerfile

@@ -5,7 +5,7 @@
 # Finally, open http://127.0.0.1:8888/
 
 # Select Base Image 
-FROM nvcr.io/nvidia/tensorflow:20.01-tf2-py3
+FROM nvcr.io/nvidia/tensorflow:21.05-tf2-py3
 # Update the repo
 RUN apt-get update -y
 # Install required dependencies

+ 1 - 1
hpc_ai/ai_science_climate/English/python/jupyter_notebook/Tropical_Cyclone_Intensity_Estimation/Competition.ipynb

@@ -201,7 +201,7 @@
    "outputs": [],
    "source": [
     "import numpy as np\n",
-    "np.random.seed(1337)\n",
+    "tf.random.set_seed(1337)\n",
     "\n",
     "import tensorflow.keras\n",
     "from tensorflow.keras.models import Sequential\n",

+ 2 - 3
hpc_ai/ai_science_climate/English/python/jupyter_notebook/Tropical_Cyclone_Intensity_Estimation/Countering_Data_Imbalance.ipynb

@@ -261,7 +261,7 @@
    "outputs": [],
    "source": [
     "import numpy as np\n",
-    "np.random.seed(1337)\n",
+    "tf.random.set_seed(1337)\n",
     "\n",
     "import tensorflow.keras\n",
     "from tensorflow.keras.models import Sequential\n",
@@ -313,8 +313,7 @@
     "\n",
     "#But Training our model from scratch will take a long time\n",
     "#So we will load a partially trained model to speedup the process \n",
-    "K.clear_session()\n",
-    "model = tf.keras.models.load_model(\"trained_16.h5\",custom_objects={'top2_acc': top2_acc})\n",
+    "model.load_weights(\"trained_16.h5\")\n",
     "\n",
     "# Optimizer\n",
     "sgd = tensorflow.keras.optimizers.SGD(lr=0.001, decay=1e-6, momentum=0.9)\n",

+ 2 - 3
hpc_ai/ai_science_climate/English/python/jupyter_notebook/Tropical_Cyclone_Intensity_Estimation/Manipulation_of_Image_Data_and_Category_Determination_using_Text_Data.ipynb

@@ -472,7 +472,7 @@
     "import os\n",
     "\n",
     "os.environ[\"CUDA_VISIBLE_DEVICES\"]=\"0\"\n",
-    "np.random.seed(1337)\n",
+    "tf.random.set_seed(1337)\n",
     "\n",
     "import tensorflow.keras\n",
     "from tensorflow.keras.models import Sequential\n",
@@ -541,8 +541,7 @@
     "\n",
     "#But Training our model from scratch will take a long time\n",
     "#So we will load a partially trained model to speedup the process \n",
-    "K.clear_session()\n",
-    "model = tf.keras.models.load_model(\"trained_16.h5\",custom_objects={'top2_acc': top2_acc})\n",
+    "model.load_weights(\"trained_16.h5\")\n",
     "\n",
     "# Optimizer\n",
     "sgd = tensorflow.keras.optimizers.SGD(lr=0.001, decay=1e-6, momentum=0.9)\n",

+ 2 - 2
hpc_ai/ai_science_climate/English/python/source_code/dataset.py

@@ -28,11 +28,11 @@ import gdown
 import os
 
 ## TC TL MODEL  
-url = 'https://drive.google.com/uc?id=1crZ7dHhMRuE_N-8NAzF7Nrb6AVfpxThj&export=download'
+url = 'https://drive.google.com/uc?id=1Rb9gKSDdLC8y8yMcDqeOHAGj9qA0mDLJ&export=download'
 output = '/workspace/python/jupyter_notebook/Tropical_Cyclone_Intensity_Estimation/trained_16.h5'
 gdown.download(url, output, quiet=False,proxy=None)
 ## TC Dataset  
-url = 'https://drive.google.com/uc?id=1x0vNpYMa4UM95svCL_Cxty8rzjQmG0Rz&export=download'
+url = 'https://drive.google.com/uc?id=1vMXpbWx_-DO8CNkG68eErzcREvfouT5d&export=download'
 output = '/workspace/python/jupyter_notebook/Tropical_Cyclone_Intensity_Estimation/dataset.zip'
 gdown.cached_download(url, output, quiet=False,proxy=None,postprocess=gdown.extractall)
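Unlike the plain gdown.download calls above, gdown.cached_download keeps a cached copy keyed on the output path, so re-running the script skips the download, and the postprocess=gdown.extractall hook unpacks dataset.zip once the file is in place. A rough, illustrative expansion of what that single call does (gdown's real implementation also validates the cache; details vary by version):

# Illustrative sketch of the cached_download + extractall behavior above;
# not a drop-in replacement (gdown's cache validation is skipped here).
import os, zipfile
import gdown

url = 'https://drive.google.com/uc?id=1vMXpbWx_-DO8CNkG68eErzcREvfouT5d&export=download'
output = '/workspace/python/jupyter_notebook/Tropical_Cyclone_Intensity_Estimation/dataset.zip'
if not os.path.exists(output):
    gdown.download(url, output, quiet=False)
with zipfile.ZipFile(output) as zf:
    zf.extractall(os.path.dirname(output))  # extract next to the zip, as gdown.extractall does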