lnalinaf committed
Commit 75c583d · 1 Parent(s): 9a72595

Upload code for launching networks

gennet_launch_ GTEx.ipynb ADDED
@@ -0,0 +1 @@
 
 
+ {"cells":[{"cell_type":"markdown","metadata":{"id":"u0zNE8cvnaBp"},"source":["## Installing GenNet"]},
{"cell_type":"markdown","metadata":{"id":"hwVxfoX_n4Ky"},"source":["All code cells that open with **%%bash** are run on the command line.\n","\n","The requirements for GenNet can be found in the README of the GitHub repository: https://github.com/ArnovanHilten/GenNet\n","\n","**Note:** GenNet can run on CPU and GPU. If you want to use GenNet with your GPU, make sure the installed CUDA version is compatible with the TensorFlow version."]},
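{"cell_type":"markdown","metadata":{},"source":["A quick sanity check (a sketch, not part of the original workflow): confirm that TensorFlow can see your GPU. An empty device list means GenNet will fall back to the CPU."]},
{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":["# Sketch: check GPU visibility (assumes TensorFlow is already installed, as on Colab)\n","import tensorflow as tf\n","print(tf.config.list_physical_devices('GPU'))  # [] -> no GPU visible, CPU will be used\n","print(tf.test.is_built_with_cuda())            # True if this TF build was compiled with CUDA\n"]},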
{"cell_type":"code","execution_count":1,"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"executionInfo":{"elapsed":1533,"status":"ok","timestamp":1684064105358,"user":{"displayName":"Alina F","userId":"17740631281554612245"},"user_tz":-180},"id":"Bsr7IHGEl9M_","outputId":"0af30584-ebcb-4411-84fe-9247efa96b78"},"outputs":[{"output_type":"stream","name":"stderr","text":["Cloning into 'GenNet'...\n"]}],"source":["%%bash\n","# git clone https://github.com/ArnovanHilten/GenNet.git  # the original repository\n","git clone https://github.com/lnalinaf/GenNet.git  # this notebook uses a fork"]},
{"cell_type":"markdown","metadata":{"id":"L8sqIS_zygOc"},"source":["Move into the GenNet directory"]},
{"cell_type":"markdown","metadata":{"id":"S-Gmfen7yjj-"},"source":["Install the requirements"]},
{"cell_type":"code","execution_count":2,"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"collapsed":true,"executionInfo":{"elapsed":58715,"status":"ok","timestamp":1684064303350,"user":{"displayName":"Alina F","userId":"17740631281554612245"},"user_tz":-180},"id":"HgOeF5vUmEe9","outputId":"bc3f1676-69cb-4a21-9d59-1f3f1442a03f"},"outputs":[{"output_type":"stream","name":"stdout","text":["Looking in indexes: https://pypi.org/simple, https://us-python.pkg.dev/colab-wheels/public/simple/\n","Requirement already satisfied: pip in /usr/local/lib/python3.10/dist-packages (23.1.2)\n","...\n","Collecting jupyter>=1.0.0 (from -r requirements_GenNet.txt (line 5))\n","Collecting protobuf<=3.20.2,>=3.11 (from -r requirements_GenNet.txt (line 18))\n","Collecting kaleido (from -r requirements_GenNet.txt (line 20))\n","Collecting tensorflow==2.8.0 (from -r requirements_GenNet.txt (line 21))\n","  Downloading tensorflow-2.8.0-cp310-cp310-manylinux2010_x86_64.whl (497.6 MB)\n","Collecting sklearn (from -r requirements_GenNet.txt (line 22))\n","Collecting bitarray (from -r requirements_GenNet.txt (line 23))\n","...\n","Successfully built sklearn\n","Installing collected packages: tf-estimator-nightly, sklearn, keras, kaleido, bitarray, tensorboard-data-server, qtpy, protobuf, keras-preprocessing, jedi, google-auth-oauthlib, tensorboard, qtconsole, tensorflow, jupyter\n","Successfully installed bitarray-2.7.3 google-auth-oauthlib-0.4.6 jedi-0.18.2 jupyter-1.0.0 kaleido-0.2.1 keras-2.8.0 keras-preprocessing-1.1.2 protobuf-3.20.2 qtconsole-5.4.3 qtpy-2.3.1 sklearn-0.0.post5 tensorboard-2.8.0 tensorboard-data-server-0.6.1 tensorflow-2.8.0 tf-estimator-nightly-2.8.0.dev2021122109\n"]},{"output_type":"stream","name":"stderr","text":["ERROR: pip's dependency resolver does not currently take into account all the packages that are installed. This behaviour is the source of the following dependency conflicts.\n","tensorflow-metadata 1.13.1 requires protobuf<5,>=3.20.3, but you have protobuf 3.20.2 which is incompatible.\n"]}],"source":["%%bash\n","pip3 install --upgrade pip  # upgrading pip is recommended but not required\n","cd GenNet  # navigate to the cloned folder\n","pip install -r requirements_GenNet.txt  # install all the requirements for GenNet"]},
{"cell_type":"code","execution_count":3,"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"executionInfo":{"elapsed":3050,"status":"ok","timestamp":1684064366961,"user":{"displayName":"Alina F","userId":"17740631281554612245"},"user_tz":-180},"id":"n3N5BRSGSkdE","outputId":"8b51bc7f-cf4f-4dab-b1c3-ebff7ab5837b"},"outputs":[{"output_type":"stream","name":"stdout","text":["= or more than 2.0: tensorflow version is 2.8.0\n"]}],"source":["import os\n","os.chdir(\"/content/GenNet\")\n","import sys\n","sys.path.append(\"/content/root_build/\")\n","import glob\n","import numpy as np\n","import pandas as pd\n","import matplotlib\n","\n","matplotlib.use('agg')\n","import tensorflow as tf\n","import tensorflow.keras as K\n","import scipy\n","import tables\n","tf.keras.backend.set_epsilon(0.0000001)\n","tf_version = tf.__version__  # ToDo: use packaging.version\n","if tf_version <= '1.13.1':\n","    from GenNet_utils.LocallyDirectedConnected import LocallyDirected1D\n","    print('= or less than 1.13.1: tensorflow version is', tf_version)\n","elif tf_version >= '2.0':\n","    from GenNet_utils.LocallyDirectedConnected_tf2 import LocallyDirected1D\n","    print('= or more than 2.0: tensorflow version is', tf_version)\n","else:\n","    print(\"unexpected tensorflow version\")\n","    from GenNet_utils.LocallyDirectedConnected_tf2 import LocallyDirected1D"]},
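{"cell_type":"markdown","metadata":{},"source":["The version check above compares version strings lexicographically (hence the ToDo in the code). A more robust comparison, as a sketch using packaging.version:"]},
{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":["# Sketch for the ToDo above: parse versions instead of comparing raw strings,\n","# so that e.g. '2.10' correctly compares greater than '2.8'.\n","from packaging import version\n","import tensorflow as tf\n","\n","if version.parse(tf.__version__) >= version.parse('2.0'):\n","    from GenNet_utils.LocallyDirectedConnected_tf2 import LocallyDirected1D\n","else:\n","    from GenNet_utils.LocallyDirectedConnected import LocallyDirected1D\n"]},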
{"cell_type":"code","execution_count":5,"metadata":{"id":"9MFJ6F7Ab4VD","executionInfo":{"status":"ok","timestamp":1684064380991,"user_tz":-180,"elapsed":798,"user":{"displayName":"Alina F","userId":"17740631281554612245"}}},"outputs":[],"source":["def layer_block(model, mask, i, regression):\n","    # one sparse layer: LocallyDirected1D with the given mask, then activation + batch norm\n","    if regression:\n","        activation_type = \"relu\"\n","    else:\n","        activation_type = \"tanh\"\n","\n","    model = LocallyDirected1D(mask=mask, filters=1, input_shape=(mask.shape[0], 1),\n","                              name=\"LocallyDirected_\" + str(i))(model)\n","    model = K.layers.Activation(activation_type)(model)\n","    model = K.layers.BatchNormalization(center=False, scale=False)(model)\n","    return model\n","\n","\n","def add_covariates(model, input_cov, num_covariates, regression, negative_values_ytrain, mean_ytrain):\n","    if num_covariates > 0:\n","        model = activation_layer(model, regression, negative_values_ytrain)\n","        model = K.layers.concatenate([model, input_cov], axis=1)\n","        model = K.layers.BatchNormalization(center=False, scale=False)(model)\n","        model = K.layers.Dense(units=1, bias_initializer=tf.keras.initializers.Constant(mean_ytrain))(model)\n","    return model\n","\n","\n","def activation_layer(model, regression, negative_values_ytrain):\n","    if regression:\n","        if negative_values_ytrain:\n","            model = K.layers.Activation(\"linear\")(model)\n","            print('using a linear activation function')\n","        else:\n","            model = K.layers.Activation(\"relu\")(model)\n","            print('using a relu activation function')\n","    else:\n","        model = K.layers.Activation(\"sigmoid\")(model)\n","\n","    return model\n","\n","\n","def create_network_from_npz(datapath,\n","                            inputsize,\n","                            genotype_path,\n","                            l1_value=0.01,\n","                            regression=False,\n","                            num_covariates=0,\n","                            mask_order=[]):\n","    print(\"Creating networks from npz masks\")\n","    print(\"regression\", regression)\n","    if regression:\n","        mean_ytrain, negative_values_ytrain = regression_properties(datapath)\n","    else:\n","        mean_ytrain = 0\n","        negative_values_ytrain = False\n","\n","    masks = []\n","    mask_shapes_x = []\n","    mask_shapes_y = []\n","\n","    print(mask_order)\n","\n","    if len(mask_order) > 0:  # if mask_order is defined, load the masks in this order\n","        for mask in mask_order:\n","            mask = scipy.sparse.load_npz(datapath + '/' + str(mask) + '.npz')\n","            masks.append(mask)\n","            mask_shapes_x.append(mask.shape[0])\n","            mask_shapes_y.append(mask.shape[1])\n","\n","        for x in range(len(masks) - 1):  # check that the masks fit each other\n","            assert mask_shapes_y[x] == mask_shapes_x[x + 1]\n","    else:\n","        # if mask_order is not defined, sort the masks by size\n","        for npz_path in glob.glob(datapath + '/*.npz'):\n","            mask = scipy.sparse.load_npz(npz_path)\n","            masks.append(mask)\n","            mask_shapes_x.append(mask.shape[0])\n","            mask_shapes_y.append(mask.shape[1])\n","\n","        for i in range(len(masks)):  # sort all the masks in the correct order\n","            argsort_x = np.argsort(mask_shapes_x)[::-1]\n","            argsort_y = np.argsort(mask_shapes_y)[::-1]\n","\n","            mask_shapes_x = np.array(mask_shapes_x)\n","            mask_shapes_y = np.array(mask_shapes_y)\n","            assert all(argsort_x == argsort_y)  # check that both dimensions have the same order\n","\n","            masks = [masks[i] for i in argsort_y]  # sort masks\n","            mask_shapes_x = mask_shapes_x[argsort_x]\n","            mask_shapes_y = mask_shapes_y[argsort_y]\n","\n","        for x in range(len(masks) - 1):  # check that the masks fit each other\n","            assert mask_shapes_y[x] == mask_shapes_x[x + 1]\n","    print('mask_shapes_x[0]', mask_shapes_x[0])\n","    assert mask_shapes_x[0] == inputsize\n","    print('mask_shapes_y[-1]', mask_shapes_y[-1])\n","    if mask_shapes_y[-1] == 1:  # should we end with a dense layer?\n","        all_masks_available = True\n","    else:\n","        all_masks_available = False\n","\n","    input_layer = K.Input((inputsize,), name='input_layer')\n","    input_cov = K.Input((num_covariates,), name='inputs_cov')\n","\n","    model = K.layers.Reshape(input_shape=(inputsize,), target_shape=(inputsize, 1))(input_layer)\n","\n","    for i in range(len(masks)):\n","        mask = masks[i]\n","        model = layer_block(model, mask, i, regression)\n","\n","    model = K.layers.Flatten()(model)\n","\n","    if all_masks_available:\n","        model = LocallyDirected1D(mask=masks[-1], filters=1, input_shape=(mask.shape[0], 1),\n","                                  name=\"output_layer\")(model)\n","    else:\n","        model = K.layers.Dense(units=1, name=\"output_layer\",\n","                               kernel_regularizer=tf.keras.regularizers.l1(l=l1_value))(model)\n","\n","    model = add_covariates(model, input_cov, num_covariates, regression, negative_values_ytrain, mean_ytrain)\n","\n","    output_layer = activation_layer(model, regression, negative_values_ytrain)\n","    model = K.Model(inputs=[input_layer, input_cov], outputs=output_layer)\n","\n","    print(model.summary())\n","\n","    return model, masks"]},
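{"cell_type":"markdown","metadata":{},"source":["create_network_from_npz chains sparse connectivity masks: each mask is a SciPy sparse matrix of shape (nodes_in, nodes_out), and consecutive masks have to fit (the columns of one mask equal the rows of the next). A toy sketch of what such a mask looks like (file name hypothetical):"]},
{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":["# Sketch: a toy connectivity mask in the format create_network_from_npz loads.\n","# Entry (i, j) = 1 connects input node i to output node j in LocallyDirected1D.\n","import numpy as np\n","import scipy.sparse\n","\n","toy_mask = scipy.sparse.csr_matrix(np.array([[1, 0],\n","                                             [1, 0],\n","                                             [0, 1]]))  # 3 inputs -> 2 outputs\n","scipy.sparse.save_npz('toy_mask.npz', toy_mask)  # hypothetical file name\n","print(toy_mask.shape)  # (3, 2): shape[1] here must equal shape[0] of the next mask\n"]},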
{"cell_type":"code","execution_count":7,"metadata":{"colab":{"base_uri":"https://localhost:8080/","height":144},"executionInfo":{"elapsed":13094,"status":"ok","timestamp":1684064899243,"user":{"displayName":"Alina F","userId":"17740631281554612245"},"user_tz":-180},"id":"12RtThI3TDzs","outputId":"9c960e91-bdea-4025-86a3-e5e9a710ae8a"},"outputs":[{"output_type":"display_data","data":{"text/plain":["<IPython.core.display.HTML object>"],"text/html":["[Colab file-upload widget omitted]"]},"metadata":{}},{"output_type":"stream","name":"stdout","text":["Saving mask_gene_local.npz to mask_gene_local.npz\n","Saving mask_local_mid.npz to mask_local_mid.npz\n","Saving mask_mid_global.npz to mask_mid_global.npz\n"]}],"source":["# upload masks\n","from google.colab import files\n","uploaded = files.upload()"]},
{"cell_type":"code","execution_count":9,"metadata":{"colab":{"base_uri":"https://localhost:8080/","height":74},"executionInfo":{"elapsed":919464,"status":"ok","timestamp":1684067172906,"user":{"displayName":"Alina F","userId":"17740631281554612245"},"user_tz":-180},"id":"lu-oDA-NYtFQ","outputId":"b9531ec9-17ba-440b-9c2a-0532cb119a44"},"outputs":[{"output_type":"display_data","data":{"text/plain":["<IPython.core.display.HTML object>"],"text/html":["[Colab file-upload widget omitted]"]},"metadata":{}},{"output_type":"stream","name":"stdout","text":["Saving bestweight_job_hypertension.h5 to bestweight_job_hypertension.h5\n"]}],"source":["# upload bestweights\n","from google.colab import files\n","uploaded = files.upload()"]},
{"cell_type":"code","execution_count":10,"metadata":{"id":"bODh5UtsZaWd","executionInfo":{"status":"ok","timestamp":1684067202828,"user_tz":-180,"elapsed":2,"user":{"displayName":"Alina F","userId":"17740631281554612245"}}},"outputs":[],"source":["from tensorflow.python.framework.ops import disable_eager_execution\n","\n","disable_eager_execution()"]},
{"cell_type":"code","execution_count":11,"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"executionInfo":{"elapsed":86056,"status":"ok","timestamp":1684067291262,"user":{"displayName":"Alina F","userId":"17740631281554612245"},"user_tz":-180},"id":"T5dG7o4jUK2p","outputId":"4ef4ec4a-6d76-4cb2-a459-c6c1a54e9c7f"},"outputs":[{"output_type":"stream","name":"stdout","text":["Creating networks from npz masks\n","regression False\n","['UKBB_sparse_connection_mask_ensmb_alligned', 'gene_ensmbl_GTEx_mask_tstat']\n","mask_shapes_x[0] 6986636\n","mask_shapes_y[-1] 
53\n"]},{"output_type":"stream","name":"stderr","text":["WARNING:tensorflow:From /usr/local/lib/python3.10/dist-packages/keras/layers/normalization/batch_normalization.py:532: _colocate_with (from tensorflow.python.framework.ops) is deprecated and will be removed in a future version.\n","Instructions for updating:\n","Colocations handled automatically by placer.\n"]},{"output_type":"stream","name":"stdout","text":["Model: \"model\"\n","__________________________________________________________________________________________________\n"," Layer (type) Output Shape Param # Connected to \n","==================================================================================================\n"," input_layer (InputLayer) [(None, 6986636)] 0 [] \n"," \n"," reshape (Reshape) (None, 6986636, 1) 0 ['input_layer[0][0]'] \n"," \n"," activation (Activation) (None, 21476, 1) 0 ['LocallyDirected_0[0][0]'] \n"," \n"," batch_normalization (BatchNorm (None, 21476, 1) 2 ['activation[0][0]'] \n"," alization) \n"," \n"," activation_1 (Activation) (None, 53, 1) 0 ['LocallyDirected_1[0][0]'] \n"," \n"," batch_normalization_1 (BatchNo (None, 53, 1) 2 ['activation_1[0][0]'] \n"," rmalization) \n"," \n"," flatten (Flatten) (None, 53) 0 ['batch_normalization_1[0][0]'] \n"," \n"," output_layer (Dense) (None, 1) 54 ['flatten[0][0]'] \n"," \n"," inputs_cov (InputLayer) [(None, 0)] 0 [] \n"," \n"," activation_2 (Activation) (None, 1) 0 ['output_layer[0][0]'] \n"," \n","==================================================================================================\n","Total params: 6,748,586\n","Trainable params: 6,748,582\n","Non-trainable params: 4\n","__________________________________________________________________________________________________\n","None\n"]}],"source":["\"experiment\"\n","datapath = '/content/GenNet/'\n","inputsize = 6986636\n","num_covariates = 0\n","genotype_path = datapath\n","l1_value = 0.001\n","model, masks = create_network_from_npz(datapath=datapath, inputsize=inputsize, genotype_path=genotype_path,mask_order=['UKBB_sparse_connection_mask_ensmb_alligned', 'gene_ensmbl_GTEx_mask_tstat'],\n"," l1_value=l1_value, regression=False, num_covariates=num_covariates, )"]},{"cell_type":"code","execution_count":12,"metadata":{"colab":{"base_uri":"https://localhost:8080/","height":318},"executionInfo":{"elapsed":18,"status":"error","timestamp":1684067461934,"user":{"displayName":"Alina F","userId":"17740631281554612245"},"user_tz":-180},"id":"VAlSA3JGFB5j","outputId":"2d700685-7568-4184-cf6b-f3c91d498057"},"outputs":[{"output_type":"error","ename":"ValueError","evalue":"ignored","traceback":["\u001b[0;31m---------------------------------------------------------------------------\u001b[0m","\u001b[0;31mValueError\u001b[0m Traceback (most recent call last)","\u001b[0;32m<ipython-input-12-ed276241e353>\u001b[0m in \u001b[0;36m<cell line: 1>\u001b[0;34m()\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mmodel\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mload_weights\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'/content/GenNet/bestweight_job_hypertension.h5'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m","\u001b[0;32m/usr/local/lib/python3.10/dist-packages/keras/engine/training_v1.py\u001b[0m in \u001b[0;36mload_weights\u001b[0;34m(self, filepath, by_name, skip_mismatch)\u001b[0m\n\u001b[1;32m 212\u001b[0m raise ValueError('Load weights is not yet supported with TPUStrategy '\n\u001b[1;32m 213\u001b[0m 'with steps_per_run greater than 1.')\n\u001b[0;32m--> 
214\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0msuper\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mModel\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mload_weights\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfilepath\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mby_name\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mskip_mismatch\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 215\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 216\u001b[0m \u001b[0;34m@\u001b[0m\u001b[0mtf\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m__internal__\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtracking\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mno_automatic_dependency_tracking\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;32m/usr/local/lib/python3.10/dist-packages/keras/utils/traceback_utils.py\u001b[0m in \u001b[0;36merror_handler\u001b[0;34m(*args, **kwargs)\u001b[0m\n\u001b[1;32m 65\u001b[0m \u001b[0;32mexcept\u001b[0m \u001b[0mException\u001b[0m \u001b[0;32mas\u001b[0m \u001b[0me\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0;31m# pylint: disable=broad-except\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 66\u001b[0m \u001b[0mfiltered_tb\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0m_process_traceback_frames\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0me\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m__traceback__\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 67\u001b[0;31m \u001b[0;32mraise\u001b[0m \u001b[0me\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mwith_traceback\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfiltered_tb\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mfrom\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 68\u001b[0m \u001b[0;32mfinally\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 69\u001b[0m \u001b[0;32mdel\u001b[0m \u001b[0mfiltered_tb\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;32m/usr/local/lib/python3.10/dist-packages/keras/saving/hdf5_format.py\u001b[0m in \u001b[0;36mload_weights_from_hdf5_group\u001b[0;34m(f, model)\u001b[0m\n\u001b[1;32m 726\u001b[0m \u001b[0mlayer_names\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mfiltered_layer_names\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 727\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mlen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mlayer_names\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m!=\u001b[0m \u001b[0mlen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfiltered_layers\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 728\u001b[0;31m raise ValueError(\n\u001b[0m\u001b[1;32m 729\u001b[0m \u001b[0;34mf'Layer count mismatch when loading weights from file. '\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 730\u001b[0m \u001b[0;34mf'Model expected {len(filtered_layers)} layers, found '\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;31mValueError\u001b[0m: Layer count mismatch when loading weights from file. 
Model expected 3 layers, found 5 saved layers."]}],"source":["model.load_weights('/content/GenNet/bestweight_job_hypertension.h5')"]},
{"cell_type":"code","execution_count":null,"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"executionInfo":{"elapsed":18359,"status":"ok","timestamp":1683823851299,"user":{"displayName":"Alina F","userId":"17740631281554612245"},"user_tz":-180},"id":"DV0kBxoihpPm","outputId":"45f088d2-875a-46f2-9b02-88c050bdb3c4"},"outputs":[{"name":"stdout","output_type":"stream","text":["Model: \"model_7\"\n","__________________________________________________________________________________________________\n"," Layer (type)                                Output Shape        Param #   Connected to\n","==================================================================================================\n"," input_layer (InputLayer)                    [(None, 6986636)]   0         []\n"," reshape_7 (Reshape)                         (None, 6986636, 1)  0         ['input_layer[0][0]']\n"," activation_15 (Activation)                  (None, 21476, 1)    0         ['LocallyDirected_0[0][0]']\n"," batch_normalization_8 (BatchNormalization)  (None, 21476, 1)    2         ['activation_15[0][0]']\n"," activation_16 (Activation)                  (None, 292, 1)      0         ['LocallyDirected_1[0][0]']\n"," batch_normalization_9 (BatchNormalization)  (None, 292, 1)      2         ['activation_16[0][0]']\n"," flatten_7 (Flatten)                         (None, 292)         0         ['batch_normalization_9[0][0]']\n"," output_layer (Dense)                        (None, 1)           293       ['flatten_7[0][0]']\n"," inputs_cov (InputLayer)                     [(None, 0)]         0         []\n"," activation_17 (Activation)                  (None, 1)           0         ['output_layer[0][0]']\n","==================================================================================================\n","Total params: 6,984,918\n","Trainable params: 6,984,914\n","Non-trainable params: 4\n","__________________________________________________________________________________________________\n"]}],"source":["model.summary()"]},
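{"cell_type":"markdown","metadata":{},"source":["The load_weights call above fails with a layer-count mismatch: the rebuilt network does not match the one the checkpoint was trained on (note the two summaries differ, e.g. 53 vs. 292 nodes after the second LocallyDirected layer), so the masks passed to create_network_from_npz must be the same masks used during training. As a diagnostic sketch (not the author's fix), Keras can load weights by layer name and skip mismatches:"]},
{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":["# Sketch: diagnose the mismatch by loading weights by layer name and skipping\n","# layers whose shapes do not match, instead of failing on the layer-count check.\n","model.load_weights('/content/GenNet/bestweight_job_hypertension.h5',\n","                   by_name=True, skip_mismatch=True)\n"]},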
Please rerun this cell to enable.\n"," </output>\n"," <script>// Copyright 2017 Google LLC\n","//\n","// Licensed under the Apache License, Version 2.0 (the \"License\");\n","// you may not use this file except in compliance with the License.\n","// You may obtain a copy of the License at\n","//\n","// http://www.apache.org/licenses/LICENSE-2.0\n","//\n","// Unless required by applicable law or agreed to in writing, software\n","// distributed under the License is distributed on an \"AS IS\" BASIS,\n","// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n","// See the License for the specific language governing permissions and\n","// limitations under the License.\n","\n","/**\n"," * @fileoverview Helpers for google.colab Python module.\n"," */\n","(function(scope) {\n","function span(text, styleAttributes = {}) {\n"," const element = document.createElement('span');\n"," element.textContent = text;\n"," for (const key of Object.keys(styleAttributes)) {\n"," element.style[key] = styleAttributes[key];\n"," }\n"," return element;\n","}\n","\n","// Max number of bytes which will be uploaded at a time.\n","const MAX_PAYLOAD_SIZE = 100 * 1024;\n","\n","function _uploadFiles(inputId, outputId) {\n"," const steps = uploadFilesStep(inputId, outputId);\n"," const outputElement = document.getElementById(outputId);\n"," // Cache steps on the outputElement to make it available for the next call\n"," // to uploadFilesContinue from Python.\n"," outputElement.steps = steps;\n","\n"," return _uploadFilesContinue(outputId);\n","}\n","\n","// This is roughly an async generator (not supported in the browser yet),\n","// where there are multiple asynchronous steps and the Python side is going\n","// to poll for completion of each step.\n","// This uses a Promise to block the python side on completion of each step,\n","// then passes the result of the previous step as the input to the next step.\n","function _uploadFilesContinue(outputId) {\n"," const outputElement = document.getElementById(outputId);\n"," const steps = outputElement.steps;\n","\n"," const next = steps.next(outputElement.lastPromiseValue);\n"," return Promise.resolve(next.value.promise).then((value) => {\n"," // Cache the last promise value to make it available to the next\n"," // step of the generator.\n"," outputElement.lastPromiseValue = value;\n"," return next.value.response;\n"," });\n","}\n","\n","/**\n"," * Generator function which is called between each async step of the upload\n"," * process.\n"," * @param {string} inputId Element ID of the input file picker element.\n"," * @param {string} outputId Element ID of the output display.\n"," * @return {!Iterable<!Object>} Iterable of next steps.\n"," */\n","function* uploadFilesStep(inputId, outputId) {\n"," const inputElement = document.getElementById(inputId);\n"," inputElement.disabled = false;\n","\n"," const outputElement = document.getElementById(outputId);\n"," outputElement.innerHTML = '';\n","\n"," const pickedPromise = new Promise((resolve) => {\n"," inputElement.addEventListener('change', (e) => {\n"," resolve(e.target.files);\n"," });\n"," });\n","\n"," const cancel = document.createElement('button');\n"," inputElement.parentElement.appendChild(cancel);\n"," cancel.textContent = 'Cancel upload';\n"," const cancelPromise = new Promise((resolve) => {\n"," cancel.onclick = () => {\n"," resolve(null);\n"," };\n"," });\n","\n"," // Wait for the user to pick the files.\n"," const files = yield {\n"," promise: Promise.race([pickedPromise, cancelPromise]),\n"," 
response: {\n"," action: 'starting',\n"," }\n"," };\n","\n"," cancel.remove();\n","\n"," // Disable the input element since further picks are not allowed.\n"," inputElement.disabled = true;\n","\n"," if (!files) {\n"," return {\n"," response: {\n"," action: 'complete',\n"," }\n"," };\n"," }\n","\n"," for (const file of files) {\n"," const li = document.createElement('li');\n"," li.append(span(file.name, {fontWeight: 'bold'}));\n"," li.append(span(\n"," `(${file.type || 'n/a'}) - ${file.size} bytes, ` +\n"," `last modified: ${\n"," file.lastModifiedDate ? file.lastModifiedDate.toLocaleDateString() :\n"," 'n/a'} - `));\n"," const percent = span('0% done');\n"," li.appendChild(percent);\n","\n"," outputElement.appendChild(li);\n","\n"," const fileDataPromise = new Promise((resolve) => {\n"," const reader = new FileReader();\n"," reader.onload = (e) => {\n"," resolve(e.target.result);\n"," };\n"," reader.readAsArrayBuffer(file);\n"," });\n"," // Wait for the data to be ready.\n"," let fileData = yield {\n"," promise: fileDataPromise,\n"," response: {\n"," action: 'continue',\n"," }\n"," };\n","\n"," // Use a chunked sending to avoid message size limits. See b/62115660.\n"," let position = 0;\n"," do {\n"," const length = Math.min(fileData.byteLength - position, MAX_PAYLOAD_SIZE);\n"," const chunk = new Uint8Array(fileData, position, length);\n"," position += length;\n","\n"," const base64 = btoa(String.fromCharCode.apply(null, chunk));\n"," yield {\n"," response: {\n"," action: 'append',\n"," file: file.name,\n"," data: base64,\n"," },\n"," };\n","\n"," let percentDone = fileData.byteLength === 0 ?\n"," 100 :\n"," Math.round((position / fileData.byteLength) * 100);\n"," percent.textContent = `${percentDone}% done`;\n","\n"," } while (position < fileData.byteLength);\n"," }\n","\n"," // All done.\n"," yield {\n"," response: {\n"," action: 'complete',\n"," }\n"," };\n","}\n","\n","scope.google = scope.google || {};\n","scope.google.colab = scope.google.colab || {};\n","scope.google.colab._files = {\n"," _uploadFiles,\n"," _uploadFilesContinue,\n","};\n","})(self);\n","</script> "],"text/plain":["<IPython.core.display.HTML object>"]},"metadata":{},"output_type":"display_data"},{"name":"stdout","output_type":"stream","text":["Saving genotype.h5 to genotype.h5\n"]}],"source":["#upload genotype.h5\n","from google.colab import files\n","uploaded = files.upload()"]},{"cell_type":"code","source":["def get_testdata(datapath):\n"," # ytest = pd.read_csv(datapath + \"ytest_\"+studyname+\".csv\")\n"," h5file = tables.open_file(datapath + studyname + '_genotype_processed.h5', \"r\")\n"," # ybatch = ytest[\"labels\"]\n"," # xbatchid = np.array(ytest[\"tot_index\"].values, dtype=np.int64)\n"," xbatch = h5file.root.data[:]\n"," # ybatch = np.reshape(np.array(ybatch), (-1, 1))\n"," h5file.close()\n"," return xbatch"],"metadata":{"id":"k2BK2CDpA7N-"},"execution_count":null,"outputs":[]},{"cell_type":"code","execution_count":null,"metadata":{"id":"jkvt7OSih7rx"},"outputs":[],"source":["xtest = get_testdata(datapath)\n","pred = model.predict(xtest)\n","print('model prediction: ', pred)"]},{"cell_type":"code","execution_count":null,"metadata":{"id":"Rehan6kHsWnl"},"outputs":[],"source":["!python GenNet.py plot --help"]},{"cell_type":"code","execution_count":null,"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"executionInfo":{"elapsed":7386,"status":"ok","timestamp":1678446242564,"user":{"displayName":"Alina 
F","userId":"17740631281554612245"},"user_tz":-240},"id":"KHFvo68gMGlk","outputId":"56d40c38-9248-4d6a-b090-0dc3197b6e7c"},"outputs":[{"name":"stdout","output_type":"stream","text":["2023-03-10 11:03:58.890805: W tensorflow/compiler/xla/stream_executor/platform/default/dso_loader.cc:64] Could not load dynamic library 'libnvinfer.so.7'; dlerror: libnvinfer.so.7: cannot open shared object file: No such file or directory; LD_LIBRARY_PATH: /usr/local/nvidia/lib:/usr/local/nvidia/lib64\n","2023-03-10 11:03:58.891032: W tensorflow/compiler/xla/stream_executor/platform/default/dso_loader.cc:64] Could not load dynamic library 'libnvinfer_plugin.so.7'; dlerror: libnvinfer_plugin.so.7: cannot open shared object file: No such file or directory; LD_LIBRARY_PATH: /usr/local/nvidia/lib:/usr/local/nvidia/lib64\n","2023-03-10 11:03:58.891063: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Cannot dlopen some TensorRT libraries. If you would like to use Nvidia GPU with TensorRT, please make sure the missing libraries mentioned above are installed properly.\n","bitarray failed to import this might give some trouble converting binary files\n","Resultspath did not exist but is made now\n","Traceback (most recent call last):\n"," File \"/content/GenNet/GenNet.py\", line 284, in <module>\n"," main()\n"," File \"/content/GenNet/GenNet.py\", line 26, in main\n"," plot(args)\n"," File \"/content/GenNet/GenNet_utils/Create_plots.py\", line 233, in plot\n"," importance_csv = pd.read_csv(resultpath + \"/connection_weights.csv\", index_col=0)\n"," File \"/usr/local/lib/python3.9/dist-packages/pandas/util/_decorators.py\", line 311, in wrapper\n"," return func(*args, **kwargs)\n"," File \"/usr/local/lib/python3.9/dist-packages/pandas/io/parsers/readers.py\", line 586, in read_csv\n"," return _read(filepath_or_buffer, kwds)\n"," File \"/usr/local/lib/python3.9/dist-packages/pandas/io/parsers/readers.py\", line 482, in _read\n"," parser = TextFileReader(filepath_or_buffer, **kwds)\n"," File \"/usr/local/lib/python3.9/dist-packages/pandas/io/parsers/readers.py\", line 811, in __init__\n"," self._engine = self._make_engine(self.engine)\n"," File \"/usr/local/lib/python3.9/dist-packages/pandas/io/parsers/readers.py\", line 1040, in _make_engine\n"," return mapping[engine](self.f, **self.options) # type: ignore[call-arg]\n"," File \"/usr/local/lib/python3.9/dist-packages/pandas/io/parsers/c_parser_wrapper.py\", line 51, in __init__\n"," self._open_handles(src, kwds)\n"," File \"/usr/local/lib/python3.9/dist-packages/pandas/io/parsers/base_parser.py\", line 222, in _open_handles\n"," self.handles = get_handle(\n"," File \"/usr/local/lib/python3.9/dist-packages/pandas/io/common.py\", line 702, in get_handle\n"," handle = open(\n","FileNotFoundError: [Errno 2] No such file or directory: '/content/GenNet/results/GenNet_experiment_100001_//connection_weights.csv'\n"]}],"source":["!python GenNet.py plot -type sunburst -ID 100001"]}],"metadata":{"colab":{"provenance":[{"file_id":"1VUJ29Eo01uViqzTJb4GV6LvxjzCg054l","timestamp":1684064001183}],"authorship_tag":"ABX9TyOawxaE/TmOkeswW4vFYxmX"},"kernelspec":{"display_name":"Python 3","name":"python3"},"language_info":{"name":"python"}},"nbformat":4,"nbformat_minor":0}
gennet_launch_GTEx.py ADDED
@@ -0,0 +1,179 @@
+ import os
+ os.chdir("/data/public/GenNet")
+ import sys
+ import glob
+ import numpy as np
+ import pandas as pd
+ #sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+ import matplotlib
+ matplotlib.use('agg')
+ import tensorflow as tf
+ import tensorflow.keras as K
+ import scipy
+ import tables
+
+ tf.keras.backend.set_epsilon(0.0000001)  # Keras backend fuzz factor
+ tf_version = tf.__version__  # ToDo use packaging.version
+ if tf_version <= '1.13.1':
+     from GenNet_utils.LocallyDirectedConnected import LocallyDirected1D
+     print('= or less than 1.13.1: tensorflow version is', tf_version)
+ elif tf_version >= '2.0':
+     from GenNet_utils.LocallyDirectedConnected_tf2 import LocallyDirected1D
+     print('= or more than 2.0: tensorflow version is', tf_version)
+ else:
+     print("unexpected tensorflow version")
+     from GenNet_utils.LocallyDirectedConnected_tf2 import LocallyDirected1D
+
+ studyname = 'test_GTEx'
+
+ def layer_block(model, mask, i, regression):
+     # relu for regression, tanh for classification
+     if regression:
+         activation_type = "relu"
+     else:
+         activation_type = "tanh"
+
+     model = LocallyDirected1D(mask=mask, filters=1, input_shape=(mask.shape[0], 1),
+                               name="LocallyDirected_" + str(i))(model)
+     model = K.layers.Activation(activation_type)(model)
+     model = K.layers.BatchNormalization(center=False, scale=False)(model)
+     return model
+
+
+ def add_covariates(model, input_cov, num_covariates, regression, negative_values_ytrain, mean_ytrain):
+     if num_covariates > 0:
+         model = activation_layer(model, regression, negative_values_ytrain)
+         model = K.layers.concatenate([model, input_cov], axis=1)
+         model = K.layers.BatchNormalization(center=False, scale=False)(model)
+         model = K.layers.Dense(units=1, bias_initializer=tf.keras.initializers.Constant(mean_ytrain))(model)
+     return model
+
+
+ def activation_layer(model, regression, negative_values_ytrain):
+     if regression:
+         if negative_values_ytrain:
+             model = K.layers.Activation("linear")(model)
+             print('using a linear activation function')
+         else:
+             model = K.layers.Activation("relu")(model)
+             print('using a relu activation function')
+     else:
+         model = K.layers.Activation("sigmoid")(model)
+
+     return model
+
+ def create_network_from_npz(datapath,
+                             inputsize,
+                             genotype_path,
+                             l1_value=0.01,
+                             regression=False,
+                             num_covariates=0,
+                             mask_order=[]):
+     print("Creating networks from npz masks")
+     print("regression", regression)
+     if regression:
+         mean_ytrain, negative_values_ytrain = regression_properties(datapath)
+     else:
+         mean_ytrain = 0
+         negative_values_ytrain = False
+
+     masks = []
+     mask_shapes_x = []
+     mask_shapes_y = []
+
+     print(mask_order)
+
+     if len(mask_order) > 0:  # if mask_order is defined we use this order
+         for mask in mask_order:
+             mask = scipy.sparse.load_npz(datapath + '/' + str(mask) + '.npz')
+             masks.append(mask)
+             mask_shapes_x.append(mask.shape[0])
+             mask_shapes_y.append(mask.shape[1])
+
+         for x in range(len(masks) - 1):  # check that the masks fit each other
+             assert mask_shapes_y[x] == mask_shapes_x[x + 1]
+     else:
+         # if mask_order is not defined we sort the masks by size
+         for npz_path in glob.glob(datapath + '/*.npz'):
+             mask = scipy.sparse.load_npz(npz_path)
+             masks.append(mask)
+             mask_shapes_x.append(mask.shape[0])
+             mask_shapes_y.append(mask.shape[1])
+
+         for i in range(len(masks)):  # sort all the masks in the correct order
+             argsort_x = np.argsort(mask_shapes_x)[::-1]
+             argsort_y = np.argsort(mask_shapes_y)[::-1]
+
+             mask_shapes_x = np.array(mask_shapes_x)
+             mask_shapes_y = np.array(mask_shapes_y)
+             assert all(argsort_x == argsort_y)  # check that both dimensions have the same order
+
+             masks = [masks[i] for i in argsort_y]  # sort masks
+             mask_shapes_x = mask_shapes_x[argsort_x]
+             mask_shapes_y = mask_shapes_y[argsort_y]
+
+         for x in range(len(masks) - 1):  # check that the masks fit each other
+             assert mask_shapes_y[x] == mask_shapes_x[x + 1]
+     print('mask_shapes_x[0]', mask_shapes_x[0])
+     assert mask_shapes_x[0] == inputsize
+     print('mask_shapes_y[-1]', mask_shapes_y[-1])
+     if mask_shapes_y[-1] == 1:  # should we end with a dense layer?
+         all_masks_available = True
+     else:
+         all_masks_available = False
+
+     input_layer = K.Input((inputsize,), name='input_layer')
+     input_cov = K.Input((num_covariates,), name='inputs_cov')
+
+     model = K.layers.Reshape(input_shape=(inputsize,), target_shape=(inputsize, 1))(input_layer)
+
+     for i in range(len(masks)):
+         mask = masks[i]
+         model = layer_block(model, mask, i, regression)
+
+     model = K.layers.Flatten()(model)
+
+     if all_masks_available:
+         model = LocallyDirected1D(mask=masks[-1], filters=1, input_shape=(mask.shape[0], 1),
+                                   name="output_layer")(model)
+     else:
+         model = K.layers.Dense(units=1, name="output_layer",
+                                kernel_regularizer=tf.keras.regularizers.l1(l=l1_value)
+                                )(model)
+
+     model = add_covariates(model, input_cov, num_covariates, regression, negative_values_ytrain, mean_ytrain)
+
+     output_layer = activation_layer(model, regression, negative_values_ytrain)
+     model = K.Model(inputs=[input_layer, input_cov], outputs=output_layer)
+
+     print(model.summary())
+
+     return model, masks
+
+ def get_testdata(datapath):
+     # ytest = pd.read_csv(datapath + "ytest_"+studyname+".csv")
+     h5file = tables.open_file(datapath + studyname + '_genotype_processed.h5', "r")
+     # ybatch = ytest["labels"]
+     # xbatchid = np.array(ytest["tot_index"].values, dtype=np.int64)
+     xbatch = h5file.root.data[:]
+     # ybatch = np.reshape(np.array(ybatch), (-1, 1))
+     h5file.close()
+     return xbatch
+
+ def predict():
+     xtest = get_testdata(datapath)
+     # the model has two inputs (genotype and covariates); pass an empty covariate block
+     xcov = np.zeros((xtest.shape[0], num_covariates))
+     pred = model.predict([xtest, xcov])
+     print('model prediction: ', pred)
+
+ datapath = '/data/public/GenNet/processed_data/'
+ inputsize = 6986636
+ num_covariates = 0
+ genotype_path = datapath
+ l1_value = 0.001
+ model, masks = create_network_from_npz(datapath=datapath, inputsize=inputsize, genotype_path=genotype_path,
+                                        mask_order=['UKBB_sparse_connection_mask_ensmb_alligned', 'gene_ensmbl_GTEx_mask_tstat'],
+                                        l1_value=l1_value, regression=False, num_covariates=num_covariates)
+ model.load_weights(datapath + 'bestweight_job_hypertension.h5')
+ print('weights have been loaded')
+ predict()
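The mask chaining in `create_network_from_npz` is the part that most often trips up a rebuild: mask i connects `mask.shape[0]` input nodes to `mask.shape[1]` output nodes, so consecutive masks must agree on their shared dimension, and `masks[0].shape[0]` must equal `inputsize`. A minimal sketch with toy masks (the file names and sizes below are hypothetical, not from the commit):

    import numpy as np
    import scipy.sparse

    # 100 SNPs -> 10 genes, then 10 genes -> 3 pathways (toy sizes)
    snp_to_gene = scipy.sparse.random(100, 10, density=0.05, format='csr')
    gene_to_pathway = scipy.sparse.random(10, 3, density=0.3, format='csr')
    scipy.sparse.save_npz('snp_to_gene.npz', snp_to_gene)
    scipy.sparse.save_npz('gene_to_pathway.npz', gene_to_pathway)

    masks = [snp_to_gene, gene_to_pathway]
    for a, b in zip(masks, masks[1:]):
        assert a.shape[1] == b.shape[0]  # consecutive masks must chain
    # inputsize must equal masks[0].shape[0] (here 100); because
    # masks[-1].shape[1] != 1, the network would end in a Dense output layer.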
gennet_launch_Gene networks_RefGene.ipynb ADDED
@@ -0,0 +1 @@
+ {"cells":[{"cell_type":"markdown","metadata":{"id":"u0zNE8cvnaBp"},"source":["## Installing GenNet"]},{"cell_type":"markdown","metadata":{"id":"hwVxfoX_n4Ky"},"source":["All comments cells that open with **%%bash** are inputs for the command line.\n","\n","The requirements for GenNet can be found in the README of the github: https://github.com/ArnovanHilten/GenNet\n","\n","**Note:** GenNet can run on CPU and GPU. If you want to use GenNet with your GPU you need to ensure that you have CUDA installed that works with the tensorflow version."]},{"cell_type":"code","execution_count":1,"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"executionInfo":{"elapsed":2051,"status":"ok","timestamp":1684059586080,"user":{"displayName":"Alina F","userId":"17740631281554612245"},"user_tz":-180},"id":"Bsr7IHGEl9M_","outputId":"cf888275-87d0-4450-e242-90a1f8967ad7"},"outputs":[{"output_type":"stream","name":"stderr","text":["Cloning into 'GenNet'...\n"]}],"source":["%%bash\n","# git clone https://github.com/ArnovanHilten/GenNet.git # Cloning the repositorie\n","git clone https://github.com/lnalinaf/GenNet.git"]},{"cell_type":"markdown","metadata":{"id":"L8sqIS_zygOc"},"source":["Move into the GenNet directory"]},{"cell_type":"markdown","metadata":{"id":"S-Gmfen7yjj-"},"source":["Install the requirements"]},{"cell_type":"code","execution_count":2,"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"collapsed":true,"executionInfo":{"elapsed":84305,"status":"ok","timestamp":1684059684516,"user":{"displayName":"Alina F","userId":"17740631281554612245"},"user_tz":-180},"id":"HgOeF5vUmEe9","outputId":"32316b01-3450-4aac-a3e2-0e80230121be"},"outputs":[{"output_type":"stream","name":"stdout","text":["Looking in indexes: https://pypi.org/simple, https://us-python.pkg.dev/colab-wheels/public/simple/\n","Requirement already satisfied: pip in /usr/local/lib/python3.10/dist-packages (23.1.2)\n","Looking in indexes: https://pypi.org/simple, https://us-python.pkg.dev/colab-wheels/public/simple/\n","Requirement already satisfied: h5py>=2.10.0 in /usr/local/lib/python3.10/dist-packages (from -r requirements_GenNet.txt (line 1)) (3.8.0)\n","Requirement already satisfied: joblib>=0.16.0 in /usr/local/lib/python3.10/dist-packages (from -r requirements_GenNet.txt (line 2)) (1.2.0)\n","Requirement already satisfied: Markdown>=3.2.1 in /usr/local/lib/python3.10/dist-packages (from -r requirements_GenNet.txt (line 3)) (3.4.3)\n","Requirement already satisfied: matplotlib>=3.3.2 in /usr/local/lib/python3.10/dist-packages (from -r requirements_GenNet.txt (line 4)) (3.7.1)\n","Collecting jupyter>=1.0.0 (from -r requirements_GenNet.txt (line 5))\n"," Downloading jupyter-1.0.0-py2.py3-none-any.whl (2.7 kB)\n","Requirement already satisfied: numpy>=1.16.0 in /usr/local/lib/python3.10/dist-packages (from -r requirements_GenNet.txt (line 6)) (1.22.4)\n","Requirement already satisfied: pandas>=0.25.3 in /usr/local/lib/python3.10/dist-packages (from -r requirements_GenNet.txt (line 7)) (1.5.3)\n","Requirement already satisfied: Pillow>=7.2.0 in /usr/local/lib/python3.10/dist-packages (from -r requirements_GenNet.txt (line 8)) (8.4.0)\n","Requirement already satisfied: plotly>=4.12.0 in /usr/local/lib/python3.10/dist-packages (from -r requirements_GenNet.txt (line 9)) (5.13.1)\n","Requirement already satisfied: pyparsing>=2.4.7 in /usr/local/lib/python3.10/dist-packages (from -r requirements_GenNet.txt (line 10)) (3.0.9)\n","Requirement already satisfied: scikit-learn>=0.23.2 in /usr/local/lib/python3.10/dist-packages 
(from -r requirements_GenNet.txt (line 11)) (1.2.2)\n","Requirement already satisfied: scipy>=1.4.1 in /usr/local/lib/python3.10/dist-packages (from -r requirements_GenNet.txt (line 12)) (1.10.1)\n","Requirement already satisfied: seaborn>=0.10.1 in /usr/local/lib/python3.10/dist-packages (from -r requirements_GenNet.txt (line 13)) (0.12.2)\n","Requirement already satisfied: tables>=3.6.1 in /usr/local/lib/python3.10/dist-packages (from -r requirements_GenNet.txt (line 14)) (3.8.0)\n","Requirement already satisfied: tqdm>=4.49.0 in /usr/local/lib/python3.10/dist-packages (from -r requirements_GenNet.txt (line 15)) (4.65.0)\n","Requirement already satisfied: zipp>=3.1.0 in /usr/local/lib/python3.10/dist-packages (from -r requirements_GenNet.txt (line 16)) (3.15.0)\n","Requirement already satisfied: pytest>=6.2.5 in /usr/local/lib/python3.10/dist-packages (from -r requirements_GenNet.txt (line 17)) (7.2.2)\n","Collecting protobuf<=3.20.2,>=3.11 (from -r requirements_GenNet.txt (line 18))\n"," Downloading protobuf-3.20.2-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl (1.1 MB)\n"," ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 1.1/1.1 MB 56.2 MB/s eta 0:00:00\n","Requirement already satisfied: psutil in /usr/local/lib/python3.10/dist-packages (from -r requirements_GenNet.txt (line 19)) (5.9.5)\n","Collecting kaleido (from -r requirements_GenNet.txt (line 20))\n"," Downloading kaleido-0.2.1-py2.py3-none-manylinux1_x86_64.whl (79.9 MB)\n"," ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 79.9/79.9 MB 11.4 MB/s eta 0:00:00\n","Collecting tensorflow==2.8.0 (from -r requirements_GenNet.txt (line 21))\n"," Downloading tensorflow-2.8.0-cp310-cp310-manylinux2010_x86_64.whl (497.6 MB)\n"," ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 497.6/497.6 MB 2.9 MB/s eta 0:00:00\n","Collecting sklearn (from -r requirements_GenNet.txt (line 22))\n"," Downloading sklearn-0.0.post5.tar.gz (3.7 kB)\n"," Preparing metadata (setup.py): started\n"," Preparing metadata (setup.py): finished with status 'done'\n","Collecting bitarray (from -r requirements_GenNet.txt (line 23))\n"," Downloading bitarray-2.7.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (272 kB)\n"," ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 272.7/272.7 kB 33.6 MB/s eta 0:00:00\n","Requirement already satisfied: absl-py>=0.4.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow==2.8.0->-r requirements_GenNet.txt (line 21)) (1.4.0)\n","Requirement already satisfied: astunparse>=1.6.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow==2.8.0->-r requirements_GenNet.txt (line 21)) (1.6.3)\n","Requirement already satisfied: flatbuffers>=1.12 in /usr/local/lib/python3.10/dist-packages (from tensorflow==2.8.0->-r requirements_GenNet.txt (line 21)) (23.3.3)\n","Requirement already satisfied: gast>=0.2.1 in /usr/local/lib/python3.10/dist-packages (from tensorflow==2.8.0->-r requirements_GenNet.txt (line 21)) (0.4.0)\n","Requirement already satisfied: google-pasta>=0.1.1 in /usr/local/lib/python3.10/dist-packages (from tensorflow==2.8.0->-r requirements_GenNet.txt (line 21)) (0.2.0)\n","Collecting keras-preprocessing>=1.1.1 (from tensorflow==2.8.0->-r requirements_GenNet.txt (line 21))\n"," Downloading Keras_Preprocessing-1.1.2-py2.py3-none-any.whl (42 kB)\n"," ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 42.6/42.6 kB 5.3 MB/s eta 0:00:00\n","Requirement already satisfied: libclang>=9.0.1 in /usr/local/lib/python3.10/dist-packages (from tensorflow==2.8.0->-r requirements_GenNet.txt (line 21)) (16.0.0)\n","Requirement already satisfied: 
opt-einsum>=2.3.2 in /usr/local/lib/python3.10/dist-packages (from tensorflow==2.8.0->-r requirements_GenNet.txt (line 21)) (3.3.0)\n","Requirement already satisfied: setuptools in /usr/local/lib/python3.10/dist-packages (from tensorflow==2.8.0->-r requirements_GenNet.txt (line 21)) (67.7.2)\n","Requirement already satisfied: six>=1.12.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow==2.8.0->-r requirements_GenNet.txt (line 21)) (1.16.0)\n","Requirement already satisfied: termcolor>=1.1.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow==2.8.0->-r requirements_GenNet.txt (line 21)) (2.3.0)\n","Requirement already satisfied: typing-extensions>=3.6.6 in /usr/local/lib/python3.10/dist-packages (from tensorflow==2.8.0->-r requirements_GenNet.txt (line 21)) (4.5.0)\n","Requirement already satisfied: wrapt>=1.11.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow==2.8.0->-r requirements_GenNet.txt (line 21)) (1.14.1)\n","Collecting tensorboard<2.9,>=2.8 (from tensorflow==2.8.0->-r requirements_GenNet.txt (line 21))\n"," Downloading tensorboard-2.8.0-py3-none-any.whl (5.8 MB)\n"," ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 5.8/5.8 MB 45.6 MB/s eta 0:00:00\n","Collecting tf-estimator-nightly==2.8.0.dev2021122109 (from tensorflow==2.8.0->-r requirements_GenNet.txt (line 21))\n"," Downloading tf_estimator_nightly-2.8.0.dev2021122109-py2.py3-none-any.whl (462 kB)\n"," ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 462.5/462.5 kB 46.6 MB/s eta 0:00:00\n","Collecting keras<2.9,>=2.8.0rc0 (from tensorflow==2.8.0->-r requirements_GenNet.txt (line 21))\n"," Downloading keras-2.8.0-py2.py3-none-any.whl (1.4 MB)\n"," ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 1.4/1.4 MB 80.4 MB/s eta 0:00:00\n","Requirement already satisfied: tensorflow-io-gcs-filesystem>=0.23.1 in /usr/local/lib/python3.10/dist-packages (from tensorflow==2.8.0->-r requirements_GenNet.txt (line 21)) (0.32.0)\n","Requirement already satisfied: grpcio<2.0,>=1.24.3 in /usr/local/lib/python3.10/dist-packages (from tensorflow==2.8.0->-r requirements_GenNet.txt (line 21)) (1.54.0)\n","Requirement already satisfied: contourpy>=1.0.1 in /usr/local/lib/python3.10/dist-packages (from matplotlib>=3.3.2->-r requirements_GenNet.txt (line 4)) (1.0.7)\n","Requirement already satisfied: cycler>=0.10 in /usr/local/lib/python3.10/dist-packages (from matplotlib>=3.3.2->-r requirements_GenNet.txt (line 4)) (0.11.0)\n","Requirement already satisfied: fonttools>=4.22.0 in /usr/local/lib/python3.10/dist-packages (from matplotlib>=3.3.2->-r requirements_GenNet.txt (line 4)) (4.39.3)\n","Requirement already satisfied: kiwisolver>=1.0.1 in /usr/local/lib/python3.10/dist-packages (from matplotlib>=3.3.2->-r requirements_GenNet.txt (line 4)) (1.4.4)\n","Requirement already satisfied: packaging>=20.0 in /usr/local/lib/python3.10/dist-packages (from matplotlib>=3.3.2->-r requirements_GenNet.txt (line 4)) (23.1)\n","Requirement already satisfied: python-dateutil>=2.7 in /usr/local/lib/python3.10/dist-packages (from matplotlib>=3.3.2->-r requirements_GenNet.txt (line 4)) (2.8.2)\n","Requirement already satisfied: notebook in /usr/local/lib/python3.10/dist-packages (from jupyter>=1.0.0->-r requirements_GenNet.txt (line 5)) (6.4.8)\n","Collecting qtconsole (from jupyter>=1.0.0->-r requirements_GenNet.txt (line 5))\n"," Downloading qtconsole-5.4.3-py3-none-any.whl (121 kB)\n"," ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 121.9/121.9 kB 15.9 MB/s eta 0:00:00\n","Requirement already satisfied: jupyter-console in /usr/local/lib/python3.10/dist-packages (from 
jupyter>=1.0.0->-r requirements_GenNet.txt (line 5)) (6.1.0)\n","Requirement already satisfied: nbconvert in /usr/local/lib/python3.10/dist-packages (from jupyter>=1.0.0->-r requirements_GenNet.txt (line 5)) (6.5.4)\n","Requirement already satisfied: ipykernel in /usr/local/lib/python3.10/dist-packages (from jupyter>=1.0.0->-r requirements_GenNet.txt (line 5)) (5.5.6)\n","Requirement already satisfied: ipywidgets in /usr/local/lib/python3.10/dist-packages (from jupyter>=1.0.0->-r requirements_GenNet.txt (line 5)) (7.7.1)\n","Requirement already satisfied: pytz>=2020.1 in /usr/local/lib/python3.10/dist-packages (from pandas>=0.25.3->-r requirements_GenNet.txt (line 7)) (2022.7.1)\n","Requirement already satisfied: tenacity>=6.2.0 in /usr/local/lib/python3.10/dist-packages (from plotly>=4.12.0->-r requirements_GenNet.txt (line 9)) (8.2.2)\n","Requirement already satisfied: threadpoolctl>=2.0.0 in /usr/local/lib/python3.10/dist-packages (from scikit-learn>=0.23.2->-r requirements_GenNet.txt (line 11)) (3.1.0)\n","Requirement already satisfied: cython>=0.29.21 in /usr/local/lib/python3.10/dist-packages (from tables>=3.6.1->-r requirements_GenNet.txt (line 14)) (0.29.34)\n","Requirement already satisfied: numexpr>=2.6.2 in /usr/local/lib/python3.10/dist-packages (from tables>=3.6.1->-r requirements_GenNet.txt (line 14)) (2.8.4)\n","Requirement already satisfied: blosc2~=2.0.0 in /usr/local/lib/python3.10/dist-packages (from tables>=3.6.1->-r requirements_GenNet.txt (line 14)) (2.0.0)\n","Requirement already satisfied: py-cpuinfo in /usr/local/lib/python3.10/dist-packages (from tables>=3.6.1->-r requirements_GenNet.txt (line 14)) (9.0.0)\n","Requirement already satisfied: attrs>=19.2.0 in /usr/local/lib/python3.10/dist-packages (from pytest>=6.2.5->-r requirements_GenNet.txt (line 17)) (23.1.0)\n","Requirement already satisfied: iniconfig in /usr/local/lib/python3.10/dist-packages (from pytest>=6.2.5->-r requirements_GenNet.txt (line 17)) (2.0.0)\n","Requirement already satisfied: pluggy<2.0,>=0.12 in /usr/local/lib/python3.10/dist-packages (from pytest>=6.2.5->-r requirements_GenNet.txt (line 17)) (1.0.0)\n","Requirement already satisfied: exceptiongroup>=1.0.0rc8 in /usr/local/lib/python3.10/dist-packages (from pytest>=6.2.5->-r requirements_GenNet.txt (line 17)) (1.1.1)\n","Requirement already satisfied: tomli>=1.0.0 in /usr/local/lib/python3.10/dist-packages (from pytest>=6.2.5->-r requirements_GenNet.txt (line 17)) (2.0.1)\n","Requirement already satisfied: wheel<1.0,>=0.23.0 in /usr/local/lib/python3.10/dist-packages (from astunparse>=1.6.0->tensorflow==2.8.0->-r requirements_GenNet.txt (line 21)) (0.40.0)\n","Requirement already satisfied: msgpack in /usr/local/lib/python3.10/dist-packages (from blosc2~=2.0.0->tables>=3.6.1->-r requirements_GenNet.txt (line 14)) (1.0.5)\n","Requirement already satisfied: google-auth<3,>=1.6.3 in /usr/local/lib/python3.10/dist-packages (from tensorboard<2.9,>=2.8->tensorflow==2.8.0->-r requirements_GenNet.txt (line 21)) (2.17.3)\n","Collecting google-auth-oauthlib<0.5,>=0.4.1 (from tensorboard<2.9,>=2.8->tensorflow==2.8.0->-r requirements_GenNet.txt (line 21))\n"," Downloading google_auth_oauthlib-0.4.6-py2.py3-none-any.whl (18 kB)\n","Requirement already satisfied: requests<3,>=2.21.0 in /usr/local/lib/python3.10/dist-packages (from tensorboard<2.9,>=2.8->tensorflow==2.8.0->-r requirements_GenNet.txt (line 21)) (2.27.1)\n","Collecting tensorboard-data-server<0.7.0,>=0.6.0 (from tensorboard<2.9,>=2.8->tensorflow==2.8.0->-r requirements_GenNet.txt (line 
21))\n"," Downloading tensorboard_data_server-0.6.1-py3-none-manylinux2010_x86_64.whl (4.9 MB)\n"," ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 4.9/4.9 MB 100.8 MB/s eta 0:00:00\n","Requirement already satisfied: tensorboard-plugin-wit>=1.6.0 in /usr/local/lib/python3.10/dist-packages (from tensorboard<2.9,>=2.8->tensorflow==2.8.0->-r requirements_GenNet.txt (line 21)) (1.8.1)\n","Requirement already satisfied: werkzeug>=0.11.15 in /usr/local/lib/python3.10/dist-packages (from tensorboard<2.9,>=2.8->tensorflow==2.8.0->-r requirements_GenNet.txt (line 21)) (2.3.0)\n","Requirement already satisfied: ipython-genutils in /usr/local/lib/python3.10/dist-packages (from ipykernel->jupyter>=1.0.0->-r requirements_GenNet.txt (line 5)) (0.2.0)\n","Requirement already satisfied: ipython>=5.0.0 in /usr/local/lib/python3.10/dist-packages (from ipykernel->jupyter>=1.0.0->-r requirements_GenNet.txt (line 5)) (7.34.0)\n","Requirement already satisfied: traitlets>=4.1.0 in /usr/local/lib/python3.10/dist-packages (from ipykernel->jupyter>=1.0.0->-r requirements_GenNet.txt (line 5)) (5.7.1)\n","Requirement already satisfied: jupyter-client in /usr/local/lib/python3.10/dist-packages (from ipykernel->jupyter>=1.0.0->-r requirements_GenNet.txt (line 5)) (6.1.12)\n","Requirement already satisfied: tornado>=4.2 in /usr/local/lib/python3.10/dist-packages (from ipykernel->jupyter>=1.0.0->-r requirements_GenNet.txt (line 5)) (6.3.1)\n","Requirement already satisfied: widgetsnbextension~=3.6.0 in /usr/local/lib/python3.10/dist-packages (from ipywidgets->jupyter>=1.0.0->-r requirements_GenNet.txt (line 5)) (3.6.4)\n","Requirement already satisfied: jupyterlab-widgets>=1.0.0 in /usr/local/lib/python3.10/dist-packages (from ipywidgets->jupyter>=1.0.0->-r requirements_GenNet.txt (line 5)) (3.0.7)\n","Requirement already satisfied: prompt-toolkit!=3.0.0,!=3.0.1,<3.1.0,>=2.0.0 in /usr/local/lib/python3.10/dist-packages (from jupyter-console->jupyter>=1.0.0->-r requirements_GenNet.txt (line 5)) (3.0.38)\n","Requirement already satisfied: pygments in /usr/local/lib/python3.10/dist-packages (from jupyter-console->jupyter>=1.0.0->-r requirements_GenNet.txt (line 5)) (2.14.0)\n","Requirement already satisfied: lxml in /usr/local/lib/python3.10/dist-packages (from nbconvert->jupyter>=1.0.0->-r requirements_GenNet.txt (line 5)) (4.9.2)\n","Requirement already satisfied: beautifulsoup4 in /usr/local/lib/python3.10/dist-packages (from nbconvert->jupyter>=1.0.0->-r requirements_GenNet.txt (line 5)) (4.11.2)\n","Requirement already satisfied: bleach in /usr/local/lib/python3.10/dist-packages (from nbconvert->jupyter>=1.0.0->-r requirements_GenNet.txt (line 5)) (6.0.0)\n","Requirement already satisfied: defusedxml in /usr/local/lib/python3.10/dist-packages (from nbconvert->jupyter>=1.0.0->-r requirements_GenNet.txt (line 5)) (0.7.1)\n","Requirement already satisfied: entrypoints>=0.2.2 in /usr/local/lib/python3.10/dist-packages (from nbconvert->jupyter>=1.0.0->-r requirements_GenNet.txt (line 5)) (0.4)\n","Requirement already satisfied: jinja2>=3.0 in /usr/local/lib/python3.10/dist-packages (from nbconvert->jupyter>=1.0.0->-r requirements_GenNet.txt (line 5)) (3.1.2)\n","Requirement already satisfied: jupyter-core>=4.7 in /usr/local/lib/python3.10/dist-packages (from nbconvert->jupyter>=1.0.0->-r requirements_GenNet.txt (line 5)) (5.3.0)\n","Requirement already satisfied: jupyterlab-pygments in /usr/local/lib/python3.10/dist-packages (from nbconvert->jupyter>=1.0.0->-r requirements_GenNet.txt (line 5)) (0.2.2)\n","Requirement already 
satisfied: MarkupSafe>=2.0 in /usr/local/lib/python3.10/dist-packages (from nbconvert->jupyter>=1.0.0->-r requirements_GenNet.txt (line 5)) (2.1.2)\n","Requirement already satisfied: mistune<2,>=0.8.1 in /usr/local/lib/python3.10/dist-packages (from nbconvert->jupyter>=1.0.0->-r requirements_GenNet.txt (line 5)) (0.8.4)\n","Requirement already satisfied: nbclient>=0.5.0 in /usr/local/lib/python3.10/dist-packages (from nbconvert->jupyter>=1.0.0->-r requirements_GenNet.txt (line 5)) (0.7.4)\n","Requirement already satisfied: nbformat>=5.1 in /usr/local/lib/python3.10/dist-packages (from nbconvert->jupyter>=1.0.0->-r requirements_GenNet.txt (line 5)) (5.8.0)\n","Requirement already satisfied: pandocfilters>=1.4.1 in /usr/local/lib/python3.10/dist-packages (from nbconvert->jupyter>=1.0.0->-r requirements_GenNet.txt (line 5)) (1.5.0)\n","Requirement already satisfied: tinycss2 in /usr/local/lib/python3.10/dist-packages (from nbconvert->jupyter>=1.0.0->-r requirements_GenNet.txt (line 5)) (1.2.1)\n","Requirement already satisfied: pyzmq>=17 in /usr/local/lib/python3.10/dist-packages (from notebook->jupyter>=1.0.0->-r requirements_GenNet.txt (line 5)) (23.2.1)\n","Requirement already satisfied: argon2-cffi in /usr/local/lib/python3.10/dist-packages (from notebook->jupyter>=1.0.0->-r requirements_GenNet.txt (line 5)) (21.3.0)\n","Requirement already satisfied: nest-asyncio>=1.5 in /usr/local/lib/python3.10/dist-packages (from notebook->jupyter>=1.0.0->-r requirements_GenNet.txt (line 5)) (1.5.6)\n","Requirement already satisfied: Send2Trash>=1.8.0 in /usr/local/lib/python3.10/dist-packages (from notebook->jupyter>=1.0.0->-r requirements_GenNet.txt (line 5)) (1.8.0)\n","Requirement already satisfied: terminado>=0.8.3 in /usr/local/lib/python3.10/dist-packages (from notebook->jupyter>=1.0.0->-r requirements_GenNet.txt (line 5)) (0.17.1)\n","Requirement already satisfied: prometheus-client in /usr/local/lib/python3.10/dist-packages (from notebook->jupyter>=1.0.0->-r requirements_GenNet.txt (line 5)) (0.16.0)\n","Collecting qtpy>=2.0.1 (from qtconsole->jupyter>=1.0.0->-r requirements_GenNet.txt (line 5))\n"," Downloading QtPy-2.3.1-py3-none-any.whl (84 kB)\n"," ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 84.9/84.9 kB 11.8 MB/s eta 0:00:00\n","Requirement already satisfied: cachetools<6.0,>=2.0.0 in /usr/local/lib/python3.10/dist-packages (from google-auth<3,>=1.6.3->tensorboard<2.9,>=2.8->tensorflow==2.8.0->-r requirements_GenNet.txt (line 21)) (5.3.0)\n","Requirement already satisfied: pyasn1-modules>=0.2.1 in /usr/local/lib/python3.10/dist-packages (from google-auth<3,>=1.6.3->tensorboard<2.9,>=2.8->tensorflow==2.8.0->-r requirements_GenNet.txt (line 21)) (0.3.0)\n","Requirement already satisfied: rsa<5,>=3.1.4 in /usr/local/lib/python3.10/dist-packages (from google-auth<3,>=1.6.3->tensorboard<2.9,>=2.8->tensorflow==2.8.0->-r requirements_GenNet.txt (line 21)) (4.9)\n","Requirement already satisfied: requests-oauthlib>=0.7.0 in /usr/local/lib/python3.10/dist-packages (from google-auth-oauthlib<0.5,>=0.4.1->tensorboard<2.9,>=2.8->tensorflow==2.8.0->-r requirements_GenNet.txt (line 21)) (1.3.1)\n","Collecting jedi>=0.16 (from ipython>=5.0.0->ipykernel->jupyter>=1.0.0->-r requirements_GenNet.txt (line 5))\n"," Downloading jedi-0.18.2-py2.py3-none-any.whl (1.6 MB)\n"," ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 1.6/1.6 MB 85.2 MB/s eta 0:00:00\n","Requirement already satisfied: decorator in /usr/local/lib/python3.10/dist-packages (from ipython>=5.0.0->ipykernel->jupyter>=1.0.0->-r requirements_GenNet.txt 
(line 5)) (4.4.2)\n","Requirement already satisfied: pickleshare in /usr/local/lib/python3.10/dist-packages (from ipython>=5.0.0->ipykernel->jupyter>=1.0.0->-r requirements_GenNet.txt (line 5)) (0.7.5)\n","Requirement already satisfied: backcall in /usr/local/lib/python3.10/dist-packages (from ipython>=5.0.0->ipykernel->jupyter>=1.0.0->-r requirements_GenNet.txt (line 5)) (0.2.0)\n","Requirement already satisfied: matplotlib-inline in /usr/local/lib/python3.10/dist-packages (from ipython>=5.0.0->ipykernel->jupyter>=1.0.0->-r requirements_GenNet.txt (line 5)) (0.1.6)\n","Requirement already satisfied: pexpect>4.3 in /usr/local/lib/python3.10/dist-packages (from ipython>=5.0.0->ipykernel->jupyter>=1.0.0->-r requirements_GenNet.txt (line 5)) (4.8.0)\n","Requirement already satisfied: platformdirs>=2.5 in /usr/local/lib/python3.10/dist-packages (from jupyter-core>=4.7->nbconvert->jupyter>=1.0.0->-r requirements_GenNet.txt (line 5)) (3.3.0)\n","Requirement already satisfied: fastjsonschema in /usr/local/lib/python3.10/dist-packages (from nbformat>=5.1->nbconvert->jupyter>=1.0.0->-r requirements_GenNet.txt (line 5)) (2.16.3)\n","Requirement already satisfied: jsonschema>=2.6 in /usr/local/lib/python3.10/dist-packages (from nbformat>=5.1->nbconvert->jupyter>=1.0.0->-r requirements_GenNet.txt (line 5)) (4.3.3)\n","Requirement already satisfied: wcwidth in /usr/local/lib/python3.10/dist-packages (from prompt-toolkit!=3.0.0,!=3.0.1,<3.1.0,>=2.0.0->jupyter-console->jupyter>=1.0.0->-r requirements_GenNet.txt (line 5)) (0.2.6)\n","Requirement already satisfied: urllib3<1.27,>=1.21.1 in /usr/local/lib/python3.10/dist-packages (from requests<3,>=2.21.0->tensorboard<2.9,>=2.8->tensorflow==2.8.0->-r requirements_GenNet.txt (line 21)) (1.26.15)\n","Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.10/dist-packages (from requests<3,>=2.21.0->tensorboard<2.9,>=2.8->tensorflow==2.8.0->-r requirements_GenNet.txt (line 21)) (2022.12.7)\n","Requirement already satisfied: charset-normalizer~=2.0.0 in /usr/local/lib/python3.10/dist-packages (from requests<3,>=2.21.0->tensorboard<2.9,>=2.8->tensorflow==2.8.0->-r requirements_GenNet.txt (line 21)) (2.0.12)\n","Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.10/dist-packages (from requests<3,>=2.21.0->tensorboard<2.9,>=2.8->tensorflow==2.8.0->-r requirements_GenNet.txt (line 21)) (3.4)\n","Requirement already satisfied: ptyprocess in /usr/local/lib/python3.10/dist-packages (from terminado>=0.8.3->notebook->jupyter>=1.0.0->-r requirements_GenNet.txt (line 5)) (0.7.0)\n","Requirement already satisfied: argon2-cffi-bindings in /usr/local/lib/python3.10/dist-packages (from argon2-cffi->notebook->jupyter>=1.0.0->-r requirements_GenNet.txt (line 5)) (21.2.0)\n","Requirement already satisfied: soupsieve>1.2 in /usr/local/lib/python3.10/dist-packages (from beautifulsoup4->nbconvert->jupyter>=1.0.0->-r requirements_GenNet.txt (line 5)) (2.4.1)\n","Requirement already satisfied: webencodings in /usr/local/lib/python3.10/dist-packages (from bleach->nbconvert->jupyter>=1.0.0->-r requirements_GenNet.txt (line 5)) (0.5.1)\n","Requirement already satisfied: parso<0.9.0,>=0.8.0 in /usr/local/lib/python3.10/dist-packages (from jedi>=0.16->ipython>=5.0.0->ipykernel->jupyter>=1.0.0->-r requirements_GenNet.txt (line 5)) (0.8.3)\n","Requirement already satisfied: pyrsistent!=0.17.0,!=0.17.1,!=0.17.2,>=0.14.0 in /usr/local/lib/python3.10/dist-packages (from jsonschema>=2.6->nbformat>=5.1->nbconvert->jupyter>=1.0.0->-r requirements_GenNet.txt 
(line 5)) (0.19.3)\n","Requirement already satisfied: pyasn1<0.6.0,>=0.4.6 in /usr/local/lib/python3.10/dist-packages (from pyasn1-modules>=0.2.1->google-auth<3,>=1.6.3->tensorboard<2.9,>=2.8->tensorflow==2.8.0->-r requirements_GenNet.txt (line 21)) (0.5.0)\n","Requirement already satisfied: oauthlib>=3.0.0 in /usr/local/lib/python3.10/dist-packages (from requests-oauthlib>=0.7.0->google-auth-oauthlib<0.5,>=0.4.1->tensorboard<2.9,>=2.8->tensorflow==2.8.0->-r requirements_GenNet.txt (line 21)) (3.2.2)\n","Requirement already satisfied: cffi>=1.0.1 in /usr/local/lib/python3.10/dist-packages (from argon2-cffi-bindings->argon2-cffi->notebook->jupyter>=1.0.0->-r requirements_GenNet.txt (line 5)) (1.15.1)\n","Requirement already satisfied: pycparser in /usr/local/lib/python3.10/dist-packages (from cffi>=1.0.1->argon2-cffi-bindings->argon2-cffi->notebook->jupyter>=1.0.0->-r requirements_GenNet.txt (line 5)) (2.21)\n","Building wheels for collected packages: sklearn\n"," Building wheel for sklearn (setup.py): started\n"," Building wheel for sklearn (setup.py): finished with status 'done'\n"," Created wheel for sklearn: filename=sklearn-0.0.post5-py3-none-any.whl size=2950 sha256=ad2ea45b01146a4e48e19fe9c2211cf9c5ef04652940a94ea7b81f018d19ea20\n"," Stored in directory: /root/.cache/pip/wheels/38/1f/8d/4f812c590e074c1e928f5cec67bf5053b71f38e2648739403a\n","Successfully built sklearn\n","Installing collected packages: tf-estimator-nightly, sklearn, keras, kaleido, bitarray, tensorboard-data-server, qtpy, protobuf, keras-preprocessing, jedi, google-auth-oauthlib, tensorboard, qtconsole, tensorflow, jupyter\n"," Attempting uninstall: keras\n"," Found existing installation: keras 2.12.0\n"," Uninstalling keras-2.12.0:\n"," Successfully uninstalled keras-2.12.0\n"," Attempting uninstall: tensorboard-data-server\n"," Found existing installation: tensorboard-data-server 0.7.0\n"," Uninstalling tensorboard-data-server-0.7.0:\n"," Successfully uninstalled tensorboard-data-server-0.7.0\n"," Attempting uninstall: protobuf\n"," Found existing installation: protobuf 3.20.3\n"," Uninstalling protobuf-3.20.3:\n"," Successfully uninstalled protobuf-3.20.3\n"," Attempting uninstall: google-auth-oauthlib\n"," Found existing installation: google-auth-oauthlib 1.0.0\n"," Uninstalling google-auth-oauthlib-1.0.0:\n"," Successfully uninstalled google-auth-oauthlib-1.0.0\n"," Attempting uninstall: tensorboard\n"," Found existing installation: tensorboard 2.12.2\n"," Uninstalling tensorboard-2.12.2:\n"," Successfully uninstalled tensorboard-2.12.2\n"," Attempting uninstall: tensorflow\n"," Found existing installation: tensorflow 2.12.0\n"," Uninstalling tensorflow-2.12.0:\n"," Successfully uninstalled tensorflow-2.12.0\n","Successfully installed bitarray-2.7.3 google-auth-oauthlib-0.4.6 jedi-0.18.2 jupyter-1.0.0 kaleido-0.2.1 keras-2.8.0 keras-preprocessing-1.1.2 protobuf-3.20.2 qtconsole-5.4.3 qtpy-2.3.1 sklearn-0.0.post5 tensorboard-2.8.0 tensorboard-data-server-0.6.1 tensorflow-2.8.0 tf-estimator-nightly-2.8.0.dev2021122109\n"]},{"output_type":"stream","name":"stderr","text":["ERROR: pip's dependency resolver does not currently take into account all the packages that are installed. 
This behaviour is the source of the following dependency conflicts.\n","tensorflow-metadata 1.13.1 requires protobuf<5,>=3.20.3, but you have protobuf 3.20.2 which is incompatible.\n"]}],"source":["#@title Default title text\n","%%bash\n"," # navigate to the created folder\n","pip3 install --upgrade pip # update pip is recommended but not a necessity\n","cd GenNet\n","pip install -r requirements_GenNet.txt # install all the requirementes voor GenNet."]},{"cell_type":"code","execution_count":3,"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"executionInfo":{"elapsed":4208,"status":"ok","timestamp":1684060267258,"user":{"displayName":"Alina F","userId":"17740631281554612245"},"user_tz":-180},"id":"n3N5BRSGSkdE","outputId":"8a7ee225-f44f-4160-c688-3dce00b75539"},"outputs":[{"output_type":"stream","name":"stdout","text":["= or more then 2.0: tensorflow version is 2.8.0\n"]}],"source":["import os\n","os.chdir(\"/content/GenNet\")\n","import sys\n","sys.path.append(\"/content/root_build/\")\n","import glob\n","import numpy as np\n","import pandas as pd\n","#sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))\n","import matplotlib\n","\n","matplotlib.use('agg')\n","import tensorflow as tf\n","import tensorflow.keras as K\n","import scipy\n","import tables\n","tf.keras.backend.set_epsilon(0.0000001)\n","tf_version = tf.__version__ # ToDo use packaging.version\n","if tf_version <= '1.13.1':\n"," from GenNet_utils.LocallyDirectedConnected import LocallyDirected1D\n"," print('= or less then 1.13.1: tensorflow version is', tf_version)\n","elif tf_version >= '2.0':\n"," from GenNet_utils.LocallyDirectedConnected_tf2 import LocallyDirected1D\n"," print('= or more then 2.0: tensorflow version is', tf_version)\n","else:\n"," print(\"unexpected tensorflow version\")\n"," from GenNet_utils.LocallyDirectedConnected_tf2 import LocallyDirected1D"]},{"cell_type":"code","execution_count":10,"metadata":{"id":"9MFJ6F7Ab4VD","executionInfo":{"status":"ok","timestamp":1684062221863,"user_tz":-180,"elapsed":380,"user":{"displayName":"Alina F","userId":"17740631281554612245"}}},"outputs":[],"source":["def layer_block(model, mask, i, regression):\n"," \n"," if regression:\n"," activation_type=\"relu\"\n"," else:\n"," activation_type=\"tanh\"\n"," \n"," model = LocallyDirected1D(mask=mask, filters=1, input_shape=(mask.shape[0], 1),\n"," name=\"LocallyDirected_\" + str(i))(model)\n"," model = K.layers.Flatten()(model)\n"," model = K.layers.Activation(activation_type)(model)\n"," model = K.layers.BatchNormalization(center=False, scale=False)(model)\n"," return model\n","\n","\n","def add_covariates(model, input_cov, num_covariates, regression, negative_values_ytrain, mean_ytrain):\n"," if num_covariates > 0:\n"," model = activation_layer(model, regression, negative_values_ytrain)\n"," model = K.layers.concatenate([model, input_cov], axis=1)\n"," model = K.layers.BatchNormalization(center=False, scale=False)(model)\n"," model = K.layers.Dense(units=1, bias_initializer= tf.keras.initializers.Constant(mean_ytrain))(model)\n"," return model\n","\n","\n","def activation_layer(model, regression, negative_values_ytrain): \n"," if regression: \n"," if negative_values_ytrain:\n"," model = K.layers.Activation(\"linear\")(model)\n"," print('using a linear activation function')\n"," else:\n"," model = K.layers.Activation(\"relu\")(model)\n"," print('using a relu activation function')\n"," else:\n"," model = K.layers.Activation(\"sigmoid\")(model)\n"," \n"," return model\n","\n","def 
create_network_from_npz(datapath,\n"," inputsize,\n"," genotype_path,\n"," l1_value=0.01,\n"," regression=False,\n"," num_covariates=0,\n"," mask_order = []):\n"," print(\"Creating networks from npz masks\")\n"," print(\"regression\", regression)\n"," if regression:\n"," mean_ytrain, negative_values_ytrain = regression_properties(datapath)\n"," else:\n"," mean_ytrain = 0\n"," negative_values_ytrain = False\n","\n"," masks = []\n"," mask_shapes_x = []\n"," mask_shapes_y = []\n","\n"," print(mask_order)\n","\n"," if len(mask_order) > 0: # if mask_order is defined we use this order\n"," for mask in mask_order:\n"," mask = scipy.sparse.load_npz(datapath + '/'+str(mask)+'.npz')\n"," masks.append(mask)\n"," mask_shapes_x.append(mask.shape[0])\n"," mask_shapes_y.append(mask.shape[1])\n","\n"," for x in range(len(masks) - 1): # check that the masks fit eachother\n"," assert mask_shapes_y[x] == mask_shapes_x[x + 1]\n"," else:\n"," # if mask order is not defined we can sort the mask by the size\n"," for npz_path in glob.glob(datapath + '/*.npz'):\n"," mask = scipy.sparse.load_npz(npz_path)\n"," masks.append(mask)\n"," mask_shapes_x.append(mask.shape[0])\n"," mask_shapes_y.append(mask.shape[1])\n","\n"," for i in range(len(masks)): # sort all the masks in the correct order\n"," argsort_x = np.argsort(mask_shapes_x)[::-1]\n"," argsort_y = np.argsort(mask_shapes_y)[::-1]\n","\n"," mask_shapes_x = np.array(mask_shapes_x)\n"," mask_shapes_y = np.array(mask_shapes_y)\n"," assert all(argsort_x == argsort_y) # check that both dimensions have the same order\n","\n"," masks = [masks[i] for i in argsort_y] # sort masks\n"," mask_shapes_x = mask_shapes_x[argsort_x]\n"," mask_shapes_y = mask_shapes_y[argsort_y]\n","\n"," for x in range(len(masks) - 1): # check that the masks fit eachother\n"," assert mask_shapes_y[x] == mask_shapes_x[x + 1]\n"," print('mask_shapes_x[0]', mask_shapes_x[0])\n"," assert mask_shapes_x[0] == inputsize\n"," print('mask_shapes_y[-1]', mask_shapes_y[-1])\n"," if mask_shapes_y[-1] == 1: # should we end with a dense layer?\n"," all_masks_available = True\n"," else:\n"," all_masks_available = False\n","\n"," input_layer = K.Input((inputsize,), name='input_layer')\n"," input_cov = K.Input((num_covariates,), name='inputs_cov')\n","\n"," model = K.layers.Reshape(input_shape=(inputsize,), target_shape=(inputsize, 1))(input_layer)\n","\n"," for i in range(len(masks)):\n"," mask = masks[i]\n"," model = layer_block(model, mask, i, regression)\n","\n"," model = K.layers.Flatten()(model)\n","\n"," if all_masks_available:\n"," model = LocallyDirected1D(mask=masks[-1], filters=1, input_shape=(mask.shape[0], 1),\n"," name=\"output_layer\")(model)\n"," else:\n"," model = K.layers.Dense(units=1, name=\"output_layer\",\n"," kernel_regularizer=tf.keras.regularizers.l1(l=l1_value)\n"," )(model)\n","\n"," model = add_covariates(model, input_cov, num_covariates, regression, negative_values_ytrain, mean_ytrain)\n","\n"," output_layer = activation_layer(model, regression, negative_values_ytrain)\n"," model = K.Model(inputs=[input_layer, input_cov], outputs=output_layer)\n","\n"," print(model.summary())\n","\n"," return model, masks"]},{"cell_type":"code","execution_count":5,"metadata":{"colab":{"base_uri":"https://localhost:8080/","height":250},"executionInfo":{"elapsed":338433,"status":"ok","timestamp":1684060617598,"user":{"displayName":"Alina 
F","userId":"17740631281554612245"},"user_tz":-180},"id":"12RtThI3TDzs","outputId":"b6bc6181-d6ec-4dcf-d2fe-32823bf1b46d"},"outputs":[{"output_type":"display_data","data":{"text/plain":["<IPython.core.display.HTML object>"],"text/html":["\n"," <input type=\"file\" id=\"files-d283489a-4711-494b-b4c1-f260ab799663\" name=\"files[]\" multiple disabled\n"," style=\"border:none\" />\n"," <output id=\"result-d283489a-4711-494b-b4c1-f260ab799663\">\n"," Upload widget is only available when the cell has been executed in the\n"," current browser session. Please rerun this cell to enable.\n"," </output>\n"," <script>// Copyright 2017 Google LLC\n","//\n","// Licensed under the Apache License, Version 2.0 (the \"License\");\n","// you may not use this file except in compliance with the License.\n","// You may obtain a copy of the License at\n","//\n","// http://www.apache.org/licenses/LICENSE-2.0\n","//\n","// Unless required by applicable law or agreed to in writing, software\n","// distributed under the License is distributed on an \"AS IS\" BASIS,\n","// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n","// See the License for the specific language governing permissions and\n","// limitations under the License.\n","\n","/**\n"," * @fileoverview Helpers for google.colab Python module.\n"," */\n","(function(scope) {\n","function span(text, styleAttributes = {}) {\n"," const element = document.createElement('span');\n"," element.textContent = text;\n"," for (const key of Object.keys(styleAttributes)) {\n"," element.style[key] = styleAttributes[key];\n"," }\n"," return element;\n","}\n","\n","// Max number of bytes which will be uploaded at a time.\n","const MAX_PAYLOAD_SIZE = 100 * 1024;\n","\n","function _uploadFiles(inputId, outputId) {\n"," const steps = uploadFilesStep(inputId, outputId);\n"," const outputElement = document.getElementById(outputId);\n"," // Cache steps on the outputElement to make it available for the next call\n"," // to uploadFilesContinue from Python.\n"," outputElement.steps = steps;\n","\n"," return _uploadFilesContinue(outputId);\n","}\n","\n","// This is roughly an async generator (not supported in the browser yet),\n","// where there are multiple asynchronous steps and the Python side is going\n","// to poll for completion of each step.\n","// This uses a Promise to block the python side on completion of each step,\n","// then passes the result of the previous step as the input to the next step.\n","function _uploadFilesContinue(outputId) {\n"," const outputElement = document.getElementById(outputId);\n"," const steps = outputElement.steps;\n","\n"," const next = steps.next(outputElement.lastPromiseValue);\n"," return Promise.resolve(next.value.promise).then((value) => {\n"," // Cache the last promise value to make it available to the next\n"," // step of the generator.\n"," outputElement.lastPromiseValue = value;\n"," return next.value.response;\n"," });\n","}\n","\n","/**\n"," * Generator function which is called between each async step of the upload\n"," * process.\n"," * @param {string} inputId Element ID of the input file picker element.\n"," * @param {string} outputId Element ID of the output display.\n"," * @return {!Iterable<!Object>} Iterable of next steps.\n"," */\n","function* uploadFilesStep(inputId, outputId) {\n"," const inputElement = document.getElementById(inputId);\n"," inputElement.disabled = false;\n","\n"," const outputElement = document.getElementById(outputId);\n"," outputElement.innerHTML = '';\n","\n"," const pickedPromise 
= new Promise((resolve) => {\n"," inputElement.addEventListener('change', (e) => {\n"," resolve(e.target.files);\n"," });\n"," });\n","\n"," const cancel = document.createElement('button');\n"," inputElement.parentElement.appendChild(cancel);\n"," cancel.textContent = 'Cancel upload';\n"," const cancelPromise = new Promise((resolve) => {\n"," cancel.onclick = () => {\n"," resolve(null);\n"," };\n"," });\n","\n"," // Wait for the user to pick the files.\n"," const files = yield {\n"," promise: Promise.race([pickedPromise, cancelPromise]),\n"," response: {\n"," action: 'starting',\n"," }\n"," };\n","\n"," cancel.remove();\n","\n"," // Disable the input element since further picks are not allowed.\n"," inputElement.disabled = true;\n","\n"," if (!files) {\n"," return {\n"," response: {\n"," action: 'complete',\n"," }\n"," };\n"," }\n","\n"," for (const file of files) {\n"," const li = document.createElement('li');\n"," li.append(span(file.name, {fontWeight: 'bold'}));\n"," li.append(span(\n"," `(${file.type || 'n/a'}) - ${file.size} bytes, ` +\n"," `last modified: ${\n"," file.lastModifiedDate ? file.lastModifiedDate.toLocaleDateString() :\n"," 'n/a'} - `));\n"," const percent = span('0% done');\n"," li.appendChild(percent);\n","\n"," outputElement.appendChild(li);\n","\n"," const fileDataPromise = new Promise((resolve) => {\n"," const reader = new FileReader();\n"," reader.onload = (e) => {\n"," resolve(e.target.result);\n"," };\n"," reader.readAsArrayBuffer(file);\n"," });\n"," // Wait for the data to be ready.\n"," let fileData = yield {\n"," promise: fileDataPromise,\n"," response: {\n"," action: 'continue',\n"," }\n"," };\n","\n"," // Use a chunked sending to avoid message size limits. See b/62115660.\n"," let position = 0;\n"," do {\n"," const length = Math.min(fileData.byteLength - position, MAX_PAYLOAD_SIZE);\n"," const chunk = new Uint8Array(fileData, position, length);\n"," position += length;\n","\n"," const base64 = btoa(String.fromCharCode.apply(null, chunk));\n"," yield {\n"," response: {\n"," action: 'append',\n"," file: file.name,\n"," data: base64,\n"," },\n"," };\n","\n"," let percentDone = fileData.byteLength === 0 ?\n"," 100 :\n"," Math.round((position / fileData.byteLength) * 100);\n"," percent.textContent = `${percentDone}% done`;\n","\n"," } while (position < fileData.byteLength);\n"," }\n","\n"," // All done.\n"," yield {\n"," response: {\n"," action: 'complete',\n"," }\n"," };\n","}\n","\n","scope.google = scope.google || {};\n","scope.google.colab = scope.google.colab || {};\n","scope.google.colab._files = {\n"," _uploadFiles,\n"," _uploadFilesContinue,\n","};\n","})(self);\n","</script> "]},"metadata":{}},{"output_type":"stream","name":"stdout","text":["Saving gene_ensmbl_GTEx_brain_mask_tstat.npz to gene_ensmbl_GTEx_brain_mask_tstat.npz\n","Saving gene_ensmbl_GTEx_mask_tstat.npz to gene_ensmbl_GTEx_mask_tstat.npz\n","Saving gene_ensmbl_ImmGen_mask_tstat.npz to gene_ensmbl_ImmGen_mask_tstat.npz\n","Saving SNP_exon_mask.npz to SNP_exon_mask.npz\n","Saving UKBB_sparse_connection_mask_ensmb_alligned.npz to UKBB_sparse_connection_mask_ensmb_alligned.npz\n","Saving UKBB_sparse_connection_mask_refseq_alligned.npz to UKBB_sparse_connection_mask_refseq_alligned.npz\n"]}],"source":["from google.colab import files\n","uploaded = files.upload()"]},{"cell_type":"code","execution_count":6,"metadata":{"colab":{"base_uri":"https://localhost:8080/","height":74},"executionInfo":{"elapsed":894896,"status":"ok","timestamp":1684061695271,"user":{"displayName":"Alina 
F","userId":"17740631281554612245"},"user_tz":-180},"id":"lu-oDA-NYtFQ","outputId":"6cc04171-6b01-465d-c904-467a92e171b0"},"outputs":[{"output_type":"display_data","data":{"text/plain":["<IPython.core.display.HTML object>"],"text/html":["\n"," <input type=\"file\" id=\"files-8cb2db4a-c23b-4794-a381-784949e50d41\" name=\"files[]\" multiple disabled\n"," style=\"border:none\" />\n"," <output id=\"result-8cb2db4a-c23b-4794-a381-784949e50d41\">\n"," Upload widget is only available when the cell has been executed in the\n"," current browser session. Please rerun this cell to enable.\n"," </output>\n"," <script>// Copyright 2017 Google LLC\n","//\n","// Licensed under the Apache License, Version 2.0 (the \"License\");\n","// you may not use this file except in compliance with the License.\n","// You may obtain a copy of the License at\n","//\n","// http://www.apache.org/licenses/LICENSE-2.0\n","//\n","// Unless required by applicable law or agreed to in writing, software\n","// distributed under the License is distributed on an \"AS IS\" BASIS,\n","// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n","// See the License for the specific language governing permissions and\n","// limitations under the License.\n","\n","/**\n"," * @fileoverview Helpers for google.colab Python module.\n"," */\n","(function(scope) {\n","function span(text, styleAttributes = {}) {\n"," const element = document.createElement('span');\n"," element.textContent = text;\n"," for (const key of Object.keys(styleAttributes)) {\n"," element.style[key] = styleAttributes[key];\n"," }\n"," return element;\n","}\n","\n","// Max number of bytes which will be uploaded at a time.\n","const MAX_PAYLOAD_SIZE = 100 * 1024;\n","\n","function _uploadFiles(inputId, outputId) {\n"," const steps = uploadFilesStep(inputId, outputId);\n"," const outputElement = document.getElementById(outputId);\n"," // Cache steps on the outputElement to make it available for the next call\n"," // to uploadFilesContinue from Python.\n"," outputElement.steps = steps;\n","\n"," return _uploadFilesContinue(outputId);\n","}\n","\n","// This is roughly an async generator (not supported in the browser yet),\n","// where there are multiple asynchronous steps and the Python side is going\n","// to poll for completion of each step.\n","// This uses a Promise to block the python side on completion of each step,\n","// then passes the result of the previous step as the input to the next step.\n","function _uploadFilesContinue(outputId) {\n"," const outputElement = document.getElementById(outputId);\n"," const steps = outputElement.steps;\n","\n"," const next = steps.next(outputElement.lastPromiseValue);\n"," return Promise.resolve(next.value.promise).then((value) => {\n"," // Cache the last promise value to make it available to the next\n"," // step of the generator.\n"," outputElement.lastPromiseValue = value;\n"," return next.value.response;\n"," });\n","}\n","\n","/**\n"," * Generator function which is called between each async step of the upload\n"," * process.\n"," * @param {string} inputId Element ID of the input file picker element.\n"," * @param {string} outputId Element ID of the output display.\n"," * @return {!Iterable<!Object>} Iterable of next steps.\n"," */\n","function* uploadFilesStep(inputId, outputId) {\n"," const inputElement = document.getElementById(inputId);\n"," inputElement.disabled = false;\n","\n"," const outputElement = document.getElementById(outputId);\n"," outputElement.innerHTML = '';\n","\n"," const pickedPromise 
= new Promise((resolve) => {\n"," inputElement.addEventListener('change', (e) => {\n"," resolve(e.target.files);\n"," });\n"," });\n","\n"," const cancel = document.createElement('button');\n"," inputElement.parentElement.appendChild(cancel);\n"," cancel.textContent = 'Cancel upload';\n"," const cancelPromise = new Promise((resolve) => {\n"," cancel.onclick = () => {\n"," resolve(null);\n"," };\n"," });\n","\n"," // Wait for the user to pick the files.\n"," const files = yield {\n"," promise: Promise.race([pickedPromise, cancelPromise]),\n"," response: {\n"," action: 'starting',\n"," }\n"," };\n","\n"," cancel.remove();\n","\n"," // Disable the input element since further picks are not allowed.\n"," inputElement.disabled = true;\n","\n"," if (!files) {\n"," return {\n"," response: {\n"," action: 'complete',\n"," }\n"," };\n"," }\n","\n"," for (const file of files) {\n"," const li = document.createElement('li');\n"," li.append(span(file.name, {fontWeight: 'bold'}));\n"," li.append(span(\n"," `(${file.type || 'n/a'}) - ${file.size} bytes, ` +\n"," `last modified: ${\n"," file.lastModifiedDate ? file.lastModifiedDate.toLocaleDateString() :\n"," 'n/a'} - `));\n"," const percent = span('0% done');\n"," li.appendChild(percent);\n","\n"," outputElement.appendChild(li);\n","\n"," const fileDataPromise = new Promise((resolve) => {\n"," const reader = new FileReader();\n"," reader.onload = (e) => {\n"," resolve(e.target.result);\n"," };\n"," reader.readAsArrayBuffer(file);\n"," });\n"," // Wait for the data to be ready.\n"," let fileData = yield {\n"," promise: fileDataPromise,\n"," response: {\n"," action: 'continue',\n"," }\n"," };\n","\n"," // Use a chunked sending to avoid message size limits. See b/62115660.\n"," let position = 0;\n"," do {\n"," const length = Math.min(fileData.byteLength - position, MAX_PAYLOAD_SIZE);\n"," const chunk = new Uint8Array(fileData, position, length);\n"," position += length;\n","\n"," const base64 = btoa(String.fromCharCode.apply(null, chunk));\n"," yield {\n"," response: {\n"," action: 'append',\n"," file: file.name,\n"," data: base64,\n"," },\n"," };\n","\n"," let percentDone = fileData.byteLength === 0 ?\n"," 100 :\n"," Math.round((position / fileData.byteLength) * 100);\n"," percent.textContent = `${percentDone}% done`;\n","\n"," } while (position < fileData.byteLength);\n"," }\n","\n"," // All done.\n"," yield {\n"," response: {\n"," action: 'complete',\n"," }\n"," };\n","}\n","\n","scope.google = scope.google || {};\n","scope.google.colab = scope.google.colab || {};\n","scope.google.colab._files = {\n"," _uploadFiles,\n"," _uploadFilesContinue,\n","};\n","})(self);\n","</script> "]},"metadata":{}},{"output_type":"stream","name":"stdout","text":["Saving bestweight_job_diabetes.h5 to bestweight_job_diabetes.h5\n"]}],"source":["from google.colab import files\n","uploaded = files.upload()"]},{"cell_type":"code","execution_count":8,"metadata":{"id":"bODh5UtsZaWd","executionInfo":{"status":"ok","timestamp":1684061908841,"user_tz":-180,"elapsed":2,"user":{"displayName":"Alina F","userId":"17740631281554612245"}}},"outputs":[],"source":["from tensorflow.python.framework.ops import disable_eager_execution\n","\n","disable_eager_execution()"]},{"cell_type":"code","execution_count":11,"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"executionInfo":{"elapsed":124677,"status":"ok","timestamp":1684062352240,"user":{"displayName":"Alina 
F","userId":"17740631281554612245"},"user_tz":-180},"id":"T5dG7o4jUK2p","outputId":"b194f971-ba0e-4ac2-c7f4-7315873570c5"},"outputs":[{"output_type":"stream","name":"stdout","text":["Creating networks from npz masks\n","regression False\n","['UKBB_sparse_connection_mask_refseq_alligned']\n","mask_shapes_x[0] 6986636\n","mask_shapes_y[-1] 15827\n","Model: \"model_1\"\n","__________________________________________________________________________________________________\n"," Layer (type) Output Shape Param # Connected to \n","==================================================================================================\n"," input_layer (InputLayer) [(None, 6986636)] 0 [] \n"," \n"," reshape_2 (Reshape) (None, 6986636, 1) 0 ['input_layer[0][0]'] \n"," \n"," flatten_1 (Flatten) (None, 15827) 0 ['LocallyDirected_0[0][0]'] \n"," \n"," activation_2 (Activation) (None, 15827) 0 ['flatten_1[0][0]'] \n"," \n"," batch_normalization_1 (BatchNo (None, 15827) 31654 ['activation_2[0][0]'] \n"," rmalization) \n"," \n"," flatten_2 (Flatten) (None, 15827) 0 ['batch_normalization_1[0][0]'] \n"," \n"," output_layer (Dense) (None, 1) 15828 ['flatten_2[0][0]'] \n"," \n"," inputs_cov (InputLayer) [(None, 0)] 0 [] \n"," \n"," activation_3 (Activation) (None, 1) 0 ['output_layer[0][0]'] \n"," \n","==================================================================================================\n","Total params: 6,708,718\n","Trainable params: 6,677,064\n","Non-trainable params: 31,654\n","__________________________________________________________________________________________________\n","None\n"]}],"source":["\"experiment\"\n","datapath = '/content/GenNet/'\n","inputsize = 6986636\n","num_covariates = 0\n","genotype_path = datapath\n","l1_value = 0.001\n","model, masks = create_network_from_npz(datapath=datapath, inputsize=inputsize, genotype_path=genotype_path,mask_order=['UKBB_sparse_connection_mask_refseq_alligned'],\n"," l1_value=l1_value, regression=False, num_covariates=num_covariates, )"]},{"cell_type":"code","execution_count":16,"metadata":{"colab":{"base_uri":"https://localhost:8080/","height":357},"executionInfo":{"elapsed":299199,"status":"error","timestamp":1684063000397,"user":{"displayName":"Alina F","userId":"17740631281554612245"},"user_tz":-180},"id":"SNkG00Fv_p5f","outputId":"905d54c2-a7ce-4ec6-8ce4-3f6356ef4aaf"},"outputs":[{"output_type":"display_data","data":{"text/plain":["<IPython.core.display.HTML object>"],"text/html":["\n"," <input type=\"file\" id=\"files-639d355e-0e7b-4fea-b74d-3a8eb1dd6cc9\" name=\"files[]\" multiple disabled\n"," style=\"border:none\" />\n"," <output id=\"result-639d355e-0e7b-4fea-b74d-3a8eb1dd6cc9\">\n"," Upload widget is only available when the cell has been executed in the\n"," current browser session. 
Please rerun this cell to enable.\n"," </output>\n"," <script>// Copyright 2017 Google LLC\n","//\n","// Licensed under the Apache License, Version 2.0 (the \"License\");\n","// you may not use this file except in compliance with the License.\n","// You may obtain a copy of the License at\n","//\n","// http://www.apache.org/licenses/LICENSE-2.0\n","//\n","// Unless required by applicable law or agreed to in writing, software\n","// distributed under the License is distributed on an \"AS IS\" BASIS,\n","// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n","// See the License for the specific language governing permissions and\n","// limitations under the License.\n","\n","/**\n"," * @fileoverview Helpers for google.colab Python module.\n"," */\n","(function(scope) {\n","function span(text, styleAttributes = {}) {\n"," const element = document.createElement('span');\n"," element.textContent = text;\n"," for (const key of Object.keys(styleAttributes)) {\n"," element.style[key] = styleAttributes[key];\n"," }\n"," return element;\n","}\n","\n","// Max number of bytes which will be uploaded at a time.\n","const MAX_PAYLOAD_SIZE = 100 * 1024;\n","\n","function _uploadFiles(inputId, outputId) {\n"," const steps = uploadFilesStep(inputId, outputId);\n"," const outputElement = document.getElementById(outputId);\n"," // Cache steps on the outputElement to make it available for the next call\n"," // to uploadFilesContinue from Python.\n"," outputElement.steps = steps;\n","\n"," return _uploadFilesContinue(outputId);\n","}\n","\n","// This is roughly an async generator (not supported in the browser yet),\n","// where there are multiple asynchronous steps and the Python side is going\n","// to poll for completion of each step.\n","// This uses a Promise to block the python side on completion of each step,\n","// then passes the result of the previous step as the input to the next step.\n","function _uploadFilesContinue(outputId) {\n"," const outputElement = document.getElementById(outputId);\n"," const steps = outputElement.steps;\n","\n"," const next = steps.next(outputElement.lastPromiseValue);\n"," return Promise.resolve(next.value.promise).then((value) => {\n"," // Cache the last promise value to make it available to the next\n"," // step of the generator.\n"," outputElement.lastPromiseValue = value;\n"," return next.value.response;\n"," });\n","}\n","\n","/**\n"," * Generator function which is called between each async step of the upload\n"," * process.\n"," * @param {string} inputId Element ID of the input file picker element.\n"," * @param {string} outputId Element ID of the output display.\n"," * @return {!Iterable<!Object>} Iterable of next steps.\n"," */\n","function* uploadFilesStep(inputId, outputId) {\n"," const inputElement = document.getElementById(inputId);\n"," inputElement.disabled = false;\n","\n"," const outputElement = document.getElementById(outputId);\n"," outputElement.innerHTML = '';\n","\n"," const pickedPromise = new Promise((resolve) => {\n"," inputElement.addEventListener('change', (e) => {\n"," resolve(e.target.files);\n"," });\n"," });\n","\n"," const cancel = document.createElement('button');\n"," inputElement.parentElement.appendChild(cancel);\n"," cancel.textContent = 'Cancel upload';\n"," const cancelPromise = new Promise((resolve) => {\n"," cancel.onclick = () => {\n"," resolve(null);\n"," };\n"," });\n","\n"," // Wait for the user to pick the files.\n"," const files = yield {\n"," promise: Promise.race([pickedPromise, cancelPromise]),\n"," 
response: {\n"," action: 'starting',\n"," }\n"," };\n","\n"," cancel.remove();\n","\n"," // Disable the input element since further picks are not allowed.\n"," inputElement.disabled = true;\n","\n"," if (!files) {\n"," return {\n"," response: {\n"," action: 'complete',\n"," }\n"," };\n"," }\n","\n"," for (const file of files) {\n"," const li = document.createElement('li');\n"," li.append(span(file.name, {fontWeight: 'bold'}));\n"," li.append(span(\n"," `(${file.type || 'n/a'}) - ${file.size} bytes, ` +\n"," `last modified: ${\n"," file.lastModifiedDate ? file.lastModifiedDate.toLocaleDateString() :\n"," 'n/a'} - `));\n"," const percent = span('0% done');\n"," li.appendChild(percent);\n","\n"," outputElement.appendChild(li);\n","\n"," const fileDataPromise = new Promise((resolve) => {\n"," const reader = new FileReader();\n"," reader.onload = (e) => {\n"," resolve(e.target.result);\n"," };\n"," reader.readAsArrayBuffer(file);\n"," });\n"," // Wait for the data to be ready.\n"," let fileData = yield {\n"," promise: fileDataPromise,\n"," response: {\n"," action: 'continue',\n"," }\n"," };\n","\n"," // Use a chunked sending to avoid message size limits. See b/62115660.\n"," let position = 0;\n"," do {\n"," const length = Math.min(fileData.byteLength - position, MAX_PAYLOAD_SIZE);\n"," const chunk = new Uint8Array(fileData, position, length);\n"," position += length;\n","\n"," const base64 = btoa(String.fromCharCode.apply(null, chunk));\n"," yield {\n"," response: {\n"," action: 'append',\n"," file: file.name,\n"," data: base64,\n"," },\n"," };\n","\n"," let percentDone = fileData.byteLength === 0 ?\n"," 100 :\n"," Math.round((position / fileData.byteLength) * 100);\n"," percent.textContent = `${percentDone}% done`;\n","\n"," } while (position < fileData.byteLength);\n"," }\n","\n"," // All done.\n"," yield {\n"," response: {\n"," action: 'complete',\n"," }\n"," };\n","}\n","\n","scope.google = scope.google || {};\n","scope.google.colab = scope.google.colab || {};\n","scope.google.colab._files = {\n"," _uploadFiles,\n"," _uploadFilesContinue,\n","};\n","})(self);\n","</script> "]},"metadata":{}},{"output_type":"error","ename":"KeyboardInterrupt","evalue":"ignored","traceback":["\u001b[0;31m---------------------------------------------------------------------------\u001b[0m","\u001b[0;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)","\u001b[0;32m<ipython-input-16-21dc3c638f66>\u001b[0m in \u001b[0;36m<cell line: 2>\u001b[0;34m()\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0;32mfrom\u001b[0m \u001b[0mgoogle\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcolab\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mfiles\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 2\u001b[0;31m \u001b[0muploaded\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mfiles\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mupload\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m","\u001b[0;32m/usr/local/lib/python3.10/dist-packages/google/colab/files.py\u001b[0m in \u001b[0;36mupload\u001b[0;34m()\u001b[0m\n\u001b[1;32m 67\u001b[0m \"\"\"\n\u001b[1;32m 68\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 69\u001b[0;31m \u001b[0muploaded_files\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0m_upload_files\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmultiple\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mTrue\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 70\u001b[0m \u001b[0;31m# Mapping from original 
filename to filename as saved locally.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 71\u001b[0m \u001b[0mlocal_filenames\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mdict\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;32m/usr/local/lib/python3.10/dist-packages/google/colab/files.py\u001b[0m in \u001b[0;36m_upload_files\u001b[0;34m(multiple)\u001b[0m\n\u001b[1;32m 151\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 152\u001b[0m \u001b[0;31m# First result is always an indication that the file picker has completed.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 153\u001b[0;31m result = _output.eval_js(\n\u001b[0m\u001b[1;32m 154\u001b[0m 'google.colab._files._uploadFiles(\"{input_id}\", \"{output_id}\")'.format(\n\u001b[1;32m 155\u001b[0m \u001b[0minput_id\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0minput_id\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0moutput_id\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0moutput_id\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;32m/usr/local/lib/python3.10/dist-packages/google/colab/output/_js.py\u001b[0m in \u001b[0;36meval_js\u001b[0;34m(script, ignore_result, timeout_sec)\u001b[0m\n\u001b[1;32m 38\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mignore_result\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 39\u001b[0m \u001b[0;32mreturn\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 40\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0m_message\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mread_reply_from_input\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mrequest_id\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtimeout_sec\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 41\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 42\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;32m/usr/local/lib/python3.10/dist-packages/google/colab/_message.py\u001b[0m in \u001b[0;36mread_reply_from_input\u001b[0;34m(message_id, timeout_sec)\u001b[0m\n\u001b[1;32m 94\u001b[0m \u001b[0mreply\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0m_read_next_input_message\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 95\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mreply\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0m_NOT_READY\u001b[0m \u001b[0;32mor\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0misinstance\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mreply\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdict\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 96\u001b[0;31m \u001b[0mtime\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msleep\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m0.025\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 97\u001b[0m \u001b[0;32mcontinue\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 98\u001b[0m if (\n","\u001b[0;31mKeyboardInterrupt\u001b[0m: "]}],"source":["# upload bestweights.h5\n","from google.colab import files\n","uploaded = files.upload()"]},{"cell_type":"code","execution_count":15,"metadata":{"colab":{"base_uri":"https://localhost:8080/","height":318},"executionInfo":{"elapsed":289,"status":"error","timestamp":1684062632754,"user":{"displayName":"Alina 
F","userId":"17740631281554612245"},"user_tz":-180},"id":"VAlSA3JGFB5j","outputId":"daccc09a-a3dc-4ea8-e878-15041181947b"},"outputs":[{"output_type":"error","ename":"ValueError","evalue":"ignored","traceback":["\u001b[0;31m---------------------------------------------------------------------------\u001b[0m","\u001b[0;31mValueError\u001b[0m Traceback (most recent call last)","\u001b[0;32m<ipython-input-15-8c2ab66ed495>\u001b[0m in \u001b[0;36m<cell line: 1>\u001b[0;34m()\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mmodel\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mload_weights\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'/content/GenNet/bestweight_job_diabetes.h5'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m","\u001b[0;32m/usr/local/lib/python3.10/dist-packages/keras/engine/training_v1.py\u001b[0m in \u001b[0;36mload_weights\u001b[0;34m(self, filepath, by_name, skip_mismatch)\u001b[0m\n\u001b[1;32m 212\u001b[0m raise ValueError('Load weights is not yet supported with TPUStrategy '\n\u001b[1;32m 213\u001b[0m 'with steps_per_run greater than 1.')\n\u001b[0;32m--> 214\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0msuper\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mModel\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mload_weights\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfilepath\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mby_name\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mskip_mismatch\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 215\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 216\u001b[0m \u001b[0;34m@\u001b[0m\u001b[0mtf\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m__internal__\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtracking\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mno_automatic_dependency_tracking\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;32m/usr/local/lib/python3.10/dist-packages/keras/utils/traceback_utils.py\u001b[0m in \u001b[0;36merror_handler\u001b[0;34m(*args, **kwargs)\u001b[0m\n\u001b[1;32m 65\u001b[0m \u001b[0;32mexcept\u001b[0m \u001b[0mException\u001b[0m \u001b[0;32mas\u001b[0m \u001b[0me\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0;31m# pylint: disable=broad-except\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 66\u001b[0m \u001b[0mfiltered_tb\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0m_process_traceback_frames\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0me\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m__traceback__\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 67\u001b[0;31m \u001b[0;32mraise\u001b[0m \u001b[0me\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mwith_traceback\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfiltered_tb\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mfrom\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 68\u001b[0m \u001b[0;32mfinally\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 69\u001b[0m \u001b[0;32mdel\u001b[0m \u001b[0mfiltered_tb\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;32m/usr/local/lib/python3.10/dist-packages/keras/saving/hdf5_format.py\u001b[0m in \u001b[0;36mload_weights_from_hdf5_group\u001b[0;34m(f, model)\u001b[0m\n\u001b[1;32m 726\u001b[0m \u001b[0mlayer_names\u001b[0m \u001b[0;34m=\u001b[0m 
\u001b[0mfiltered_layer_names\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 727\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mlen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mlayer_names\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m!=\u001b[0m \u001b[0mlen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfiltered_layers\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 728\u001b[0;31m raise ValueError(\n\u001b[0m\u001b[1;32m 729\u001b[0m \u001b[0;34mf'Layer count mismatch when loading weights from file. '\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 730\u001b[0m \u001b[0;34mf'Model expected {len(filtered_layers)} layers, found '\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;31mValueError\u001b[0m: Layer count mismatch when loading weights from file. Model expected 2 layers, found 3 saved layers."]}],"source":["model.load_weights('/content/GenNet/bestweight_job_diabetes.h5')"]},{"cell_type":"code","execution_count":null,"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"executionInfo":{"elapsed":18359,"status":"ok","timestamp":1683823851299,"user":{"displayName":"Alina F","userId":"17740631281554612245"},"user_tz":-180},"id":"DV0kBxoihpPm","outputId":"45f088d2-875a-46f2-9b02-88c050bdb3c4"},"outputs":[{"name":"stdout","output_type":"stream","text":["Model: \"model_7\"\n","__________________________________________________________________________________________________\n"," Layer (type) Output Shape Param # Connected to \n","==================================================================================================\n"," input_layer (InputLayer) [(None, 6986636)] 0 [] \n"," \n"," reshape_7 (Reshape) (None, 6986636, 1) 0 ['input_layer[0][0]'] \n"," \n"," activation_15 (Activation) (None, 21476, 1) 0 ['LocallyDirected_0[0][0]'] \n"," \n"," batch_normalization_8 (BatchNo (None, 21476, 1) 2 ['activation_15[0][0]'] \n"," rmalization) \n"," \n"," activation_16 (Activation) (None, 292, 1) 0 ['LocallyDirected_1[0][0]'] \n"," \n"," batch_normalization_9 (BatchNo (None, 292, 1) 2 ['activation_16[0][0]'] \n"," rmalization) \n"," \n"," flatten_7 (Flatten) (None, 292) 0 ['batch_normalization_9[0][0]'] \n"," \n"," output_layer (Dense) (None, 1) 293 ['flatten_7[0][0]'] \n"," \n"," inputs_cov (InputLayer) [(None, 0)] 0 [] \n"," \n"," activation_17 (Activation) (None, 1) 0 ['output_layer[0][0]'] \n"," \n","==================================================================================================\n","Total params: 6,984,918\n","Trainable params: 6,984,914\n","Non-trainable params: 4\n","__________________________________________________________________________________________________\n"]}],"source":["model.summary()"]},{"cell_type":"code","execution_count":null,"metadata":{"colab":{"base_uri":"https://localhost:8080/","height":73},"executionInfo":{"elapsed":10768,"status":"ok","timestamp":1677750425067,"user":{"displayName":"Alina F","userId":"17740631281554612245"},"user_tz":-240},"id":"1BVGwltjh1EJ","outputId":"34ac6187-5d3b-42fc-b1ad-6a1f1d4ece72"},"outputs":[{"data":{"text/html":["\n"," <input type=\"file\" id=\"files-8ca0acdd-2479-4d57-8937-b904c3b9f945\" name=\"files[]\" multiple disabled\n"," style=\"border:none\" />\n"," <output id=\"result-8ca0acdd-2479-4d57-8937-b904c3b9f945\">\n"," Upload widget is only available when the cell has been executed in the\n"," current browser session. 
Please rerun this cell to enable.\n"," </output>\n"," <script>// Copyright 2017 Google LLC\n","//\n","// Licensed under the Apache License, Version 2.0 (the \"License\");\n","// you may not use this file except in compliance with the License.\n","// You may obtain a copy of the License at\n","//\n","// http://www.apache.org/licenses/LICENSE-2.0\n","//\n","// Unless required by applicable law or agreed to in writing, software\n","// distributed under the License is distributed on an \"AS IS\" BASIS,\n","// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n","// See the License for the specific language governing permissions and\n","// limitations under the License.\n","\n","/**\n"," * @fileoverview Helpers for google.colab Python module.\n"," */\n","(function(scope) {\n","function span(text, styleAttributes = {}) {\n"," const element = document.createElement('span');\n"," element.textContent = text;\n"," for (const key of Object.keys(styleAttributes)) {\n"," element.style[key] = styleAttributes[key];\n"," }\n"," return element;\n","}\n","\n","// Max number of bytes which will be uploaded at a time.\n","const MAX_PAYLOAD_SIZE = 100 * 1024;\n","\n","function _uploadFiles(inputId, outputId) {\n"," const steps = uploadFilesStep(inputId, outputId);\n"," const outputElement = document.getElementById(outputId);\n"," // Cache steps on the outputElement to make it available for the next call\n"," // to uploadFilesContinue from Python.\n"," outputElement.steps = steps;\n","\n"," return _uploadFilesContinue(outputId);\n","}\n","\n","// This is roughly an async generator (not supported in the browser yet),\n","// where there are multiple asynchronous steps and the Python side is going\n","// to poll for completion of each step.\n","// This uses a Promise to block the python side on completion of each step,\n","// then passes the result of the previous step as the input to the next step.\n","function _uploadFilesContinue(outputId) {\n"," const outputElement = document.getElementById(outputId);\n"," const steps = outputElement.steps;\n","\n"," const next = steps.next(outputElement.lastPromiseValue);\n"," return Promise.resolve(next.value.promise).then((value) => {\n"," // Cache the last promise value to make it available to the next\n"," // step of the generator.\n"," outputElement.lastPromiseValue = value;\n"," return next.value.response;\n"," });\n","}\n","\n","/**\n"," * Generator function which is called between each async step of the upload\n"," * process.\n"," * @param {string} inputId Element ID of the input file picker element.\n"," * @param {string} outputId Element ID of the output display.\n"," * @return {!Iterable<!Object>} Iterable of next steps.\n"," */\n","function* uploadFilesStep(inputId, outputId) {\n"," const inputElement = document.getElementById(inputId);\n"," inputElement.disabled = false;\n","\n"," const outputElement = document.getElementById(outputId);\n"," outputElement.innerHTML = '';\n","\n"," const pickedPromise = new Promise((resolve) => {\n"," inputElement.addEventListener('change', (e) => {\n"," resolve(e.target.files);\n"," });\n"," });\n","\n"," const cancel = document.createElement('button');\n"," inputElement.parentElement.appendChild(cancel);\n"," cancel.textContent = 'Cancel upload';\n"," const cancelPromise = new Promise((resolve) => {\n"," cancel.onclick = () => {\n"," resolve(null);\n"," };\n"," });\n","\n"," // Wait for the user to pick the files.\n"," const files = yield {\n"," promise: Promise.race([pickedPromise, cancelPromise]),\n"," 
response: {\n"," action: 'starting',\n"," }\n"," };\n","\n"," cancel.remove();\n","\n"," // Disable the input element since further picks are not allowed.\n"," inputElement.disabled = true;\n","\n"," if (!files) {\n"," return {\n"," response: {\n"," action: 'complete',\n"," }\n"," };\n"," }\n","\n"," for (const file of files) {\n"," const li = document.createElement('li');\n"," li.append(span(file.name, {fontWeight: 'bold'}));\n"," li.append(span(\n"," `(${file.type || 'n/a'}) - ${file.size} bytes, ` +\n"," `last modified: ${\n"," file.lastModifiedDate ? file.lastModifiedDate.toLocaleDateString() :\n"," 'n/a'} - `));\n"," const percent = span('0% done');\n"," li.appendChild(percent);\n","\n"," outputElement.appendChild(li);\n","\n"," const fileDataPromise = new Promise((resolve) => {\n"," const reader = new FileReader();\n"," reader.onload = (e) => {\n"," resolve(e.target.result);\n"," };\n"," reader.readAsArrayBuffer(file);\n"," });\n"," // Wait for the data to be ready.\n"," let fileData = yield {\n"," promise: fileDataPromise,\n"," response: {\n"," action: 'continue',\n"," }\n"," };\n","\n"," // Use a chunked sending to avoid message size limits. See b/62115660.\n"," let position = 0;\n"," do {\n"," const length = Math.min(fileData.byteLength - position, MAX_PAYLOAD_SIZE);\n"," const chunk = new Uint8Array(fileData, position, length);\n"," position += length;\n","\n"," const base64 = btoa(String.fromCharCode.apply(null, chunk));\n"," yield {\n"," response: {\n"," action: 'append',\n"," file: file.name,\n"," data: base64,\n"," },\n"," };\n","\n"," let percentDone = fileData.byteLength === 0 ?\n"," 100 :\n"," Math.round((position / fileData.byteLength) * 100);\n"," percent.textContent = `${percentDone}% done`;\n","\n"," } while (position < fileData.byteLength);\n"," }\n","\n"," // All done.\n"," yield {\n"," response: {\n"," action: 'complete',\n"," }\n"," };\n","}\n","\n","scope.google = scope.google || {};\n","scope.google.colab = scope.google.colab || {};\n","scope.google.colab._files = {\n"," _uploadFiles,\n"," _uploadFilesContinue,\n","};\n","})(self);\n","</script> "],"text/plain":["<IPython.core.display.HTML object>"]},"metadata":{},"output_type":"display_data"},{"name":"stdout","output_type":"stream","text":["Saving genotype.h5 to genotype.h5\n"]}],"source":["#upload genotype.h5\n","from google.colab import files\n","uploaded = files.upload()"]},{"cell_type":"code","source":["def get_testdata(datapath):\n"," # ytest = pd.read_csv(datapath + \"ytest_\"+studyname+\".csv\")\n"," h5file = tables.open_file(datapath + studyname + '_genotype_processed.h5', \"r\")\n"," # ybatch = ytest[\"labels\"]\n"," # xbatchid = np.array(ytest[\"tot_index\"].values, dtype=np.int64)\n"," xbatch = h5file.root.data[:]\n"," # ybatch = np.reshape(np.array(ybatch), (-1, 1))\n"," h5file.close()\n"," return xbatch"],"metadata":{"id":"k2BK2CDpA7N-"},"execution_count":null,"outputs":[]},{"cell_type":"code","execution_count":null,"metadata":{"id":"jkvt7OSih7rx"},"outputs":[],"source":["xtest = get_testdata(datapath)\n","pred = model.predict(xtest)\n","print('model prediction: ', pred)"]},{"cell_type":"code","execution_count":null,"metadata":{"id":"Rehan6kHsWnl"},"outputs":[],"source":["!python GenNet.py plot --help"]},{"cell_type":"code","execution_count":null,"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"executionInfo":{"elapsed":7386,"status":"ok","timestamp":1678446242564,"user":{"displayName":"Alina 
F","userId":"17740631281554612245"},"user_tz":-240},"id":"KHFvo68gMGlk","outputId":"56d40c38-9248-4d6a-b090-0dc3197b6e7c"},"outputs":[{"name":"stdout","output_type":"stream","text":["2023-03-10 11:03:58.890805: W tensorflow/compiler/xla/stream_executor/platform/default/dso_loader.cc:64] Could not load dynamic library 'libnvinfer.so.7'; dlerror: libnvinfer.so.7: cannot open shared object file: No such file or directory; LD_LIBRARY_PATH: /usr/local/nvidia/lib:/usr/local/nvidia/lib64\n","2023-03-10 11:03:58.891032: W tensorflow/compiler/xla/stream_executor/platform/default/dso_loader.cc:64] Could not load dynamic library 'libnvinfer_plugin.so.7'; dlerror: libnvinfer_plugin.so.7: cannot open shared object file: No such file or directory; LD_LIBRARY_PATH: /usr/local/nvidia/lib:/usr/local/nvidia/lib64\n","2023-03-10 11:03:58.891063: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Cannot dlopen some TensorRT libraries. If you would like to use Nvidia GPU with TensorRT, please make sure the missing libraries mentioned above are installed properly.\n","bitarray failed to import this might give some trouble converting binary files\n","Resultspath did not exist but is made now\n","Traceback (most recent call last):\n"," File \"/content/GenNet/GenNet.py\", line 284, in <module>\n"," main()\n"," File \"/content/GenNet/GenNet.py\", line 26, in main\n"," plot(args)\n"," File \"/content/GenNet/GenNet_utils/Create_plots.py\", line 233, in plot\n"," importance_csv = pd.read_csv(resultpath + \"/connection_weights.csv\", index_col=0)\n"," File \"/usr/local/lib/python3.9/dist-packages/pandas/util/_decorators.py\", line 311, in wrapper\n"," return func(*args, **kwargs)\n"," File \"/usr/local/lib/python3.9/dist-packages/pandas/io/parsers/readers.py\", line 586, in read_csv\n"," return _read(filepath_or_buffer, kwds)\n"," File \"/usr/local/lib/python3.9/dist-packages/pandas/io/parsers/readers.py\", line 482, in _read\n"," parser = TextFileReader(filepath_or_buffer, **kwds)\n"," File \"/usr/local/lib/python3.9/dist-packages/pandas/io/parsers/readers.py\", line 811, in __init__\n"," self._engine = self._make_engine(self.engine)\n"," File \"/usr/local/lib/python3.9/dist-packages/pandas/io/parsers/readers.py\", line 1040, in _make_engine\n"," return mapping[engine](self.f, **self.options) # type: ignore[call-arg]\n"," File \"/usr/local/lib/python3.9/dist-packages/pandas/io/parsers/c_parser_wrapper.py\", line 51, in __init__\n"," self._open_handles(src, kwds)\n"," File \"/usr/local/lib/python3.9/dist-packages/pandas/io/parsers/base_parser.py\", line 222, in _open_handles\n"," self.handles = get_handle(\n"," File \"/usr/local/lib/python3.9/dist-packages/pandas/io/common.py\", line 702, in get_handle\n"," handle = open(\n","FileNotFoundError: [Errno 2] No such file or directory: '/content/GenNet/results/GenNet_experiment_100001_//connection_weights.csv'\n"]}],"source":["!python GenNet.py plot -type sunburst -ID 100001"]}],"metadata":{"colab":{"provenance":[],"authorship_tag":"ABX9TyM8pzXuHVzrY81OQ8Q40qkw"},"kernelspec":{"display_name":"Python 3","name":"python3"},"language_info":{"name":"python"}},"nbformat":4,"nbformat_minor":0}
gennet_launch_Gene_networks_RefGene.py ADDED
@@ -0,0 +1,180 @@
+ import os
+ os.chdir("/data/public/GenNet")  # run from the root of the GenNet repository
+ import sys
+ import glob
+ import numpy as np
+ import pandas as pd
+ # sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+ import matplotlib
+ matplotlib.use('agg')
+ import tensorflow as tf
+ import tensorflow.keras as K
+ import scipy
+ import tables
+
+ tf.keras.backend.set_epsilon(1e-7)
+ tf_version = tf.__version__  # ToDo use packaging.version (see the sketch after this file)
+ if tf_version <= '1.13.1':
+     from GenNet_utils.LocallyDirectedConnected import LocallyDirected1D
+     print('= or less than 1.13.1: tensorflow version is', tf_version)
+ elif tf_version >= '2.0':
+     from GenNet_utils.LocallyDirectedConnected_tf2 import LocallyDirected1D
+     print('= or more than 2.0: tensorflow version is', tf_version)
+ else:
+     print("unexpected tensorflow version")
+     from GenNet_utils.LocallyDirectedConnected_tf2 import LocallyDirected1D
+
+ studyname = 'test_GN_ref'
+
+
+ def layer_block(model, mask, i, regression):
+     # relu for regression, tanh for classification
+     if regression:
+         activation_type = "relu"
+     else:
+         activation_type = "tanh"
+
+     model = LocallyDirected1D(mask=mask, filters=1, input_shape=(mask.shape[0], 1),
+                               name="LocallyDirected_" + str(i))(model)
+     model = K.layers.Flatten()(model)
+
+     model = K.layers.Activation(activation_type)(model)
+     model = K.layers.BatchNormalization(center=False, scale=False)(model)
+     return model
+
+
+ def add_covariates(model, input_cov, num_covariates, regression, negative_values_ytrain, mean_ytrain):
+     if num_covariates > 0:
+         model = activation_layer(model, regression, negative_values_ytrain)
+         model = K.layers.concatenate([model, input_cov], axis=1)
+         model = K.layers.BatchNormalization(center=False, scale=False)(model)
+         model = K.layers.Dense(units=1, bias_initializer=tf.keras.initializers.Constant(mean_ytrain))(model)
+     return model
+
+
+ def activation_layer(model, regression, negative_values_ytrain):
+     if regression:
+         if negative_values_ytrain:
+             model = K.layers.Activation("linear")(model)
+             print('using a linear activation function')
+         else:
+             model = K.layers.Activation("relu")(model)
+             print('using a relu activation function')
+     else:
+         model = K.layers.Activation("sigmoid")(model)
+
+     return model
+
+
+ def create_network_from_npz(datapath,
+                             inputsize,
+                             genotype_path,
+                             l1_value=0.01,
+                             regression=False,
+                             num_covariates=0,
+                             mask_order=()):
+     print("Creating networks from npz masks")
+     print("regression", regression)
+     if regression:
+         # regression_properties is provided by the GenNet repository utilities
+         mean_ytrain, negative_values_ytrain = regression_properties(datapath)
+     else:
+         mean_ytrain = 0
+         negative_values_ytrain = False
+
+     masks = []
+     mask_shapes_x = []
+     mask_shapes_y = []
+
+     print(mask_order)
+
+     if len(mask_order) > 0:  # if mask_order is defined we use this order
+         for mask_name in mask_order:
+             mask = scipy.sparse.load_npz(datapath + '/' + str(mask_name) + '.npz')
+             masks.append(mask)
+             mask_shapes_x.append(mask.shape[0])
+             mask_shapes_y.append(mask.shape[1])
+
+         for x in range(len(masks) - 1):  # check that the masks fit each other
+             assert mask_shapes_y[x] == mask_shapes_x[x + 1]
+     else:
+         # if mask_order is not defined we sort the masks by size
+         for npz_path in glob.glob(datapath + '/*.npz'):
+             mask = scipy.sparse.load_npz(npz_path)
+             masks.append(mask)
+             mask_shapes_x.append(mask.shape[0])
+             mask_shapes_y.append(mask.shape[1])
+
+         # sort all the masks from the largest input to the smallest output
+         argsort_x = np.argsort(mask_shapes_x)[::-1]
+         argsort_y = np.argsort(mask_shapes_y)[::-1]
+
+         mask_shapes_x = np.array(mask_shapes_x)
+         mask_shapes_y = np.array(mask_shapes_y)
+         assert all(argsort_x == argsort_y)  # check that both dimensions have the same order
+
+         masks = [masks[i] for i in argsort_y]  # sort masks
+         mask_shapes_x = mask_shapes_x[argsort_x]
+         mask_shapes_y = mask_shapes_y[argsort_y]
+
+         for x in range(len(masks) - 1):  # check that the masks fit each other
+             assert mask_shapes_y[x] == mask_shapes_x[x + 1]
+
+     print('mask_shapes_x[0]', mask_shapes_x[0])
+     assert mask_shapes_x[0] == inputsize
+     print('mask_shapes_y[-1]', mask_shapes_y[-1])
+     if mask_shapes_y[-1] == 1:  # if the last mask already ends in a single node we do not need a dense layer
+         all_masks_available = True
+     else:
+         all_masks_available = False
+
+     input_layer = K.Input((inputsize,), name='input_layer')
+     input_cov = K.Input((num_covariates,), name='inputs_cov')
+
+     model = K.layers.Reshape(input_shape=(inputsize,), target_shape=(inputsize, 1))(input_layer)
+
+     for i in range(len(masks)):
+         mask = masks[i]
+         model = layer_block(model, mask, i, regression)
+
+     model = K.layers.Flatten()(model)
+
+     if all_masks_available:
+         model = LocallyDirected1D(mask=masks[-1], filters=1, input_shape=(mask.shape[0], 1),
+                                   name="output_layer")(model)
+     else:
+         model = K.layers.Dense(units=1, name="output_layer",
+                                kernel_regularizer=tf.keras.regularizers.l1(l=l1_value)
+                                )(model)
+
+     model = add_covariates(model, input_cov, num_covariates, regression, negative_values_ytrain, mean_ytrain)
+
+     output_layer = activation_layer(model, regression, negative_values_ytrain)
+     model = K.Model(inputs=[input_layer, input_cov], outputs=output_layer)
+
+     print(model.summary())
+
+     return model, masks
+
+
+ def get_testdata(datapath):
+     # ytest = pd.read_csv(datapath + "ytest_" + studyname + ".csv")
+     h5file = tables.open_file(datapath + studyname + '_genotype_processed.h5', "r")
+     # ybatch = ytest["labels"]
+     # xbatchid = np.array(ytest["tot_index"].values, dtype=np.int64)
+     xbatch = h5file.root.data[:]
+     # ybatch = np.reshape(np.array(ybatch), (-1, 1))
+     h5file.close()
+     return xbatch
+
+
+ def predict():
+     xtest = get_testdata(datapath)
+     pred = model.predict(xtest)
+     print('model prediction: ', pred)
+
+
+ datapath = '/data/public/GenNet/processed_data/'
+ inputsize = 6986636
+ num_covariates = 0
+ genotype_path = datapath
+ l1_value = 0.001
+ model, masks = create_network_from_npz(datapath=datapath, inputsize=inputsize, genotype_path=genotype_path,
+                                        mask_order=['UKBB_sparse_connection_mask_refseq_alligned'],
+                                        l1_value=l1_value, regression=False, num_covariates=num_covariates)
+ model.load_weights(datapath + 'bestweight_job_diabetes.h5')
+ print('weights have been loaded')
+ predict()
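
The tensorflow version check above compares version strings lexicographically, which the inline "ToDo use packaging.version" already flags: as strings, '1.9.0' <= '1.13.1' is False, so TF 1.9 would take the wrong branch. A sketch of the numeric comparison with packaging.version (assuming the packaging package is installed; it keeps the same 1.13.1 boundary, and everything newer falls through to the tf2 implementation exactly as in the script):

from packaging import version
import tensorflow as tf

# Compare versions numerically instead of as strings.
if version.parse(tf.__version__) <= version.parse('1.13.1'):
    from GenNet_utils.LocallyDirectedConnected import LocallyDirected1D
else:
    from GenNet_utils.LocallyDirectedConnected_tf2 import LocallyDirected1D
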
gennet_launch_ImmGen.py ADDED
@@ -0,0 +1,179 @@
+ import os
+ os.chdir("/data/public/GenNet")  # run from the root of the GenNet repository
+ import sys
+ import glob
+ import numpy as np
+ import pandas as pd
+ # sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+ import matplotlib
+ matplotlib.use('agg')
+ import tensorflow as tf
+ import tensorflow.keras as K
+ import scipy
+ import tables
+
+ tf.keras.backend.set_epsilon(1e-7)
+ tf_version = tf.__version__  # ToDo use packaging.version
+ if tf_version <= '1.13.1':
+     from GenNet_utils.LocallyDirectedConnected import LocallyDirected1D
+     print('= or less than 1.13.1: tensorflow version is', tf_version)
+ elif tf_version >= '2.0':
+     from GenNet_utils.LocallyDirectedConnected_tf2 import LocallyDirected1D
+     print('= or more than 2.0: tensorflow version is', tf_version)
+ else:
+     print("unexpected tensorflow version")
+     from GenNet_utils.LocallyDirectedConnected_tf2 import LocallyDirected1D
+
+ studyname = 'test_ImmGen'
+
+
+ def layer_block(model, mask, i, regression):
+     # relu for regression, tanh for classification
+     if regression:
+         activation_type = "relu"
+     else:
+         activation_type = "tanh"
+
+     model = LocallyDirected1D(mask=mask, filters=1, input_shape=(mask.shape[0], 1),
+                               name="LocallyDirected_" + str(i))(model)
+
+     model = K.layers.Activation(activation_type)(model)
+     model = K.layers.BatchNormalization(center=False, scale=False)(model)
+     return model
+
+
+ def add_covariates(model, input_cov, num_covariates, regression, negative_values_ytrain, mean_ytrain):
+     if num_covariates > 0:
+         model = activation_layer(model, regression, negative_values_ytrain)
+         model = K.layers.concatenate([model, input_cov], axis=1)
+         model = K.layers.BatchNormalization(center=False, scale=False)(model)
+         model = K.layers.Dense(units=1, bias_initializer=tf.keras.initializers.Constant(mean_ytrain))(model)
+     return model
+
+
+ def activation_layer(model, regression, negative_values_ytrain):
+     if regression:
+         if negative_values_ytrain:
+             model = K.layers.Activation("linear")(model)
+             print('using a linear activation function')
+         else:
+             model = K.layers.Activation("relu")(model)
+             print('using a relu activation function')
+     else:
+         model = K.layers.Activation("sigmoid")(model)
+
+     return model
+
+
+ def create_network_from_npz(datapath,
+                             inputsize,
+                             genotype_path,
+                             l1_value=0.01,
+                             regression=False,
+                             num_covariates=0,
+                             mask_order=()):
+     print("Creating networks from npz masks")
+     print("regression", regression)
+     if regression:
+         # regression_properties is provided by the GenNet repository utilities
+         mean_ytrain, negative_values_ytrain = regression_properties(datapath)
+     else:
+         mean_ytrain = 0
+         negative_values_ytrain = False
+
+     masks = []
+     mask_shapes_x = []
+     mask_shapes_y = []
+
+     print(mask_order)
+
+     if len(mask_order) > 0:  # if mask_order is defined we use this order
+         for mask_name in mask_order:
+             mask = scipy.sparse.load_npz(datapath + '/' + str(mask_name) + '.npz')
+             masks.append(mask)
+             mask_shapes_x.append(mask.shape[0])
+             mask_shapes_y.append(mask.shape[1])
+
+         for x in range(len(masks) - 1):  # check that the masks fit each other
+             assert mask_shapes_y[x] == mask_shapes_x[x + 1]
+     else:
+         # if mask_order is not defined we sort the masks by size
+         for npz_path in glob.glob(datapath + '/*.npz'):
+             mask = scipy.sparse.load_npz(npz_path)
+             masks.append(mask)
+             mask_shapes_x.append(mask.shape[0])
+             mask_shapes_y.append(mask.shape[1])
+
+         # sort all the masks from the largest input to the smallest output
+         argsort_x = np.argsort(mask_shapes_x)[::-1]
+         argsort_y = np.argsort(mask_shapes_y)[::-1]
+
+         mask_shapes_x = np.array(mask_shapes_x)
+         mask_shapes_y = np.array(mask_shapes_y)
+         assert all(argsort_x == argsort_y)  # check that both dimensions have the same order
+
+         masks = [masks[i] for i in argsort_y]  # sort masks
+         mask_shapes_x = mask_shapes_x[argsort_x]
+         mask_shapes_y = mask_shapes_y[argsort_y]
+
+         for x in range(len(masks) - 1):  # check that the masks fit each other
+             assert mask_shapes_y[x] == mask_shapes_x[x + 1]
+
+     print('mask_shapes_x[0]', mask_shapes_x[0])
+     assert mask_shapes_x[0] == inputsize
+     print('mask_shapes_y[-1]', mask_shapes_y[-1])
+     if mask_shapes_y[-1] == 1:  # if the last mask already ends in a single node we do not need a dense layer
+         all_masks_available = True
+     else:
+         all_masks_available = False
+
+     input_layer = K.Input((inputsize,), name='input_layer')
+     input_cov = K.Input((num_covariates,), name='inputs_cov')
+
+     model = K.layers.Reshape(input_shape=(inputsize,), target_shape=(inputsize, 1))(input_layer)
+
+     for i in range(len(masks)):
+         mask = masks[i]
+         model = layer_block(model, mask, i, regression)
+
+     model = K.layers.Flatten()(model)
+
+     if all_masks_available:
+         model = LocallyDirected1D(mask=masks[-1], filters=1, input_shape=(mask.shape[0], 1),
+                                   name="output_layer")(model)
+     else:
+         model = K.layers.Dense(units=1, name="output_layer",
+                                kernel_regularizer=tf.keras.regularizers.l1(l=l1_value)
+                                )(model)
+
+     model = add_covariates(model, input_cov, num_covariates, regression, negative_values_ytrain, mean_ytrain)
+
+     output_layer = activation_layer(model, regression, negative_values_ytrain)
+     model = K.Model(inputs=[input_layer, input_cov], outputs=output_layer)
+
+     print(model.summary())
+
+     return model, masks
+
+
+ def get_testdata(datapath):
+     # ytest = pd.read_csv(datapath + "ytest_" + studyname + ".csv")
+     h5file = tables.open_file(datapath + studyname + '_genotype_processed.h5', "r")
+     # ybatch = ytest["labels"]
+     # xbatchid = np.array(ytest["tot_index"].values, dtype=np.int64)
+     xbatch = h5file.root.data[:]
+     # ybatch = np.reshape(np.array(ybatch), (-1, 1))
+     h5file.close()
+     return xbatch
+
+
+ def predict():
+     xtest = get_testdata(datapath)
+     pred = model.predict(xtest)
+     print('model prediction: ', pred)
+
+
+ datapath = '/data/public/GenNet/processed_data/'
+ inputsize = 6986636
+ num_covariates = 0
+ genotype_path = datapath
+ l1_value = 0.001
+ model, masks = create_network_from_npz(datapath=datapath, inputsize=inputsize, genotype_path=genotype_path,
+                                        mask_order=['UKBB_sparse_connection_mask_ensmb_alligned', 'gene_ensmbl_ImmGen_mask_tstat'],
+                                        l1_value=l1_value, regression=False, num_covariates=num_covariates)
+ model.load_weights(datapath + 'bestweight_job_Imm_CAD.h5')
+ print('weights have been loaded')
+ predict()
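
Unlike the refseq script, this one stacks two masks, so the asserts in create_network_from_npz() require consecutive masks to chain: the column count of each mask must equal the row count of the next. A quick standalone sanity check, using the mask file names from the notebook's upload cell; the variable names (and the SNP-to-gene / gene-to-cell-type reading of the two files) are assumptions based on those names:

import scipy.sparse

# Columns of the first mask must match rows of the second for the
# LocallyDirected1D layers to stack.
snp_mask = scipy.sparse.load_npz('UKBB_sparse_connection_mask_ensmb_alligned.npz')
gene_mask = scipy.sparse.load_npz('gene_ensmbl_ImmGen_mask_tstat.npz')
print(snp_mask.shape, gene_mask.shape)
assert snp_mask.shape[1] == gene_mask.shape[0], "masks do not chain"
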
gennet_launch_ImmGene.ipynb ADDED
The diff for this file is too large to render. See raw diff