# Mount Google Drive so the cloned repo, datasets and checkpoints
# persist across Colab sessions.
from google.colab import drive
drive.mount('/content/gdrive')
Mounted at /content/gdrive
# Work from the Drive root so everything written below is persisted.
%cd gdrive/My Drive
/content/gdrive/My Drive
import os
# Clone the repository (with submodules) on first run; on later runs just
# enter it and fast-forward to the latest commit.
if not os.path.isdir('neuroSAT'):
    !git clone --recurse-submodules https://github.com/dmeoli/neuroSAT
    %cd neuroSAT
else:
    %cd neuroSAT
    !git pull
/content/gdrive/My Drive/neuroSAT Already up to date.
# Install the pinned dependencies (gym, tensorboardX, torch+cu113,
# torch-scatter/sparse/geometric, ... per requirements.txt).
!pip install -r requirements.txt
Looking in links: https://download.pytorch.org/whl/cu113/torch_stable.html, https://data.pyg.org/whl/torch-1.10.0+cu113.html Collecting gym==0.20.0 Downloading gym-0.20.0.tar.gz (1.6 MB) |████████████████████████████████| 1.6 MB 4.1 MB/s Requirement already satisfied: numpy in /usr/local/lib/python3.7/dist-packages (from -r requirements.txt (line 2)) (1.19.5) Requirement already satisfied: PyYAML in /usr/local/lib/python3.7/dist-packages (from -r requirements.txt (line 3)) (3.13) Requirement already satisfied: scipy in /usr/local/lib/python3.7/dist-packages (from -r requirements.txt (line 4)) (1.4.1) Collecting split-folders Downloading split_folders-0.4.3-py3-none-any.whl (7.4 kB) Collecting tensorboardX Downloading tensorboardX-2.4.1-py2.py3-none-any.whl (124 kB) |████████████████████████████████| 124 kB 66.7 MB/s Requirement already satisfied: tensorflow in /usr/local/lib/python3.7/dist-packages (from -r requirements.txt (line 7)) (2.7.0) Collecting torch==1.10.0+cu113 Downloading https://download.pytorch.org/whl/cu113/torch-1.10.0%2Bcu113-cp37-cp37m-linux_x86_64.whl (1821.5 MB) |██████████████▋ | 834.1 MB 133.0 MB/s eta 0:00:08tcmalloc: large alloc 1147494400 bytes == 0x564cc3796000 @ 0x7fee6e851615 0x564cbfe0c4cc 0x564cbfeec47a 0x564cbfe0f2ed 0x564cbff00e1d 0x564cbfe82e99 0x564cbfe7d9ee 0x564cbfe10bda 0x564cbfe82d00 0x564cbfe7d9ee 0x564cbfe10bda 0x564cbfe7f737 0x564cbff01c66 0x564cbfe7edaf 0x564cbff01c66 0x564cbfe7edaf 0x564cbff01c66 0x564cbfe7edaf 0x564cbfe11039 0x564cbfe54409 0x564cbfe0fc52 0x564cbfe82c25 0x564cbfe7d9ee 0x564cbfe10bda 0x564cbfe7f737 0x564cbfe7d9ee 0x564cbfe10bda 0x564cbfe7e915 0x564cbfe10afa 0x564cbfe7ec0d 0x564cbfe7d9ee |██████████████████▌ | 1055.7 MB 1.2 MB/s eta 0:10:46tcmalloc: large alloc 1434370048 bytes == 0x564d07dec000 @ 0x7fee6e851615 0x564cbfe0c4cc 0x564cbfeec47a 0x564cbfe0f2ed 0x564cbff00e1d 0x564cbfe82e99 0x564cbfe7d9ee 0x564cbfe10bda 0x564cbfe82d00 0x564cbfe7d9ee 0x564cbfe10bda 0x564cbfe7f737 0x564cbff01c66 0x564cbfe7edaf 
0x564cbff01c66 0x564cbfe7edaf 0x564cbff01c66 0x564cbfe7edaf 0x564cbfe11039 0x564cbfe54409 0x564cbfe0fc52 0x564cbfe82c25 0x564cbfe7d9ee 0x564cbfe10bda 0x564cbfe7f737 0x564cbfe7d9ee 0x564cbfe10bda 0x564cbfe7e915 0x564cbfe10afa 0x564cbfe7ec0d 0x564cbfe7d9ee |███████████████████████▌ | 1336.2 MB 1.2 MB/s eta 0:06:56tcmalloc: large alloc 1792966656 bytes == 0x564d5d5d8000 @ 0x7fee6e851615 0x564cbfe0c4cc 0x564cbfeec47a 0x564cbfe0f2ed 0x564cbff00e1d 0x564cbfe82e99 0x564cbfe7d9ee 0x564cbfe10bda 0x564cbfe82d00 0x564cbfe7d9ee 0x564cbfe10bda 0x564cbfe7f737 0x564cbff01c66 0x564cbfe7edaf 0x564cbff01c66 0x564cbfe7edaf 0x564cbff01c66 0x564cbfe7edaf 0x564cbfe11039 0x564cbfe54409 0x564cbfe0fc52 0x564cbfe82c25 0x564cbfe7d9ee 0x564cbfe10bda 0x564cbfe7f737 0x564cbfe7d9ee 0x564cbfe10bda 0x564cbfe7e915 0x564cbfe10afa 0x564cbfe7ec0d 0x564cbfe7d9ee |█████████████████████████████▊ | 1691.1 MB 1.1 MB/s eta 0:01:56tcmalloc: large alloc 2241208320 bytes == 0x564cc3796000 @ 0x7fee6e851615 0x564cbfe0c4cc 0x564cbfeec47a 0x564cbfe0f2ed 0x564cbff00e1d 0x564cbfe82e99 0x564cbfe7d9ee 0x564cbfe10bda 0x564cbfe82d00 0x564cbfe7d9ee 0x564cbfe10bda 0x564cbfe7f737 0x564cbff01c66 0x564cbfe7edaf 0x564cbff01c66 0x564cbfe7edaf 0x564cbff01c66 0x564cbfe7edaf 0x564cbfe11039 0x564cbfe54409 0x564cbfe0fc52 0x564cbfe82c25 0x564cbfe7d9ee 0x564cbfe10bda 0x564cbfe7f737 0x564cbfe7d9ee 0x564cbfe10bda 0x564cbfe7e915 0x564cbfe10afa 0x564cbfe7ec0d 0x564cbfe7d9ee |████████████████████████████████| 1821.5 MB 1.2 MB/s eta 0:00:01tcmalloc: large alloc 1821458432 bytes == 0x564d490f8000 @ 0x7fee6e8501e7 0x564cbfe42067 0x564cbfe0c4cc 0x564cbfeec47a 0x564cbfe0f2ed 0x564cbff00e1d 0x564cbfe82e99 0x564cbfe7d9ee 0x564cbfe10bda 0x564cbfe7ec0d 0x564cbfe7d9ee 0x564cbfe10bda 0x564cbfe7ec0d 0x564cbfe7d9ee 0x564cbfe10bda 0x564cbfe7ec0d 0x564cbfe7d9ee 0x564cbfe10bda 0x564cbfe7ec0d 0x564cbfe7d9ee 0x564cbfe10bda 0x564cbfe7ec0d 0x564cbfe10afa 0x564cbfe7ec0d 0x564cbfe7d9ee 0x564cbfe10bda 0x564cbfe7f737 0x564cbfe7d9ee 0x564cbfe10bda 0x564cbfe7f737 
0x564cbfe7d9ee tcmalloc: large alloc 2276827136 bytes == 0x564db5a0c000 @ 0x7fee6e851615 0x564cbfe0c4cc 0x564cbfeec47a 0x564cbfe0f2ed 0x564cbff00e1d 0x564cbfe82e99 0x564cbfe7d9ee 0x564cbfe10bda 0x564cbfe7ec0d 0x564cbfe7d9ee 0x564cbfe10bda 0x564cbfe7ec0d 0x564cbfe7d9ee 0x564cbfe10bda 0x564cbfe7ec0d 0x564cbfe7d9ee 0x564cbfe10bda 0x564cbfe7ec0d 0x564cbfe7d9ee 0x564cbfe10bda 0x564cbfe7ec0d 0x564cbfe10afa 0x564cbfe7ec0d 0x564cbfe7d9ee 0x564cbfe10bda 0x564cbfe7f737 0x564cbfe7d9ee 0x564cbfe10bda 0x564cbfe7f737 0x564cbfe7d9ee 0x564cbfe11271 |████████████████████████████████| 1821.5 MB 5.7 kB/s Collecting torch-scatter Downloading https://data.pyg.org/whl/torch-1.10.0%2Bcu113/torch_scatter-2.0.9-cp37-cp37m-linux_x86_64.whl (7.9 MB) |████████████████████████████████| 7.9 MB 2.7 MB/s Collecting torch-sparse Downloading https://data.pyg.org/whl/torch-1.10.0%2Bcu113/torch_sparse-0.6.12-cp37-cp37m-linux_x86_64.whl (3.5 MB) |████████████████████████████████| 3.5 MB 58.9 MB/s Collecting torch-geometric Downloading torch_geometric-2.0.3.tar.gz (370 kB) |████████████████████████████████| 370 kB 74.7 MB/s Requirement already satisfied: cloudpickle>=1.2.0 in /usr/local/lib/python3.7/dist-packages (from gym==0.20.0->-r requirements.txt (line 1)) (1.3.0) Requirement already satisfied: typing-extensions in /usr/local/lib/python3.7/dist-packages (from torch==1.10.0+cu113->-r requirements.txt (line 9)) (3.10.0.2) Requirement already satisfied: protobuf>=3.8.0 in /usr/local/lib/python3.7/dist-packages (from tensorboardX->-r requirements.txt (line 6)) (3.17.3) Requirement already satisfied: six>=1.9 in /usr/local/lib/python3.7/dist-packages (from protobuf>=3.8.0->tensorboardX->-r requirements.txt (line 6)) (1.15.0) Requirement already satisfied: keras-preprocessing>=1.1.1 in /usr/local/lib/python3.7/dist-packages (from tensorflow->-r requirements.txt (line 7)) (1.1.2) Requirement already satisfied: opt-einsum>=2.3.2 in /usr/local/lib/python3.7/dist-packages (from tensorflow->-r 
requirements.txt (line 7)) (3.3.0) Requirement already satisfied: tensorflow-io-gcs-filesystem>=0.21.0 in /usr/local/lib/python3.7/dist-packages (from tensorflow->-r requirements.txt (line 7)) (0.22.0) Requirement already satisfied: wheel<1.0,>=0.32.0 in /usr/local/lib/python3.7/dist-packages (from tensorflow->-r requirements.txt (line 7)) (0.37.0) Requirement already satisfied: tensorflow-estimator<2.8,~=2.7.0rc0 in /usr/local/lib/python3.7/dist-packages (from tensorflow->-r requirements.txt (line 7)) (2.7.0) Requirement already satisfied: h5py>=2.9.0 in /usr/local/lib/python3.7/dist-packages (from tensorflow->-r requirements.txt (line 7)) (3.1.0) Requirement already satisfied: termcolor>=1.1.0 in /usr/local/lib/python3.7/dist-packages (from tensorflow->-r requirements.txt (line 7)) (1.1.0) Requirement already satisfied: astunparse>=1.6.0 in /usr/local/lib/python3.7/dist-packages (from tensorflow->-r requirements.txt (line 7)) (1.6.3) Requirement already satisfied: wrapt>=1.11.0 in /usr/local/lib/python3.7/dist-packages (from tensorflow->-r requirements.txt (line 7)) (1.13.3) Requirement already satisfied: flatbuffers<3.0,>=1.12 in /usr/local/lib/python3.7/dist-packages (from tensorflow->-r requirements.txt (line 7)) (2.0) Requirement already satisfied: libclang>=9.0.1 in /usr/local/lib/python3.7/dist-packages (from tensorflow->-r requirements.txt (line 7)) (12.0.0) Requirement already satisfied: grpcio<2.0,>=1.24.3 in /usr/local/lib/python3.7/dist-packages (from tensorflow->-r requirements.txt (line 7)) (1.42.0) Requirement already satisfied: absl-py>=0.4.0 in /usr/local/lib/python3.7/dist-packages (from tensorflow->-r requirements.txt (line 7)) (0.12.0) Requirement already satisfied: tensorboard~=2.6 in /usr/local/lib/python3.7/dist-packages (from tensorflow->-r requirements.txt (line 7)) (2.7.0) Requirement already satisfied: keras<2.8,>=2.7.0rc0 in /usr/local/lib/python3.7/dist-packages (from tensorflow->-r requirements.txt (line 7)) (2.7.0) Requirement 
already satisfied: google-pasta>=0.1.1 in /usr/local/lib/python3.7/dist-packages (from tensorflow->-r requirements.txt (line 7)) (0.2.0) Requirement already satisfied: gast<0.5.0,>=0.2.1 in /usr/local/lib/python3.7/dist-packages (from tensorflow->-r requirements.txt (line 7)) (0.4.0) Requirement already satisfied: cached-property in /usr/local/lib/python3.7/dist-packages (from h5py>=2.9.0->tensorflow->-r requirements.txt (line 7)) (1.5.2) Requirement already satisfied: setuptools>=41.0.0 in /usr/local/lib/python3.7/dist-packages (from tensorboard~=2.6->tensorflow->-r requirements.txt (line 7)) (57.4.0) Requirement already satisfied: tensorboard-data-server<0.7.0,>=0.6.0 in /usr/local/lib/python3.7/dist-packages (from tensorboard~=2.6->tensorflow->-r requirements.txt (line 7)) (0.6.1) Requirement already satisfied: requests<3,>=2.21.0 in /usr/local/lib/python3.7/dist-packages (from tensorboard~=2.6->tensorflow->-r requirements.txt (line 7)) (2.23.0) Requirement already satisfied: google-auth<3,>=1.6.3 in /usr/local/lib/python3.7/dist-packages (from tensorboard~=2.6->tensorflow->-r requirements.txt (line 7)) (1.35.0) Requirement already satisfied: google-auth-oauthlib<0.5,>=0.4.1 in /usr/local/lib/python3.7/dist-packages (from tensorboard~=2.6->tensorflow->-r requirements.txt (line 7)) (0.4.6) Requirement already satisfied: tensorboard-plugin-wit>=1.6.0 in /usr/local/lib/python3.7/dist-packages (from tensorboard~=2.6->tensorflow->-r requirements.txt (line 7)) (1.8.0) Requirement already satisfied: markdown>=2.6.8 in /usr/local/lib/python3.7/dist-packages (from tensorboard~=2.6->tensorflow->-r requirements.txt (line 7)) (3.3.6) Requirement already satisfied: werkzeug>=0.11.15 in /usr/local/lib/python3.7/dist-packages (from tensorboard~=2.6->tensorflow->-r requirements.txt (line 7)) (1.0.1) Requirement already satisfied: rsa<5,>=3.1.4 in /usr/local/lib/python3.7/dist-packages (from google-auth<3,>=1.6.3->tensorboard~=2.6->tensorflow->-r requirements.txt (line 7)) (4.8) 
Requirement already satisfied: pyasn1-modules>=0.2.1 in /usr/local/lib/python3.7/dist-packages (from google-auth<3,>=1.6.3->tensorboard~=2.6->tensorflow->-r requirements.txt (line 7)) (0.2.8) Requirement already satisfied: cachetools<5.0,>=2.0.0 in /usr/local/lib/python3.7/dist-packages (from google-auth<3,>=1.6.3->tensorboard~=2.6->tensorflow->-r requirements.txt (line 7)) (4.2.4) Requirement already satisfied: requests-oauthlib>=0.7.0 in /usr/local/lib/python3.7/dist-packages (from google-auth-oauthlib<0.5,>=0.4.1->tensorboard~=2.6->tensorflow->-r requirements.txt (line 7)) (1.3.0) Requirement already satisfied: importlib-metadata>=4.4 in /usr/local/lib/python3.7/dist-packages (from markdown>=2.6.8->tensorboard~=2.6->tensorflow->-r requirements.txt (line 7)) (4.8.2) Requirement already satisfied: zipp>=0.5 in /usr/local/lib/python3.7/dist-packages (from importlib-metadata>=4.4->markdown>=2.6.8->tensorboard~=2.6->tensorflow->-r requirements.txt (line 7)) (3.6.0) Requirement already satisfied: pyasn1<0.5.0,>=0.4.6 in /usr/local/lib/python3.7/dist-packages (from pyasn1-modules>=0.2.1->google-auth<3,>=1.6.3->tensorboard~=2.6->tensorflow->-r requirements.txt (line 7)) (0.4.8) Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.7/dist-packages (from requests<3,>=2.21.0->tensorboard~=2.6->tensorflow->-r requirements.txt (line 7)) (2021.10.8) Requirement already satisfied: urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1 in /usr/local/lib/python3.7/dist-packages (from requests<3,>=2.21.0->tensorboard~=2.6->tensorflow->-r requirements.txt (line 7)) (1.24.3) Requirement already satisfied: chardet<4,>=3.0.2 in /usr/local/lib/python3.7/dist-packages (from requests<3,>=2.21.0->tensorboard~=2.6->tensorflow->-r requirements.txt (line 7)) (3.0.4) Requirement already satisfied: idna<3,>=2.5 in /usr/local/lib/python3.7/dist-packages (from requests<3,>=2.21.0->tensorboard~=2.6->tensorflow->-r requirements.txt (line 7)) (2.10) Requirement already satisfied: 
oauthlib>=3.0.0 in /usr/local/lib/python3.7/dist-packages (from requests-oauthlib>=0.7.0->google-auth-oauthlib<0.5,>=0.4.1->tensorboard~=2.6->tensorflow->-r requirements.txt (line 7)) (3.1.1) Requirement already satisfied: tqdm in /usr/local/lib/python3.7/dist-packages (from torch-geometric->-r requirements.txt (line 13)) (4.62.3) Requirement already satisfied: networkx in /usr/local/lib/python3.7/dist-packages (from torch-geometric->-r requirements.txt (line 13)) (2.6.3) Requirement already satisfied: scikit-learn in /usr/local/lib/python3.7/dist-packages (from torch-geometric->-r requirements.txt (line 13)) (1.0.1) Requirement already satisfied: pandas in /usr/local/lib/python3.7/dist-packages (from torch-geometric->-r requirements.txt (line 13)) (1.1.5) Collecting rdflib Downloading rdflib-6.1.1-py3-none-any.whl (482 kB) |████████████████████████████████| 482 kB 68.3 MB/s Requirement already satisfied: googledrivedownloader in /usr/local/lib/python3.7/dist-packages (from torch-geometric->-r requirements.txt (line 13)) (0.4) Requirement already satisfied: jinja2 in /usr/local/lib/python3.7/dist-packages (from torch-geometric->-r requirements.txt (line 13)) (2.11.3) Requirement already satisfied: pyparsing in /usr/local/lib/python3.7/dist-packages (from torch-geometric->-r requirements.txt (line 13)) (3.0.6) Collecting yacs Downloading yacs-0.1.8-py3-none-any.whl (14 kB) Requirement already satisfied: MarkupSafe>=0.23 in /usr/local/lib/python3.7/dist-packages (from jinja2->torch-geometric->-r requirements.txt (line 13)) (2.0.1) Requirement already satisfied: pytz>=2017.2 in /usr/local/lib/python3.7/dist-packages (from pandas->torch-geometric->-r requirements.txt (line 13)) (2018.9) Requirement already satisfied: python-dateutil>=2.7.3 in /usr/local/lib/python3.7/dist-packages (from pandas->torch-geometric->-r requirements.txt (line 13)) (2.8.2) Collecting isodate Downloading isodate-0.6.1-py2.py3-none-any.whl (41 kB) |████████████████████████████████| 41 kB 783 
kB/s Requirement already satisfied: joblib>=0.11 in /usr/local/lib/python3.7/dist-packages (from scikit-learn->torch-geometric->-r requirements.txt (line 13)) (1.1.0) Requirement already satisfied: threadpoolctl>=2.0.0 in /usr/local/lib/python3.7/dist-packages (from scikit-learn->torch-geometric->-r requirements.txt (line 13)) (3.0.0) Building wheels for collected packages: gym, torch-geometric Building wheel for gym (setup.py) ... done Created wheel for gym: filename=gym-0.20.0-py3-none-any.whl size=1650479 sha256=5213ac4aba93f2e455a102194da33649ae2103b73be13f9e22135b155e56b536 Stored in directory: /root/.cache/pip/wheels/0d/08/46/9393ca20304119b40cbb5794479b77658b42e3db6d3bdd343e Building wheel for torch-geometric (setup.py) ... done Created wheel for torch-geometric: filename=torch_geometric-2.0.3-py3-none-any.whl size=581969 sha256=8e6379070913ee81a971c7fb5529b460166f8aaab7f7a54e68cacfb7a658063e Stored in directory: /root/.cache/pip/wheels/c3/2a/58/87ce0508964d4def1aafb92750c4f3ac77038efd1b9a89dcf5 Successfully built gym torch-geometric Installing collected packages: isodate, yacs, rdflib, torch-sparse, torch-scatter, torch-geometric, torch, tensorboardX, split-folders, gym Attempting uninstall: torch Found existing installation: torch 1.10.0+cu111 Uninstalling torch-1.10.0+cu111: Successfully uninstalled torch-1.10.0+cu111 Attempting uninstall: gym Found existing installation: gym 0.17.3 Uninstalling gym-0.17.3: Successfully uninstalled gym-0.17.3 Successfully installed gym-0.20.0 isodate-0.6.1 rdflib-6.1.1 split-folders-0.4.3 tensorboardX-2.4.1 torch-1.10.0+cu113 torch-geometric-2.0.3 torch-scatter-2.0.9 torch-sparse-0.6.12 yacs-0.1.8
# Benchmark problem sets, keyed by task family.
# Per family: 'train'/'val' distributions, 'inner_test' (held-out problems
# from the training distributions) and 'test' (harder distributions reserved
# for generalization experiments).
_UF_BENCHMARKS = ['uf50-218', 'uuf50-218', 'uf100-430', 'uuf100-430']
datasets = {
    'uniform-random-3-sat': {
        'train': list(_UF_BENCHMARKS),
        'val': list(_UF_BENCHMARKS),
        'inner_test': list(_UF_BENCHMARKS),
        'test': ['uf250-1065', 'uuf250-1065'],
    },
    'graph-coloring': {
        'train': ['flat50-115'],
        'val': ['flat50-115'],
        'inner_test': ['flat50-115'],
        'test': ['flat30-60', 'flat75-180', 'flat100-239', 'flat125-301',
                 'flat150-360', 'flat175-417', 'flat200-479'],
    },
}
# Enter the GQSAT submodule (training / evaluation scripts live here).
%cd GQSAT
/content/gdrive/My Drive/neuroSAT/GQSAT
# Build the MiniSat-based gym environment and its Python (SWIG) wrapper.
%cd minisat
# Expose numpy's C headers where the wrapper build expects them.
!sudo ln -s --force /usr/local/lib/python3.7/dist-packages/numpy/core/include/numpy /usr/include/numpy # https://stackoverflow.com/a/44935933/5555994
# Clean, compile MiniSat (warnings suppressed) and build the python wrapper.
!make distclean && CXXFLAGS=-w make && make python-wrap PYTHON=python3.7
# SWIG is needed to regenerate the C++ <-> Python bindings.
!apt install swig
!swig -fastdispatch -c++ -python minisat/gym/GymSolver.i
%cd ..
/content/gdrive/My Drive/neuroSAT/GQSAT/minisat rm -f build/release/minisat/core/Solver.o build/release/minisat/core/Main.o build/release/minisat/simp/SimpSolver.o build/release/minisat/simp/Main.o build/release/minisat/utils/System.o build/release/minisat/utils/Options.o build/release/minisat/gym/GymSolver.o build/debug/minisat/core/Solver.o build/debug/minisat/core/Main.o build/debug/minisat/simp/SimpSolver.o build/debug/minisat/simp/Main.o build/debug/minisat/utils/System.o build/debug/minisat/utils/Options.o build/debug/minisat/gym/GymSolver.o build/profile/minisat/core/Solver.o build/profile/minisat/core/Main.o build/profile/minisat/simp/SimpSolver.o build/profile/minisat/simp/Main.o build/profile/minisat/utils/System.o build/profile/minisat/utils/Options.o build/profile/minisat/gym/GymSolver.o build/dynamic/minisat/core/Solver.o build/dynamic/minisat/core/Main.o build/dynamic/minisat/simp/SimpSolver.o build/dynamic/minisat/simp/Main.o build/dynamic/minisat/utils/System.o build/dynamic/minisat/utils/Options.o build/dynamic/minisat/gym/GymSolver.o \ build/release/minisat/core/Solver.d build/release/minisat/core/Main.d build/release/minisat/simp/SimpSolver.d build/release/minisat/simp/Main.d build/release/minisat/utils/System.d build/release/minisat/utils/Options.d build/release/minisat/gym/GymSolver.d build/debug/minisat/core/Solver.d build/debug/minisat/core/Main.d build/debug/minisat/simp/SimpSolver.d build/debug/minisat/simp/Main.d build/debug/minisat/utils/System.d build/debug/minisat/utils/Options.d build/debug/minisat/gym/GymSolver.d build/profile/minisat/core/Solver.d build/profile/minisat/core/Main.d build/profile/minisat/simp/SimpSolver.d build/profile/minisat/simp/Main.d build/profile/minisat/utils/System.d build/profile/minisat/utils/Options.d build/profile/minisat/gym/GymSolver.d build/dynamic/minisat/core/Solver.d build/dynamic/minisat/core/Main.d build/dynamic/minisat/simp/SimpSolver.d build/dynamic/minisat/simp/Main.d 
build/dynamic/minisat/utils/System.d build/dynamic/minisat/utils/Options.d build/dynamic/minisat/gym/GymSolver.d \ build/release/bin/minisat_core build/release/bin/minisat build/debug/bin/minisat_core build/debug/bin/minisat build/profile/bin/minisat_core build/profile/bin/minisat build/dynamic/bin/minisat_core build/dynamic/bin/minisat \ build/release/lib/libminisat.a build/debug/lib/libminisat.a build/profile/lib/libminisat.a \ build/dynamic/lib/libminisat.so.2.1.0\ build/dynamic/lib/libminisat.so.2\ build/dynamic/lib/libminisat.so rm -f config.mk Compiling: build/release/minisat/simp/Main.o Compiling: build/release/minisat/core/Solver.o Compiling: build/release/minisat/simp/SimpSolver.o Compiling: build/release/minisat/utils/System.o Compiling: build/release/minisat/utils/Options.o Compiling: build/release/minisat/gym/GymSolver.o Linking Static Library: build/release/lib/libminisat.a Linking Binary: build/release/bin/minisat Compiling: build/dynamic/minisat/core/Solver.o Compiling: build/dynamic/minisat/simp/SimpSolver.o Compiling: build/dynamic/minisat/utils/System.o Compiling: build/dynamic/minisat/utils/Options.o Compiling: build/dynamic/minisat/gym/GymSolver.o Linking Shared Library: build/dynamic/lib/libminisat.so.2.1.0 g++ -O2 -fPIC -c minisat/gym/GymSolver_wrap.cxx -o minisat/gym/GymSolver_wrap.o -I. -I/usr/include/python3.7 g++ -shared -o minisat/gym/_GymSolver.so build/dynamic/minisat/core/Solver.o build/dynamic/minisat/simp/SimpSolver.o build/dynamic/minisat/utils/System.o build/dynamic/minisat/utils/Options.o build/dynamic/minisat/gym/GymSolver.o minisat/gym/GymSolver_wrap.o /usr/lib/x86_64-linux-gnu/libz.so Reading package lists... Done Building dependency tree Reading state information... Done The following additional packages will be installed: swig3.0 Suggested packages: swig-doc swig-examples swig3.0-examples swig3.0-doc The following NEW packages will be installed: swig swig3.0 0 upgraded, 2 newly installed, 0 to remove and 37 not upgraded. 
Need to get 1,100 kB of archives. After this operation, 5,822 kB of additional disk space will be used. Get:1 http://archive.ubuntu.com/ubuntu bionic/universe amd64 swig3.0 amd64 3.0.12-1 [1,094 kB] Get:2 http://archive.ubuntu.com/ubuntu bionic/universe amd64 swig amd64 3.0.12-1 [6,460 B] Fetched 1,100 kB in 2s (489 kB/s) Selecting previously unselected package swig3.0. (Reading database ... 155222 files and directories currently installed.) Preparing to unpack .../swig3.0_3.0.12-1_amd64.deb ... Unpacking swig3.0 (3.0.12-1) ... Selecting previously unselected package swig. Preparing to unpack .../swig_3.0.12-1_amd64.deb ... Unpacking swig (3.0.12-1) ... Setting up swig3.0 (3.0.12-1) ... Setting up swig (3.0.12-1) ... Processing triggers for man-db (2.8.3-2ubuntu0.1) ... /content/gdrive/My Drive/neuroSAT/GQSAT
We split (u)uf50-218 and (u)uf100-430 into three subsets: 800 training problems, 100 validation, and 100 test problems.
For generalization experiments, we use 100 problems from all the other benchmarks.
To evaluate the knowledge transfer properties of the trained models across different task families, we use 100 problems from all the graph coloring benchmarks.
# Select the task family and split its benchmarks into train/val/test on disk.
PROBLEM_TYPE='uniform-random-3-sat'
!bash train_val_test_split.sh "$PROBLEM_TYPE"
# Precompute per-problem metadata for every train and val benchmark.
for TRAIN_PROBLEM_NAME, VAL_PROBLEM_NAME in zip(datasets[PROBLEM_TYPE]['train'],
                                                datasets[PROBLEM_TYPE]['val']):
    !python add_metadata.py --eval-problems-paths ../data/"$PROBLEM_TYPE"/train/"$TRAIN_PROBLEM_NAME"
    !python add_metadata.py --eval-problems-paths ../data/"$PROBLEM_TYPE"/val/"$VAL_PROBLEM_NAME"
# Train one DQN agent per uniform-random-3-sat training benchmark, validating
# on the matching val benchmark.  Checkpoints are written every --save-freq
# batch updates under runs/ (see --logdir / dqn.py for details).
for TRAIN_PROBLEM_NAME, VAL_PROBLEM_NAME in zip(datasets[PROBLEM_TYPE]['train'],
                                                datasets[PROBLEM_TYPE]['val']):
    !python dqn.py \
    --logdir log \
    --env-name sat-v0 \
    --train-problems-paths ../data/"$PROBLEM_TYPE"/train/"$TRAIN_PROBLEM_NAME" \
    --eval-problems-paths ../data/"$PROBLEM_TYPE"/val/"$VAL_PROBLEM_NAME" \
    --lr 0.00002 \
    --bsize 64 \
    --buffer-size 20000 \
    --eps-init 1.0 \
    --eps-final 0.01 \
    --gamma 0.99 \
    --eps-decay-steps 30000 \
    --batch-updates 50000 \
    --history-len 1 \
    --init-exploration-steps 5000 \
    --step-freq 4 \
    --target-update-freq 10 \
    --loss mse \
    --opt adam \
    --save-freq 500 \
    --grad_clip 1 \
    --grad_clip_norm_type 2 \
    --eval-freq 1000 \
    --eval-time-limit 3600 \
    --core-steps 4 \
    --expert-exploration-prob 0.0 \
    --priority_alpha 0.5 \
    --priority_beta 0.5 \
    --e2v-aggregator sum \
    --n_hidden 1 \
    --hidden_size 64 \
    --decoder_v_out_size 32 \
    --decoder_e_out_size 1 \
    --decoder_g_out_size 1 \
    --encoder_v_out_size 32 \
    --encoder_e_out_size 32 \
    --encoder_g_out_size 32 \
    --core_v_out_size 64 \
    --core_e_out_size 64 \
    --core_g_out_size 32 \
    --activation relu \
    --penalty_size 0.1 \
    --train_time_max_decisions_allowed 500 \
    --test_time_max_decisions_allowed 500 \
    --no_max_cap_fill_buffer \
    --lr_scheduler_gamma 1 \
    --lr_scheduler_frequency 3000 \
    --independent_block_layers 0
for PROBLEM_NAME in datasets[PROBLEM_TYPE]['inner_test']:
!python add_metadata.py --eval-problems-paths ../data/"$PROBLEM_TYPE"/test/"$PROBLEM_NAME"
for PROBLEM_NAME in datasets['graph-coloring']['inner_test']:
!python add_metadata.py --eval-problems-paths ../data/"$PROBLEM_TYPE"/test/"$PROBLEM_NAME"
for PROBLEM_NAME in datasets['graph-coloring']['test']:
!python add_metadata.py --eval-problems-paths ../data/"$PROBLEM_TYPE"/"$PROBLEM_NAME"
# Trained checkpoints for the uniform-random-3-sat agents:
# training benchmark -> (run directory under runs/, checkpoint file stem).
models = {
    'uf50-218': ('Dec21_01-59-59_6bed2aa9b612', 'model_50002'),
    'uuf50-218': ('Dec21_14-55-50_5eccdc34d583', 'model_50007'),
    'uf100-430': ('Dec23_01-48-54_90582559eea7', 'model_50028'),
    'uuf100-430': ('Dec23_14-42-44_90582559eea7', 'model_50037'),
}
We test these trained models on the inner test sets.
# Evaluate each trained 3-SAT model on the inner test sets, sweeping the cap
# on solver decisions; results are appended as one TSV per combination.
for SAT_MODEL in models.keys():
    for PROBLEM_NAME in datasets[PROBLEM_TYPE]['inner_test']:
        # do not use models trained on unSAT problems to solve SAT ones
        if not (SAT_MODEL.startswith('uuf') and PROBLEM_NAME.startswith('uf')):
            for MODEL_DECISION in [10, 50, 100, 300, 500, 1000]:
                MODEL_DIR = models[SAT_MODEL][0]
                CHECKPOINT = models[SAT_MODEL][1]
                !python evaluate.py \
                --logdir log \
                --env-name sat-v0 \
                --core-steps -1 \
                --eps-final 0.0 \
                --eval-time-limit 100000000000000 \
                --no_restarts \
                --test_time_max_decisions_allowed "$MODEL_DECISION" \
                --eval-problems-paths ../data/"$PROBLEM_TYPE"/test/"$PROBLEM_NAME" \
                --model-dir runs/"$MODEL_DIR" \
                --model-checkpoint "$CHECKPOINT".chkp \
                >> runs/"$MODEL_DIR"/"$PROBLEM_NAME"-graphqsat-max"$MODEL_DECISION".tsv
We test the trained models on the outer test sets.
# Evaluate each trained 3-SAT model on the outer (generalization) test sets
# — note the path has no test/ subdirectory for these benchmarks.
for SAT_MODEL in models.keys():
    for PROBLEM_NAME in datasets[PROBLEM_TYPE]['test']:
        # do not use models trained on unSAT problems to solve SAT ones
        if not (SAT_MODEL.startswith('uuf') and PROBLEM_NAME.startswith('uf')):
            for MODEL_DECISION in [10, 50, 100, 300, 500, 1000]:
                MODEL_DIR = models[SAT_MODEL][0]
                CHECKPOINT = models[SAT_MODEL][1]
                !python evaluate.py \
                --logdir log \
                --env-name sat-v0 \
                --core-steps -1 \
                --eps-final 0.0 \
                --eval-time-limit 100000000000000 \
                --no_restarts \
                --test_time_max_decisions_allowed "$MODEL_DECISION" \
                --eval-problems-paths ../data/"$PROBLEM_TYPE"/"$PROBLEM_NAME" \
                --model-dir runs/"$MODEL_DIR" \
                --model-checkpoint "$CHECKPOINT".chkp \
                >> runs/"$MODEL_DIR"/"$PROBLEM_NAME"-graphqsat-max"$MODEL_DECISION".tsv
We test the trained models on the graph coloring test sets, both inner and outer.
# Knowledge-transfer: run the 3-SAT-trained models on the graph-coloring
# inner test set (all flat* benchmarks are satisfiable, hence only
# SAT-trained models are used).
for SAT_MODEL in models.keys():
    # do not use models trained on unSAT problems to solve SAT ones
    if not SAT_MODEL.startswith('uuf'):
        for PROBLEM_NAME in datasets['graph-coloring']['inner_test']:
            for MODEL_DECISION in [10, 50, 100, 300, 500, 1000]:
                MODEL_DIR = models[SAT_MODEL][0]
                CHECKPOINT = models[SAT_MODEL][1]
                !python evaluate.py \
                --logdir log \
                --env-name sat-v0 \
                --core-steps -1 \
                --eps-final 0.0 \
                --eval-time-limit 100000000000000 \
                --no_restarts \
                --test_time_max_decisions_allowed "$MODEL_DECISION" \
                --eval-problems-paths ../data/graph-coloring/test/"$PROBLEM_NAME" \
                --model-dir runs/"$MODEL_DIR" \
                --model-checkpoint "$CHECKPOINT".chkp \
                >> runs/"$MODEL_DIR"/"$PROBLEM_NAME"-graphqsat-max"$MODEL_DECISION".tsv
# Knowledge-transfer: run the 3-SAT-trained models on the graph-coloring
# outer (generalization) benchmarks.
for SAT_MODEL in models.keys():
    # do not use models trained on unSAT problems to solve SAT ones
    if not SAT_MODEL.startswith('uuf'):
        for PROBLEM_NAME in datasets['graph-coloring']['test']:
            for MODEL_DECISION in [10, 50, 100, 300, 500, 1000]:
                MODEL_DIR = models[SAT_MODEL][0]
                CHECKPOINT = models[SAT_MODEL][1]
                !python evaluate.py \
                --logdir log \
                --env-name sat-v0 \
                --core-steps -1 \
                --eps-final 0.0 \
                --eval-time-limit 100000000000000 \
                --no_restarts \
                --test_time_max_decisions_allowed "$MODEL_DECISION" \
                --eval-problems-paths ../data/graph-coloring/"$PROBLEM_NAME" \
                --model-dir runs/"$MODEL_DIR" \
                --model-checkpoint "$CHECKPOINT".chkp \
                >> runs/"$MODEL_DIR"/"$PROBLEM_NAME"-graphqsat-max"$MODEL_DECISION".tsv
Graph coloring benchmarks have only 100 problems each, except for flat50-115 which contains 1000, so we split it into three subsets: 800 training problems, 100 validation, and 100 test problems.
For generalization experiments, we use 100 problems from all the other benchmarks.
# Switch to the graph-coloring family and repeat the split + metadata steps.
PROBLEM_TYPE='graph-coloring'
!bash train_val_test_split.sh "$PROBLEM_TYPE"
for TRAIN_PROBLEM_NAME, VAL_PROBLEM_NAME in zip(datasets[PROBLEM_TYPE]['train'],
                                                datasets[PROBLEM_TYPE]['val']):
    !python add_metadata.py --eval-problems-paths ../data/"$PROBLEM_TYPE"/train/"$TRAIN_PROBLEM_NAME"
    !python add_metadata.py --eval-problems-paths ../data/"$PROBLEM_TYPE"/val/"$VAL_PROBLEM_NAME"
# Train a DQN agent on the graph-coloring training benchmark (flat50-115).
# NOTE(review): --grad_clip is 0.1 here while the 3-SAT run above uses 1 —
# confirm this difference is intentional.
for TRAIN_PROBLEM_NAME, VAL_PROBLEM_NAME in zip(datasets[PROBLEM_TYPE]['train'],
                                                datasets[PROBLEM_TYPE]['val']):
    !python dqn.py \
    --logdir log \
    --env-name sat-v0 \
    --train-problems-paths ../data/"$PROBLEM_TYPE"/train/"$TRAIN_PROBLEM_NAME" \
    --eval-problems-paths ../data/"$PROBLEM_TYPE"/val/"$VAL_PROBLEM_NAME" \
    --lr 0.00002 \
    --bsize 64 \
    --buffer-size 20000 \
    --eps-init 1.0 \
    --eps-final 0.01 \
    --gamma 0.99 \
    --eps-decay-steps 30000 \
    --batch-updates 50000 \
    --history-len 1 \
    --init-exploration-steps 5000 \
    --step-freq 4 \
    --target-update-freq 10 \
    --loss mse \
    --opt adam \
    --save-freq 500 \
    --grad_clip 0.1 \
    --grad_clip_norm_type 2 \
    --eval-freq 1000 \
    --eval-time-limit 3600 \
    --core-steps 4 \
    --expert-exploration-prob 0.0 \
    --priority_alpha 0.5 \
    --priority_beta 0.5 \
    --e2v-aggregator sum \
    --n_hidden 1 \
    --hidden_size 64 \
    --decoder_v_out_size 32 \
    --decoder_e_out_size 1 \
    --decoder_g_out_size 1 \
    --encoder_v_out_size 32 \
    --encoder_e_out_size 32 \
    --encoder_g_out_size 32 \
    --core_v_out_size 64 \
    --core_e_out_size 64 \
    --core_g_out_size 32 \
    --activation relu \
    --penalty_size 0.1 \
    --train_time_max_decisions_allowed 500 \
    --test_time_max_decisions_allowed 500 \
    --no_max_cap_fill_buffer \
    --lr_scheduler_gamma 1 \
    --lr_scheduler_frequency 3000 \
    --independent_block_layers 0
# Precompute metadata for the graph-coloring inner and outer test benchmarks
# (PROBLEM_TYPE is 'graph-coloring' here, so the paths are correct).
for PROBLEM_NAME in datasets[PROBLEM_TYPE]['inner_test']:
    !python add_metadata.py --eval-problems-paths ../data/"$PROBLEM_TYPE"/test/"$PROBLEM_NAME"
for PROBLEM_NAME in datasets[PROBLEM_TYPE]['test']:
    !python add_metadata.py --eval-problems-paths ../data/"$PROBLEM_TYPE"/"$PROBLEM_NAME"
# Run directory (under runs/) and checkpoint stem of the agent trained
# on the graph-coloring benchmark.
MODEL_DIR, CHECKPOINT = 'Dec08_08-39-57_e63e47f25457', 'model_50000'
We test this trained model on the inner test set.
# Evaluate the graph-coloring-trained model on its inner test set, sweeping
# the decision budget; one TSV per (problem set, budget) is appended.
for PROBLEM_NAME in datasets[PROBLEM_TYPE]['inner_test']:
    for MODEL_DECISION in [10, 50, 100, 300, 500, 1000]:
        !python evaluate.py \
        --logdir log \
        --env-name sat-v0 \
        --core-steps -1 \
        --eps-final 0.0 \
        --eval-time-limit 100000000000000 \
        --no_restarts \
        --test_time_max_decisions_allowed "$MODEL_DECISION" \
        --eval-problems-paths ../data/"$PROBLEM_TYPE"/test/"$PROBLEM_NAME" \
        --model-dir runs/"$MODEL_DIR" \
        --model-checkpoint "$CHECKPOINT".chkp \
        >> runs/"$MODEL_DIR"/"$PROBLEM_NAME"-graphqsat-max"$MODEL_DECISION".tsv
We test the trained model on the outer test sets.
# Evaluate the graph-coloring-trained model on the outer (generalization)
# benchmarks — these live directly under ../data/graph-coloring/, no test/.
for PROBLEM_NAME in datasets[PROBLEM_TYPE]['test']:
    for MODEL_DECISION in [10, 50, 100, 300, 500, 1000]:
        !python evaluate.py \
        --logdir log \
        --env-name sat-v0 \
        --core-steps -1 \
        --eps-final 0.0 \
        --eval-time-limit 100000000000000 \
        --no_restarts \
        --test_time_max_decisions_allowed "$MODEL_DECISION" \
        --eval-problems-paths ../data/"$PROBLEM_TYPE"/"$PROBLEM_NAME" \
        --model-dir runs/"$MODEL_DIR" \
        --model-checkpoint "$CHECKPOINT".chkp \
        >> runs/"$MODEL_DIR"/"$PROBLEM_NAME"-graphqsat-max"$MODEL_DECISION".tsv