Adding a parallelizing script and a dummy config generator
parent c010e08e9b
commit 1e8cc292e1
74  explaining_framework/utils/config_gen.py  Normal file
@@ -0,0 +1,74 @@
import os

from explaining_framework.utils.io import write_yaml
from torch_geometric.data.makedirs import makedirs


def divide_chunks(l, n):
    # Yield successive n-sized chunks of l.
    for i in range(0, len(l), n):
        yield l[i : i + n]


if "__main__" == __name__:
    # Resolve the configs/ folder relative to this file.
    config_folder = os.path.abspath(
        os.path.join(os.path.dirname(__file__), "../../", "configs")
    )
    makedirs(config_folder)
    explaining_folder = os.path.join(config_folder, "explaining")
    makedirs(explaining_folder)
    explainer_folder = os.path.join(config_folder, "explainer")
    makedirs(explainer_folder)

    DATASET = ["CIFAR10"]
    EXPLAINER = [
        "CAM",
        "GradCAM",
        "GNN_LRP",
        "GradExplainer",
        "GuidedBackPropagation",
        "IntegratedGradients",
        "PGExplainer",
        "PGMExplainer",
        "RandomExplainer",
        "SubgraphX",
        "GraphMASK",
        "GNNExplainer",
        "EIXGNN",
        "SCGNN",
    ]

    # One config per (dataset, 500-item chunk, model checkpoint, explainer).
    for dataset_name in DATASET:
        for chunk in divide_chunks(list(range(10000)), 500):
            for model_kind in ["best", "worst"]:
                for explainer_name in EXPLAINER:
                    explaining_cfg = {}
                    # explaining_cfg['adjust']['strategy'] = 'rpns'
                    # explaining_cfg['attack']['name'] = 'all'
                    explaining_cfg["cfg_dest"] = (
                        f"dataset={dataset_name}"
                        f"-model={model_kind}"
                        f"-explainer={explainer_name}"
                        f"-chunk=[{chunk[0]},{chunk[-1]}]"
                    )
                    explaining_cfg["dataset"] = {}
                    explaining_cfg["dataset"]["name"] = dataset_name
                    explaining_cfg["dataset"]["item"] = chunk
                    # explaining_cfg['explainer']['cfg'] = 'default'
                    explaining_cfg["explainer"] = {}
                    explaining_cfg["explainer"]["name"] = explainer_name
                    explaining_cfg["explanation_type"] = "phenomenon"
                    # explaining_cfg['metrics']['accuracy']['name'] = 'all'
                    # explaining_cfg['metrics']['fidelity']['name'] = 'all'
                    # explaining_cfg['metrics']['sparsity']['name'] = 'all'
                    explaining_cfg["model"] = {}
                    explaining_cfg["model"]["ckpt"] = model_kind
                    explaining_cfg["model"][
                        "path"
                    ] = "/home/SIC/araison/test_ggym/pytorch_geometric/graphgym/results"
                    # explaining_cfg['out_dir'] = './explanation'
                    # explaining_cfg['print'] = 'both'
                    # explaining_cfg['threshold']['config']['type'] = 'all'
                    # explaining_cfg['threshold']['value']['hard'] = [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9]
                    # explaining_cfg['threshold']['value']['topk'] = [2, 3, 5, 10, 20, 30, 50]
                    write_yaml(
                        explaining_cfg,
                        os.path.join(
                            explaining_folder,
                            explaining_cfg["cfg_dest"] + ".yaml",
                        ),
                    )
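For orientation, a minimal sketch of how this generator might be invoked and the kind of file names it writes; the invocation path and working directory are assumptions, and the exact YAML layout depends on write_yaml:

# Assumed: run from the repository root; one YAML per
# (dataset, chunk, checkpoint, explainer) combination lands in configs/explaining/
python3 explaining_framework/utils/config_gen.py
ls configs/explaining/
# e.g. dataset=CIFAR10-model=best-explainer=CAM-chunk=[0,499].yaml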
22  parallel.sh  Normal file
@@ -0,0 +1,22 @@
# Launch one run per YAML config in CONFIG_DIR, at most MAX_JOBS at a time.
CONFIG_DIR=$1
MAX_JOBS=${3:-3}
SLEEP=${4:-1}
MAIN=${5:-main}
GPU=${6:-0}

(
    trap 'kill 0' SIGINT
    CUR_JOBS=0
    for CONFIG in "$CONFIG_DIR"/*.yaml; do
        # Skip the literal pattern when the glob matches nothing.
        if [ "$CONFIG" != "$CONFIG_DIR/*.yaml" ]; then
            # Throttle: once MAX_JOBS runs are in flight, wait for one to finish.
            ((CUR_JOBS >= MAX_JOBS)) && wait -n
            export CUDA_VISIBLE_DEVICES=$GPU
            python3 "$MAIN.py" --explaining_cfg "$CONFIG" &
            echo "$CONFIG"
            sleep "$SLEEP"
            ((++CUR_JOBS))
        fi
    done

    wait
)
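A minimal usage sketch for the script above, assuming the configs generated by config_gen.py and a main.py entry point in the repository root; positional argument $2 is not read by the script, so a placeholder is passed:

# 4 concurrent jobs on GPU 0, 1 s pause between launches, driving main.py
bash parallel.sh configs/explaining _ 4 1 main 0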