Fixing
parent e871f045a4
commit f7bb1e7858
@@ -4,14 +4,12 @@ import shutil

 from explaining_framework.utils.io import read_yaml, write_yaml
 from torch_geometric.data.makedirs import makedirs
-# from torch_geometric.graphgym.loader import load_pyg_dataset
+from torch_geometric.graphgym.loader import create_dataset
+from torch_geometric.graphgym.utils.io import string_to_python


 def chunkizing_list(l, n):
-    for i in range(0, len(l), n):
-        yield l[i : i + n]
+    return [l[i : i + n] for i in range(0, len(l), n)]


 if "__main__" == __name__:
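
A side note on the chunkizing_list change above: the old version was a generator that could only be consumed once, while the new version builds every chunk eagerly and returns a plain list. A minimal standalone sketch of the two behaviours (illustration only, not part of the commit):

    def chunkizing_list_old(l, n):
        # old behaviour: lazily yield one chunk at a time
        for i in range(0, len(l), n):
            yield l[i : i + n]

    def chunkizing_list(l, n):
        # new behaviour: materialise all chunks up front
        return [l[i : i + n] for i in range(0, len(l), n)]

    chunks = chunkizing_list(list(range(7)), 3)
    assert chunks == [[0, 1, 2], [3, 4, 5], [6]]
    assert list(chunkizing_list_old(list(range(7)), 3)) == chunks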
@@ -23,60 +21,61 @@ if "__main__" == __name__:
     makedirs(explaining_folder)
     explainer_folder = os.path.join(config_folder, "explaining")
     makedirs(explainer_folder)
+    chunk = list(range(154, 160, 2))

     DATASET = [
         "CIFAR10",
-        "TRIANGLES",
-        "COLORS-3",
-        "REDDIT-BINARY",
-        "REDDIT-MULTI-5K",
-        "REDDIT-MULTI-12K",
-        "COLLAB",
-        "DBLP_v1",
-        "COIL-DEL",
-        "COIL-RAG",
-        "Fingerprint",
-        "Letter-high",
-        "Letter-low",
-        "Letter-med",
-        "MSRC_9",
-        "MSRC_21",
-        "MSRC_21C",
-        "DD",
-        "ENZYMES",
-        "PROTEINS",
-        "QM9",
-        "MUTAG",
-        "Mutagenicity",
-        "AIDS",
-        "PATTERN",
-        "CLUSTER",
-        "MNIST",
-        "CIFAR10",
-        "TSP",
-        "CSL",
-        "KarateClub",
-        "CS",
-        "Physics",
-        "BBBP",
-        "Tox21",
-        "HIV",
-        "PCBA",
-        "MUV",
-        "BACE",
-        "SIDER",
-        "ClinTox",
-        "AIFB",
-        "AM",
-        "MUTAG",
-        "BGS",
-        "FAUST",
-        "DynamicFAUST",
-        "ShapeNet",
-        "ModelNet10",
-        "ModelNet40",
-        "PascalVOC-SP",
-        "COCO-SP",
+        # "TRIANGLES",
+        # "COLORS-3",
+        # "REDDIT-BINARY",
+        # "REDDIT-MULTI-5K",
+        # "REDDIT-MULTI-12K",
+        # "COLLAB",
+        # "DBLP_v1",
+        # "COIL-DEL",
+        # "COIL-RAG",
+        # "Fingerprint",
+        # "Letter-high",
+        # "Letter-low",
+        # "Letter-med",
+        # "MSRC_9",
+        # "MSRC_21",
+        # "MSRC_21C",
+        # "DD",
+        # "ENZYMES",
+        # "PROTEINS",
+        # "QM9",
+        # "MUTAG",
+        # "Mutagenicity",
+        # "AIDS",
+        # "PATTERN",
+        # "CLUSTER",
+        # "MNIST",
+        # "CIFAR10",
+        # "TSP",
+        # "CSL",
+        # "KarateClub",
+        # "CS",
+        # "Physics",
+        # "BBBP",
+        # "Tox21",
+        # "HIV",
+        # "PCBA",
+        # "MUV",
+        # "BACE",
+        # "SIDER",
+        # "ClinTox",
+        # "AIFB",
+        # "AM",
+        # "MUTAG",
+        # "BGS",
+        # "FAUST",
+        # "DynamicFAUST",
+        # "ShapeNet",
+        # "ModelNet10",
+        # "ModelNet40",
+        # "PascalVOC-SP",
+        # "COCO-SP",
     ]
     EXPLAINER = [
         "CAM",
@@ -95,57 +94,56 @@ if "__main__" == __name__:
         "SCGNN",
     ]

-    # for dataset_name in DATASET:
-    # try:
-    # dataset = load_pyg_dataset(name=dataset_name, dataset_dir="/tmp/")
-    # except Exception as e:
-    # print(e)
-    # continue
-    # for chunk in chunkizing_list(list(range(len(dataset))), 1000):
-    # for model_kind in ["best", "worst"]:
-    # for explainer_name in EXPLAINER:
-    # explaining_cfg = {}
-    # # explaining_cfg['adjust']['strategy']= 'rpns'
-    # # explaining_cfg['attack']['name']= 'all'
-    # explaining_cfg[
-    # "cfg_dest"
-    # ] = f"dataset={dataset_name}-model={model_kind}=explainer={explainer_name}-chunk=[{chunk[0]},{chunk[-1]}]"
-    # explaining_cfg["dataset"] = {}
-    # explaining_cfg["dataset"]["name"] = dataset_name
-    # explaining_cfg["dataset"]["item"] = chunk
-    # # explaining_cfg['explainer']['cfg']= 'default'
-    # explaining_cfg["explainer"] = {}
-    # explaining_cfg["explainer"]["name"] = explainer_name
-    # explaining_cfg["explanation_type"] = "phenomenon"
-    # # explaining_cfg['metrics']['accuracy']['name']='all'
-    # # explaining_cfg['metrics']['fidelity']['name']='all'
-    # # explaining_cfg['metrics']['sparsity']['name']='all'
-    # explaining_cfg["model"] = {}
-    # explaining_cfg["model"]["ckpt"] = model_kind
-    # explaining_cfg["model"][
-    # "path"
-    # ] = "/home/SIC/araison/test_ggym/pytorch_geometric/graphgym/results"
-    # # explaining_cfg['out_dir']='./explanation'
-    # # explaining_cfg['print']='both'
-    # # explaining_cfg['threshold']['config']['type']='all'
-    # # explaining_cfg['threshold']['value']['hard']=[0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9]
-    # # explaining_cfg['threshold']['value']['topk']=[2, 3, 5, 10, 20, 30, 50]
-    # PATH = os.path.join(
-    # explaining_folder + "/" + explaining_cfg["cfg_dest"] + ".yaml",
-    # )
+    for dataset_name in DATASET:
+        for model_kind in ["best", "worst"]:
+            for explainer_name in EXPLAINER:
+                explaining_cfg = {}
+                # explaining_cfg['adjust']['strategy']= 'rpns'
+                # explaining_cfg['attack']['name']= 'all'
+                explaining_cfg[
+                    "cfg_dest"
+                ] = f"dataset={dataset_name}-model={model_kind}=explainer={explainer_name}-chunk=[{chunk[0]},{chunk[-1]}]"
+
+                # = string_to_python(
+                #     f"dataset={dataset_name}-model={model_kind}=explainer={explainer_name}"
+                explaining_cfg["dataset"] = {}
+                explaining_cfg["dataset"]["name"] = string_to_python(dataset_name)
+                explaining_cfg["dataset"]["item"] = chunk
+                # explaining_cfg['explainer']['cfg']= 'default'
+                explaining_cfg["explainer"] = {}
+                explaining_cfg["explainer"]["name"] = string_to_python(explainer_name)
+                explaining_cfg["explainer"]["force"] = True
+                explaining_cfg["explanation_type"] = string_to_python("phenomenon")
+                # explaining_cfg['metrics']['accuracy']['name']='all'
+                # explaining_cfg['metrics']['fidelity']['name']='all'
+                # explaining_cfg['metrics']['sparsity']['name']='all'
+                explaining_cfg["model"] = {}
+                explaining_cfg["model"]["ckpt"] = string_to_python(model_kind)
+                explaining_cfg["model"]["path"] = string_to_python(
+                    "/home/SIC/araison/test_ggym/pytorch_geometric/graphgym/results"
+                )
+                # explaining_cfg['out_dir']='./explanation'
+                # explaining_cfg['print']='both'
+                # explaining_cfg['threshold']['config']['type']='all'
+                # explaining_cfg['threshold']['value']['hard']=[0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9]
+                # explaining_cfg['threshold']['value']['topk']=[2, 3, 5, 10, 20, 30, 50]
+                PATH = os.path.join(
+                    explaining_folder + "/" + explaining_cfg["cfg_dest"] + ".yaml",
+                )
+                write_yaml(explaining_cfg, PATH)
     # if os.path.exists(PATH):
     # continue
     # else:
     # write_yaml(explaining_cfg, PATH)
-    configs = [
-        path for path in glob.glob(os.path.join(explaining_folder, "**", "*.yaml"))
-    ]
-    for path in configs:
-        data = read_yaml(path)
-        data["model"][
-            "path"
-        ] = "/media/data/SIC/araison/exps/pyg_fork/graphgym/results/graph_classif_base_grid_graph_classif_grid"
-        write_yaml(data, path)
+    # configs = [
+    # path for path in glob.glob(os.path.join(explaining_folder, "**", "*.yaml"))
+    # ]
+    # for path in configs:
+    # data = read_yaml(path)
+    # data["model"][
+    # "path"
+    # ] = "/media/data/SIC/araison/exps/pyg_fork/graphgym/results/graph_classif_base_grid_graph_classif_grid"
+    # write_yaml(data, path)

     # for index, config_chunk in enumerate(
     # chunkizing_list(configs, int(len(configs) / 5))
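
For orientation, here is roughly what one iteration of the rewritten loop above writes. chunk = list(range(154, 160, 2)) evaluates to [154, 156, 158], so for the only uncommented dataset (CIFAR10), model "best" and explainer "CAM", the generated file under explaining_folder would look something like the sketch below (key order, quoting, and the behaviour of string_to_python on plain names are assumed, not taken from the commit):

    # dataset=CIFAR10-model=best=explainer=CAM-chunk=[154,158].yaml
    cfg_dest: dataset=CIFAR10-model=best=explainer=CAM-chunk=[154,158]
    dataset:
      item:
      - 154
      - 156
      - 158
      name: CIFAR10
    explainer:
      force: true
      name: CAM
    explanation_type: phenomenon
    model:
      ckpt: best
      path: /home/SIC/araison/test_ggym/pytorch_geometric/graphgym/results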
@@ -6,6 +6,20 @@ import os

 from typing import Any

 from eixgnn.eixgnn import EiXGNN
+from scgnn.scgnn import SCGNN
+from torch_geometric import seed_everything
+from torch_geometric.data import Batch, Data
+from torch_geometric.data.makedirs import makedirs
+from torch_geometric.explain import Explainer
+from torch_geometric.explain.config import ThresholdConfig
+from torch_geometric.explain.explanation import Explanation
+from torch_geometric.graphgym.config import cfg
+from torch_geometric.graphgym.loader import create_dataset
+from torch_geometric.graphgym.model_builder import cfg, create_model
+from torch_geometric.graphgym.utils.device import auto_select_device
+from torch_geometric.loader.dataloader import DataLoader
+from yacs.config import CfgNode as CN

 from explaining_framework.config.explainer_config.eixgnn_config import \
     eixgnn_cfg
 from explaining_framework.config.explainer_config.scgnn_config import scgnn_cfg
@@ -31,19 +45,6 @@ from explaining_framework.utils.io import (dump_cfg, is_exists,
                                            obj_config_to_str, read_json,
                                            set_printing, write_json,
                                            write_yaml)
-from scgnn.scgnn import SCGNN
-from torch_geometric import seed_everything
-from torch_geometric.data import Batch, Data
-from torch_geometric.data.makedirs import makedirs
-from torch_geometric.explain import Explainer
-from torch_geometric.explain.config import ThresholdConfig
-from torch_geometric.explain.explanation import Explanation
-from torch_geometric.graphgym.config import cfg
-from torch_geometric.graphgym.loader import create_dataset
-from torch_geometric.graphgym.model_builder import cfg, create_model
-from torch_geometric.graphgym.utils.device import auto_select_device
-from torch_geometric.loader.dataloader import DataLoader
-from yacs.config import CfgNode as CN

 all__captum = [
     "LRP",
@@ -26,7 +26,7 @@ def read_yaml(path: str) -> dict:

 def write_yaml(data: dict, path: str) -> None:
     with open(path, "w") as f:
-        data = yaml.dump(data, f)
+        data = yaml.safe_dump(data, f, default_flow_style=False)


 def dump_cfg(cfg, path):
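
The last hunk switches write_yaml from yaml.dump to yaml.safe_dump with block-style output. A minimal standalone sketch of the resulting behaviour (illustration only; the sample dict and /tmp path are made up):

    import yaml

    def write_yaml(data: dict, path: str) -> None:
        # mirrors the updated helper: safe_dump only serialises plain Python
        # types, and default_flow_style=False forces block-style YAML
        with open(path, "w") as f:
            yaml.safe_dump(data, f, default_flow_style=False)

    write_yaml({"dataset": {"name": "CIFAR10", "item": [154, 156, 158]}}, "/tmp/example.yaml")
    # /tmp/example.yaml now contains:
    # dataset:
    #   item:
    #   - 154
    #   - 156
    #   - 158
    #   name: CIFAR10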