import sys
import pandas
import gradio
import pathlib
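# Make the local modules under lib/ (roberta2 and the explainers) importable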
sys.path.append("lib")
import torch
from roberta2 import RobertaForSequenceClassification
from transformers import AutoTokenizer
from gradient_rollout import GradientRolloutExplainer
from rollout import RolloutExplainer
from integrated_gradients import IntegratedGradientsExplainer
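# Load the SST-2 fine-tuned RoBERTa classifier and its tokenizer, on GPU when available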
device = torch.device("cuda") if torch.cuda.is_available() else torch.device("cpu")
model = RobertaForSequenceClassification.from_pretrained("textattack/roberta-base-SST-2").to(device)
tokenizer = AutoTokenizer.from_pretrained("textattack/roberta-base-SST-2")
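# One explainer instance per attribution method shown in the interface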
ig_explainer = IntegratedGradientsExplainer(model, tokenizer)
gr_explainer = GradientRolloutExplainer(model, tokenizer)
ro_explainer = RolloutExplainer(model, tokenizer)
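# Run all three explainers on the same sentence; wired to the Submit button below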
def run(sent, gradient, rollout, ig, ig_baseline):
    a = gr_explainer(sent, gradient)
    b = ro_explainer(sent, rollout)
    c = ig_explainer(sent, ig, ig_baseline)
    return a, b, c
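# Example sentences loaded from a CSV and offered as one-click inputs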
examples = pandas.read_csv("examples.csv").to_numpy().tolist()
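# Build the interface: input row, per-method layer controls, and three result panels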
with gradio.Blocks(title="Explanations with attention rollout") as iface:
    gradio.Markdown(pathlib.Path("description.md").read_text())
    with gradio.Row(equal_height=True):
        with gradio.Column(scale=4):
            sent = gradio.Textbox(label="Input sentence")
        with gradio.Column(scale=1):
            but = gradio.Button("Submit")
    with gradio.Row(equal_height=True):
        with gradio.Column():
            rollout_layer = gradio.Slider(
                minimum=1,
                maximum=12,
                value=1,
                step=1,
                label="Select rollout start layer"
            )
        with gradio.Column():
            gradient_layer = gradio.Slider(
                minimum=1,
                maximum=12,
                value=8,
                step=1,
                label="Select gradient rollout start layer"
            )
        with gradio.Column():
            ig_layer = gradio.Slider(
                minimum=0,
                maximum=12,
                value=0,
                step=1,
                label="Select IG layer"
            )
            ig_baseline = gradio.Dropdown(
                label="Baseline token",
                choices=['Unknown', 'Padding'], value="Unknown"
            )
    with gradio.Row(equal_height=True):
        with gradio.Column():
            gradio.Markdown("### Attention Rollout")
            rollout_result = gradio.HTML()
        with gradio.Column():
            gradio.Markdown("### Gradient-weighted Attention Rollout")
            gradient_result = gradio.HTML()
        with gradio.Column():
            gradio.Markdown("### Layer-Integrated Gradients")
            ig_result = gradio.HTML()
    gradio.Examples(examples, [sent])
    with gradio.Accordion("Some more details"):
        gradio.Markdown(pathlib.Path("notice.md").read_text())
    # Re-run only the affected explainer when its layer control changes
    gradient_layer.change(gr_explainer, [sent, gradient_layer], gradient_result)
    rollout_layer.change(ro_explainer, [sent, rollout_layer], rollout_result)
    ig_layer.change(ig_explainer, [sent, ig_layer, ig_baseline], ig_result)
    # The Submit button runs all three explainers on the current sentence
    but.click(run,
              inputs=[sent, gradient_layer, rollout_layer, ig_layer, ig_baseline],
              outputs=[gradient_result, rollout_result, ig_result]
              )
iface.launch()