mirror of https://github.com/QData/TextAttack.git
synced 2021-10-13 00:05:06 +03:00

resolve merge conflict
@@ -13,17 +13,20 @@
         (min_cos_sim): 0.5
         (cased): False
         (include_unknown_words): True
+        (compare_against_original): True
       )
     (1): PartOfSpeech(
         (tagger_type): nltk
         (tagset): universal
         (allow_verb_noun_swap): True
+        (compare_against_original): True
       )
     (2): UniversalSentenceEncoder(
         (metric): angular
         (threshold): 0.904458599
         (window_size): 15
         (skip_text_shorter_than_window): True
+        (compare_against_original): False
       )
     (3): RepeatModification
     (4): StopwordModification
@@ -8,6 +8,7 @@
   (constraints):
     (0): MaxWordsPerturbed(
         (max_percent): 0.5
+        (compare_against_original): True
       )
     (1): ThoughtVector(
         (embedding_type): paragramcf
@@ -15,9 +16,11 @@
         (threshold): -0.2
         (window_size): inf
         (skip_text_shorter_than_window): False
+        (compare_against_original): True
       )
     (2): GPT2(
         (max_log_prob_diff): 2.0
+        (compare_against_original): True
       )
     (3): RepeatModification
     (4): StopwordModification
@@ -7,6 +7,7 @@
   (constraints):
     (0): LevenshteinEditDistance(
         (max_edit_distance): 30
+        (compare_against_original): True
       )
     (1): RepeatModification
     (2): StopwordModification
@@ -1,4 +1,4 @@
-Attack(
+/.*/Attack(
   (search_method): GeneticAlgorithm(
       (pop_size): 60
       (max_iters): 20
@@ -13,15 +13,18 @@ Attack(
   (constraints):
     (0): MaxWordsPerturbed(
         (max_percent): 0.2
+        (compare_against_original): True
       )
     (1): WordEmbeddingDistance(
         (embedding_type): paragramcf
         (max_mse_dist): 0.5
         (cased): False
         (include_unknown_words): True
+        (compare_against_original): True
       )
     (2): LearningToWriteLanguageModel(
         (max_log_prob_diff): 5.0
+        (compare_against_original): True
       )
     (3): RepeatModification
     (4): StopwordModification
@@ -12,11 +12,13 @@
         (min_bert_score): 0.8
         (model): bert-base-uncased
         (score_type): f1
+        (compare_against_original): True
       )
     (1): PartOfSpeech(
         (tagger_type): flair
         (tagset): universal
         (allow_verb_noun_swap): True
+        (compare_against_original): True
       )
     (2): RepeatModification
     (3): StopwordModification
@@ -9,17 +9,20 @@
   (constraints):
     (0): MaxWordsPerturbed(
         (max_num_words): 2
+        (compare_against_original): True
       )
     (1): WordEmbeddingDistance(
         (embedding_type): paragramcf
         (min_cos_sim): 0.8
         (cased): False
         (include_unknown_words): True
+        (compare_against_original): True
       )
     (2): PartOfSpeech(
         (tagger_type): nltk
         (tagset): universal
         (allow_verb_noun_swap): True
+        (compare_against_original): True
       )
     (3): RepeatModification
     (4): StopwordModification
@@ -9,6 +9,7 @@
   (constraints):
     (0): LanguageTool(
         (grammar_error_threshold): 0
+        (compare_against_original): True
       )
     (1): RepeatModification
     (2): StopwordModification
@@ -7,6 +7,7 @@
   (constraints):
     (0): LevenshteinEditDistance(
         (max_edit_distance): 30
+        (compare_against_original): True
       )
     (1): RepeatModification
     (2): StopwordModification
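Note on the fixture hunks above: each constraint printout gains a (compare_against_original) line because, after the code changes below, every subclass's extra_repr_keys() appends super().extra_repr_keys(), and the base Constraint class contributes that key. A minimal runnable sketch of the mechanism — the base-class body and the repr renderer here are simplified assumptions, not TextAttack's verbatim code:

class Constraint:
    """Simplified stand-in for textattack's base Constraint class."""

    def __init__(self, compare_against_original=True):
        self.compare_against_original = compare_against_original

    def extra_repr_keys(self):
        # The key every subclass now inherits into its printed repr.
        return ["compare_against_original"]

    def __repr__(self):
        # Simplified renderer: one "(name): value" line per key,
        # mirroring the fixture printouts above.
        params = "\n".join(
            f"    ({k}): {getattr(self, k)}" for k in self.extra_repr_keys()
        )
        return f"{type(self).__name__}(\n{params}\n)"


class LevenshteinEditDistance(Constraint):
    def __init__(self, max_edit_distance=30, **kwargs):
        super().__init__(**kwargs)
        self.max_edit_distance = max_edit_distance

    def extra_repr_keys(self):
        # The one-line change this commit repeats across constraints:
        return ["max_edit_distance"] + super().extra_repr_keys()


print(LevenshteinEditDistance())
# LevenshteinEditDistance(
#     (max_edit_distance): 30
#     (compare_against_original): True
# )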
@@ -98,18 +98,8 @@ class GoogleLanguageModel(Constraint):
 
         return [transformed_texts[i] for i in max_el_indices]
 
-<<<<<<< HEAD
     def _check_constraint(self, transformed_text, reference_text):
         return self._check_constraint_many([transformed_text], reference_text)
-=======
-    def _check_constraint(self, transformed_text, current_text, original_text=None):
-        return self._check_constraint_many(
-            [transformed_text], current_text, original_text=original_text
-        )
-
-    def __call__(self, x, x_adv):
-        raise NotImplementedError()
->>>>>>> master
 
     def extra_repr_keys(self):
-        return ["top_n", "top_n_per_index"]
+        return ["top_n", "top_n_per_index"] + super().extra_repr_keys()
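The conflict above is resolved in favor of HEAD: _check_constraint keeps the (transformed_text, reference_text) signature and delegates to the batch method, while master's three-argument variant and its __call__ stub are dropped (the hunk shrinks from 18 lines to 8). A sketch of the resolved shape — Constraint as sketched earlier, and _score is a hypothetical stand-in for the real batched language-model query:

class GoogleLanguageModel(Constraint):
    def _score(self, text):
        # Hypothetical placeholder for querying the language model.
        raise NotImplementedError

    def _check_constraint_many(self, transformed_texts, reference_text):
        # Illustrative only: keep texts the LM scores at least as highly
        # as the reference (the real method batches LM queries and returns
        # [transformed_texts[i] for i in max_el_indices]).
        ref = self._score(reference_text)
        return [t for t in transformed_texts if self._score(t) >= ref]

    def _check_constraint(self, transformed_text, reference_text):
        # Kept from HEAD: the single-text check reuses the batch check.
        return self._check_constraint_many([transformed_text], reference_text)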
@@ -49,4 +49,4 @@ class LanguageModelConstraint(Constraint, ABC):
         return True
 
     def extra_repr_keys(self):
-        return ["max_log_prob_diff"]
+        return ["max_log_prob_diff"] + super().extra_repr_keys()
@@ -37,4 +37,4 @@ class LanguageTool(Constraint):
         return errors_added <= self.grammar_error_threshold
 
     def extra_repr_keys(self):
-        return ["grammar_error_threshold"]
+        return ["grammar_error_threshold"] + super().extra_repr_keys()
@@ -97,7 +97,7 @@ class PartOfSpeech(Constraint):
         return transformation_consists_of_word_swaps(transformation)
 
     def extra_repr_keys(self):
-        return ["tagger_type", "tagset", "allow_verb_noun_swap"]
+        return ["tagger_type", "tagset", "allow_verb_noun_swap"] + super().extra_repr_keys()
 
 
 def zip_flair_result(pred):
@@ -26,4 +26,4 @@ class BLEU(Constraint):
         return bleu_score <= self.max_bleu_score
 
     def extra_repr_keys(self):
-        return ["max_bleu_score"]
+        return ["max_bleu_score"] + super().extra_repr_keys()
@@ -25,4 +25,4 @@ class chrF(Constraint):
         return chrf <= self.max_chrf
 
     def extra_repr_keys(self):
-        return ["max_chrf"]
+        return ["max_chrf"] + super().extra_repr_keys()
@@ -23,4 +23,4 @@ class LevenshteinEditDistance(Constraint):
         return edit_distance <= self.max_edit_distance
 
     def extra_repr_keys(self):
-        return ["max_edit_distance"]
+        return ["max_edit_distance"] + super().extra_repr_keys()
@@ -52,4 +52,4 @@ class MaxWordsPerturbed(Constraint):
             metric.append("max_percent")
         if self.max_num_words is not None:
             metric.append("max_num_words")
-        return metric
+        return metric + super().extra_repr_keys()
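MaxWordsPerturbed is the one constraint whose repr keys are assembled dynamically: it reports max_percent and/or max_num_words depending on which bound was configured, and the change appends the inherited keys to that list as well. A hedged sketch — the __init__ here is an assumption inferred from the key names, with Constraint as sketched earlier:

class MaxWordsPerturbed(Constraint):
    def __init__(self, max_percent=None, max_num_words=None, **kwargs):
        super().__init__(**kwargs)
        self.max_percent = max_percent
        self.max_num_words = max_num_words

    def extra_repr_keys(self):
        # Only the bound(s) actually set appear in the repr; the
        # inherited compare_against_original key is appended either way.
        metric = []
        if self.max_percent is not None:
            metric.append("max_percent")
        if self.max_num_words is not None:
            metric.append("max_num_words")
        return metric + super().extra_repr_keys()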
@@ -24,4 +24,4 @@ class METEOR(Constraint):
         return meteor <= self.max_meteor
 
     def extra_repr_keys(self):
-        return ["max_meteor"]
+        return ["max_meteor"] + super().extra_repr_keys()
@@ -63,4 +63,4 @@ class BERTScore(Constraint):
         return False
 
     def extra_repr_keys(self):
-        return ["min_bert_score", "model", "score_type"]
+        return ["min_bert_score", "model", "score_type"] + super().extra_repr_keys()
@@ -204,7 +204,7 @@ class SentenceEncoder(Constraint):
             "threshold",
             "window_size",
             "skip_text_shorter_than_window",
-        ]
+        ] + super().extra_repr_keys()
 
 
 def get_angular_sim(emb1, emb2):
@@ -176,4 +176,4 @@ class WordEmbeddingDistance(Constraint):
             metric = "max_mse_dist"
         else:
             metric = "min_cos_sim"
-        return ["embedding_type", metric, "cased", "include_unknown_words"]
+        return ["embedding_type", metric, "cased", "include_unknown_words"] + super().extra_repr_keys()