turblimp-evaluations / results-dbmdz-bert-base-turkish-128k-uncased.jsonl
{"file_name": "data/base/augmented_anaphor_agreement.csv", "mean_difference": 11.675439192771911, "accuracy": 97.7, "total_pairs": 1000, "model_name": "dbmdz/bert-base-turkish-128k-uncased"}
{"file_name": "data/base/augmented_argument_structure_ditransitive.csv", "mean_difference": 14.143673966407777, "accuracy": 96.1, "total_pairs": 1000, "model_name": "dbmdz/bert-base-turkish-128k-uncased"}
{"file_name": "data/base/augmented_argument_structure_transitive.csv", "mean_difference": 12.904011151313782, "accuracy": 99.1, "total_pairs": 1000, "model_name": "dbmdz/bert-base-turkish-128k-uncased"}
{"file_name": "data/base/augmented_binding.csv", "mean_difference": 12.784203224182129, "accuracy": 99.0, "total_pairs": 1000, "model_name": "dbmdz/bert-base-turkish-128k-uncased"}
{"file_name": "data/base/augmented_determiners.csv", "mean_difference": 7.591576636314392, "accuracy": 99.3, "total_pairs": 1000, "model_name": "dbmdz/bert-base-turkish-128k-uncased"}
{"file_name": "data/base/augmented_ellipsis.csv", "mean_difference": 11.30601208305359, "accuracy": 87.5, "total_pairs": 1000, "model_name": "dbmdz/bert-base-turkish-128k-uncased"}
{"file_name": "data/base/augmented_irregular_forms.csv", "mean_difference": 9.455606824874877, "accuracy": 99.6, "total_pairs": 1000, "model_name": "dbmdz/bert-base-turkish-128k-uncased"}
{"file_name": "data/base/augmented_island_effects.csv", "mean_difference": 0.026743251800537108, "accuracy": 51.2, "total_pairs": 1000, "model_name": "dbmdz/bert-base-turkish-128k-uncased"}
{"file_name": "data/base/augmented_nominalization.csv", "mean_difference": 9.842130885601044, "accuracy": 97.39999999999999, "total_pairs": 1000, "model_name": "dbmdz/bert-base-turkish-128k-uncased"}
{"file_name": "data/base/augmented_npi_licensing.csv", "mean_difference": 9.54205815601349, "accuracy": 95.0, "total_pairs": 1000, "model_name": "dbmdz/bert-base-turkish-128k-uncased"}
{"file_name": "data/base/augmented_passives.csv", "mean_difference": 5.202877856254577, "accuracy": 81.3, "total_pairs": 1000, "model_name": "dbmdz/bert-base-turkish-128k-uncased"}
{"file_name": "data/base/augmented_quantifiers.csv", "mean_difference": 13.8296568775177, "accuracy": 98.4, "total_pairs": 1000, "model_name": "dbmdz/bert-base-turkish-128k-uncased"}
{"file_name": "data/base/augmented_relative_clauses.csv", "mean_difference": 14.279878305435181, "accuracy": 98.5, "total_pairs": 1000, "model_name": "dbmdz/bert-base-turkish-128k-uncased"}
{"file_name": "data/base/augmented_scrambling.csv", "mean_difference": 22.592113137245178, "accuracy": 100.0, "total_pairs": 1000, "model_name": "dbmdz/bert-base-turkish-128k-uncased"}
{"file_name": "data/base/augmented_subject_verb_agreement.csv", "mean_difference": 13.545563860416411, "accuracy": 98.8, "total_pairs": 1000, "model_name": "dbmdz/bert-base-turkish-128k-uncased"}
{"file_name": "data/base/augmented_suspended_affixation.csv", "mean_difference": 21.828987497806548, "accuracy": 100.0, "total_pairs": 1000, "model_name": "dbmdz/bert-base-turkish-128k-uncased"}