-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathgenerate_configs_r2r.py
88 lines (81 loc) · 3.12 KB
/
generate_configs_r2r.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
# JSON configuration template for one req2req trace-link-recovery run.
# Placeholders substituted by the generation loop below:
#   <<DATASET>>          dataset directory name under ./datasets/req2req/
#   <<RETRIEVAL_COUNT>>  "max_results" for the target store
#   <<CLASSIFIER_MODE>>  classifier name (e.g. "mock", "simple_openai", "reasoning_ollama")
#   <<ARGS>>             JSON fragment spliced into the classifier's "args" object
#   <<POSTPROCESSOR>>    tracelinkid postprocessor name ("req2req" or "identity")
TEMPLATE = """
{
"cache_dir": "./cache/<<DATASET>>",
"gold_standard_configuration": {
"path": "./datasets/req2req/<<DATASET>>/answer.csv",
"hasHeader": "true"
},
"source_artifact_provider" : {
"name" : "text",
"args" : {
"artifact_type" : "requirement",
"path" : "./datasets/req2req/<<DATASET>>/high"
}
},
"target_artifact_provider" : {
"name" : "text",
"args" : {
"artifact_type" : "requirement",
"path" : "./datasets/req2req/<<DATASET>>/low"
}
},
"source_preprocessor" : {
"name" : "artifact",
"args" : {}
},
"target_preprocessor" : {
"name" : "artifact",
"args" : {}
},
"embedding_creator" : {
"name" : "openai",
"args" : {
"model": "text-embedding-3-large"
}
},
"source_store" : {
"name" : "custom",
"args" : { }
},
"target_store" : {
"name" : "custom",
"args" : {
"max_results" : "<<RETRIEVAL_COUNT>>"
}
},
"classifier" : {
"name" : "<<CLASSIFIER_MODE>>",
"args" : {
<<ARGS>>
}
},
"result_aggregator" : {
"name" : "any_connection",
"args" : {}
},
"tracelinkid_postprocessor" : {
"name" : "<<POSTPROCESSOR>>",
"args" : {}
}
}
"""
# Configurations
# The first three lists are consumed in lockstep (zipped): postprocessors[i]
# and retrieval_counts[i] apply to datasets[i].
datasets = ["CM1Dataset", "GANNT", "ModisDataset", "CCHIT", "WARC", "dronology", "CM1-NASA"]
postprocessors = ["req2req", "req2req", "identity", "identity", "req2req", "identity", "identity"]
# Same retrieval count for every dataset; stored as strings because they are
# spliced verbatim into the JSON template.
retrieval_counts = ["4"] * len(datasets)
# Classifier prompting modes and the LLMs to pair them with.
classifier_modes = ["simple", "reasoning"]
gpt_models = ["gpt-4o-mini-2024-07-18", "gpt-4o-2024-08-06"]
ollama_models = ["llama3.1:8b-instruct-fp16", "codellama:13b"]
# Generate
# Classifier "args" fragments: a single JSON key/value naming the model.
gpt_args = [f'"model": "{model}"' for model in gpt_models]
ollama_args = [f'"model": "{model}"' for model in ollama_models]


def _render_config(dataset, classifier_name, classifier_args, postprocessor, retrieval_count):
    """Return TEMPLATE with every <<PLACEHOLDER>> substituted.

    All arguments are strings; classifier_args is a JSON fragment (possibly
    empty) spliced into the classifier's "args" object.
    """
    return (TEMPLATE
            .replace("<<DATASET>>", dataset)
            .replace("<<CLASSIFIER_MODE>>", classifier_name)
            .replace("<<ARGS>>", classifier_args)
            .replace("<<POSTPROCESSOR>>", postprocessor)
            # BUG FIX: the LLM branches previously replaced the bare string
            # "RETRIEVAL_COUNT" (no <<>> delimiters), which turned
            # "<<RETRIEVAL_COUNT>>" into e.g. "<<4>>" in the generated
            # gpt/ollama configs instead of "4".
            .replace("<<RETRIEVAL_COUNT>>", retrieval_count))


for dataset, postprocessor, retrieval_count in zip(datasets, postprocessors, retrieval_counts):
    # Baseline config with the mock (no-LLM) classifier.
    with open(f"./configs/req2req/{dataset}_no_llm.json", "w") as f:
        f.write(_render_config(dataset, "mock", "", postprocessor, retrieval_count))
    # One config per (mode, model) for OpenAI and for Ollama backends.
    for classifier_mode in classifier_modes:
        for gpt_model, gpt_arg in zip(gpt_models, gpt_args):
            with open(f"./configs/req2req/{dataset}_{classifier_mode}_gpt_{gpt_model}.json", "w") as f:
                f.write(_render_config(dataset, classifier_mode + "_openai", gpt_arg, postprocessor, retrieval_count))
        for ollama_model, ollama_arg in zip(ollama_models, ollama_args):
            # ':' is illegal in Windows filenames; hoisting the replace also
            # avoids reusing the same quote character inside an f-string,
            # which is a SyntaxError before Python 3.12 (PEP 701).
            safe_model_name = ollama_model.replace(":", "_")
            with open(f"./configs/req2req/{dataset}_{classifier_mode}_ollama_{safe_model_name}.json", "w") as f:
                f.write(_render_config(dataset, classifier_mode + "_ollama", ollama_arg, postprocessor, retrieval_count))