-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathstatic_analyzer.py
More file actions
165 lines (121 loc) · 7.52 KB
/
static_analyzer.py
File metadata and controls
165 lines (121 loc) · 7.52 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
import argparse
import hashlib
import json
import os
import random
import sys
from datetime import datetime

#script_dir = os.path.dirname(os.path.abspath(__file__))
#sys.path.append(os.path.join(script_dir,'software_metrics','metrics','metrics_cfgs'))
sys.path.append('software_metrics/metrics')

from loc_utils import *
from utils_main import *
from abc_metric_utils import *
from halstead_metric_utils import *
from cyclomatic_complexity_utils import *
from mi_utils import *
def main():
parser = argparse.ArgumentParser(description="Count lines of code (LOC) for specific file extensions.")
parser.add_argument('--dir',
type = str,
required = True,
help = "Directory to start counting LOC from.")
parser.add_argument('--json_exts',
type = str,
default = "metrics_cfgs/program_file_exts_map.json",
help = "JSON file containing programming language to file extensions mapping.")
parser.add_argument('--file_exts',
type = str,
default = "metrics_cfgs/program_file_exts.txt",
help = "Text file containing file extensions to count lines for.")
parser.add_argument('--log',
type = bool,
default = True,
help = "whether or not to save LOC analysis to file")
parser.add_argument('--runner_cfg',
type = str,
default = 'metrics_runner_cfg.txt',
help = 'path to the metrics_runner CFG file which specifies which metrics to compute or omit')
args = parser.parse_args()
runner_cfg = parse_runner_cfg(args.runner_cfg)
extensions_map = read_valid_extensions(args.json_exts)
extensions_to_count = read_extensions_to_count(args.file_exts)
current_time = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
file_hash = hashlib.sha256(str(random.getrandbits(256)).encode('utf-8')).hexdigest()
if args.log:
if not os.path.exists('logs_{}'.format(file_hash)):
os.makedirs('logs_{}'.format(file_hash[:10]))
if not os.path.exists('logs_{}/abc_metrics'.format(file_hash[:10])):
os.makedirs('logs_{}/abc_metrics'.format(file_hash[:10]))
if not os.path.exists('logs_{}/halstead_metrics'.format(file_hash[:10])):
os.makedirs('logs_{}/halstead_metrics'.format(file_hash[:10]))
if not os.path.exists('logs_{}/cyclomatic_complexity_metrics'.format(file_hash[:10])):
os.makedirs('logs_{}/cyclomatic_complexity_metrics'.format(file_hash[:10]))
if not os.path.exists('logs_{}/loc_metrics'.format(file_hash[:10])):
os.makedirs('logs_{}/loc_metrics'.format(file_hash[:10]))
if runner_cfg['LOC']:
loc_dict = count_lines_of_code(directory = args.dir,
extensions_to_count = extensions_to_count,
extensions_map = extensions_map,
hll_tokens = 'metrics_cfgs/hll_tokens.json',
asm_tokens = 'metrics_cfgs/asm_tokens.json',
ir_tokens = 'metrics_cfgs/ir_tokens.json')
full_loc_dict = loc_full_analysis(loc_dict,extensions_map)
final_loc_dict = append_timestamp_hash(full_dict = full_loc_dict,
timestamp = current_time,
hash_ = file_hash)
_ = dump_final_jsons(prefix_path = './logs_{}/loc_metrics'.format(file_hash[:10]),
final_dicts = final_loc_dict)
if runner_cfg['ABC']:
abc_dict = abc_process_directory(directory = args.dir,
extensions_to_count = extensions_to_count,
extensions_map = extensions_map,
hll_tokens = 'metrics_cfgs/hll_tokens.json',
asm_tokens = 'metrics_cfgs/asm_tokens.json',
ir_tokens = 'metrics_cfgs/ir_tokens.json')
full_abc_dict = abc_full_analysis(abc_dict,extensions_map)
final_abc_dict = append_timestamp_hash(full_dict = full_abc_dict,
timestamp = current_time,
hash_ = file_hash)
_ = dump_final_jsons(prefix_path = './logs_{}/abc_metrics'.format(file_hash[:10]),
final_dicts = final_abc_dict)
if runner_cfg['Halstead']:
halstead_metrics = halstead_process_directory(args.dir,
extensions_to_count,
extensions_map,
hll_tokens = 'metrics_cfgs/hll_tokens.json',
asm_tokens = 'metrics_cfgs/asm_tokens.json',
ir_tokens = 'metrics_cfgs/ir_tokens.json')
full_halstead_dict = halstead_full_analysis(halstead_metrics,
extensions_map)
final_halstead_dict = append_timestamp_hash(full_dict = full_halstead_dict,
timestamp = current_time,
hash_ = file_hash)
_ = dump_final_jsons(prefix_path = './logs_{}/halstead_metrics'.format(file_hash[:10]),
final_dicts = final_halstead_dict)
if runner_cfg['cyclomatic_complexity']:
cc_metrics = cc_process_directory(args.dir,
extensions_to_count,
extensions_map,
hll_tokens = 'metrics_cfgs/hll_tokens.json',
asm_tokens = 'metrics_cfgs/asm_tokens.json',
ir_tokens = 'metrics_cfgs/ir_tokens.json')
full_cc_dict = cc_full_analysis(cc_metrics,extensions_map)
final_cc_dict = append_timestamp_hash(full_dict = full_cc_dict,
timestamp = current_time,
hash_ = file_hash)
_ = dump_final_jsons(prefix_path = './logs_{}/cyclomatic_complexity_metrics'.format(file_hash[:10]),
final_dicts = final_cc_dict)
if runner_cfg['Maintainability_index']:
assert 'cyclomatic_complexity' in runner_cfg.keys()
assert 'Halstead' in runner_cfg.keys()
assert 'LOC' in runner_cfg.keys()
mi_metrics = full_maintainability_calc(full_halstead_dict = full_halstead_dict,
full_cc_dict = full_cc_dict,
full_loc_dict = full_loc_dict)
final_mi_metrics = append_timestamp_hash(full_dict = mi_metrics,
timestamp = current_time,
hash_ = file_hash)
json.dump(final_mi_metrics,open('./logs_{}/maintainability_metrics.json'.format(file_hash[:10]),'w'),indent = 4)
print('metrics finished computing for {}!'.format(args.dir))
if __name__ == "__main__":
main()