Skip to content

Commit e1f512a

Browse files
authored
Merge branch 'main' into slogdet
2 parents 67d2d37 + 4fb76ab commit e1f512a

5 files changed

Lines changed: 338 additions & 100 deletions

File tree

.github/scripts/build.sh

Lines changed: 23 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -49,9 +49,31 @@ python third_party/torch-xpu-ops/.github/scripts/apply_torch_pr.py
4949
git submodule sync && git submodule update --init --recursive
5050
python -m pip install -r requirements.txt
5151
python -m pip install mkl-static mkl-include
52-
# python -m pip install -U cmake==3.31.6
52+
export USE_STATIC_MKL=1
5353
export USE_ONEMKL=1
5454
export USE_XCCL=1
55+
export PYTORCH_EXTRA_INSTALL_REQUIREMENTS=" \
56+
intel-cmplr-lib-rt==2025.1.1 | \
57+
intel-cmplr-lib-ur==2025.1.1 | \
58+
intel-cmplr-lic-rt==2025.1.1 | \
59+
intel-sycl-rt==2025.1.1 | \
60+
oneccl-devel==2021.15.1; platform_system == 'Linux' and platform_machine == 'x86_64' | \
61+
oneccl==2021.15.1; platform_system == 'Linux' and platform_machine == 'x86_64' | \
62+
impi-rt==2021.15.0; platform_system == 'Linux' and platform_machine == 'x86_64' | \
63+
onemkl-sycl-blas==2025.1.0 | \
64+
onemkl-sycl-dft==2025.1.0 | \
65+
onemkl-sycl-lapack==2025.1.0 | \
66+
onemkl-sycl-rng==2025.1.0 | \
67+
onemkl-sycl-sparse==2025.1.0 | \
68+
dpcpp-cpp-rt==2025.1.1 | \
69+
intel-opencl-rt==2025.1.1 | \
70+
mkl==2025.1.0 | \
71+
intel-openmp==2025.1.1 | \
72+
tbb==2022.1.0 | \
73+
tcmlib==1.3.0 | \
74+
umf==0.10.0 | \
75+
intel-pti==0.12.0
76+
"
5577

5678
# Build
5779
sed -i "s/checkout --quiet \${TORCH_XPU_OPS_COMMIT}/log -n 1/g" caffe2/CMakeLists.txt

.github/scripts/check-ut.py

Lines changed: 220 additions & 76 deletions
Original file line number | Diff line number | Diff line change
@@ -1,22 +1,47 @@
11
import argparse
22
import sys
33
import os
4+
import re
45
from junitparser import JUnitXml, Error, Failure, Skipped
56

6-
parser = argparse.ArgumentParser()
7-
parser.add_argument('junitxml', nargs='+')
7+
parser = argparse.ArgumentParser(description='Test results analyzer')
8+
parser.add_argument('input_files', nargs='+', help='JUnit XML files or log files')
89
args = parser.parse_args()
910

1011
failures = []
11-
suites = []
12+
summaries = []
13+
14+
error_types = [
15+
"RuntimeError",
16+
"ValueError",
17+
"TypeError",
18+
"AttributeError",
19+
"KeyError",
20+
"IndexError",
21+
"ImportError",
22+
"AssertionError",
23+
"Exception",
24+
"OSError",
25+
"Failed",
26+
"TimeoutError",
27+
"asyncio.TimeoutError",
28+
"FileNotFoundError",
29+
"PermissionError",
30+
"NotImplementedError",
31+
]
1232

1333
def get_classname(case):
14-
return ' '.join(case.classname.split())
34+
return ' '.join(case.classname.split()) if hasattr(case, 'classname') else case.get('classname', '')
1535

1636
def get_name(case):
37+
if isinstance(case, dict):
38+
return case.get('name', '')
1739
return ' '.join(case.name.split())
1840

1941
def get_result(case):
42+
if isinstance(case, dict):
43+
return case.get('status', 'failed')
44+
2045
result = "passed"
2146
if case.result:
2247
if isinstance(case.result[0], Error):
@@ -28,88 +53,207 @@ def get_result(case):
2853
return result
2954

3055
def get_message(case):
56+
if isinstance(case, dict):
57+
return case.get('error', '')
58+
3159
if not case.result:
3260
return ""
33-
return f"{case.result[0].message.splitlines()[0]}"
61+
full_text = case.result[0].text if hasattr(case.result[0], 'text') else case.result[0].message
62+
if not full_text:
63+
return ""
64+
65+
error_messages = []
66+
capture_next_lines = False
67+
indent_level = 0
68+
69+
for line in full_text.splitlines():
70+
stripped_line = line.strip()
71+
if not stripped_line:
72+
continue
73+
74+
for error_type in error_types:
75+
if stripped_line.startswith(error_type + ": "):
76+
error_msg = stripped_line[len(error_type)+2:]
77+
error_messages.append(f"{error_type}: {error_msg}")
78+
capture_next_lines = True
79+
indent_level = 0
80+
break
81+
elif f"{error_type}:" in stripped_line and "Traceback" not in stripped_line:
82+
error_msg = stripped_line.split(f'{error_type}:')[-1].strip()
83+
error_messages.append(f"{error_type}: {error_msg}")
84+
capture_next_lines = True
85+
indent_level = 0
86+
break
87+
88+
return " ; ".join(error_messages) if error_messages else f"{case.result[0].message.splitlines()[0]}"
3489

35-
def print_md_row(row, print_header):
90+
def print_md_row(row, print_header=False):
3691
if print_header:
37-
header = " | ".join([f"{key}" for key, _ in row.items()])
92+
header = " | ".join([f"{key}" for key in row.keys()])
3893
print(f"| {header} |")
39-
header = " | ".join(["-"*len(key) for key, _ in row.items()])
94+
header = " | ".join(["---"] * len(row))
4095
print(f"| {header} |")
41-
row = " | ".join([f"{value}" for _, value in row.items()])
42-
print(f"| {row} |")
96+
row_values = " | ".join([f"{value}" for value in row.values()])
97+
print(f"| {row_values} |")
4398

44-
def print_cases(cases):
99+
def print_failures():
100+
if not failures:
101+
return
102+
103+
print("### Test Failures")
45104
print_header = True
46-
for case in cases:
47-
classname = get_classname(case)
48-
name = get_name(case)
49-
result = get_result(case)
50-
message = get_message(case)
51-
row = {
52-
'Class name': classname,
53-
'Test name': name,
54-
'Status': result,
55-
'Message': message,
56-
}
57-
print_md_row(row, print_header)
105+
for case in failures:
106+
print_md_row({
107+
'Class name': get_classname(case),
108+
'Test name': get_name(case),
109+
'Status': get_result(case),
110+
'Message': get_message(case),
111+
'Source': case['source'] if isinstance(case, dict) else 'XML'
112+
}, print_header)
58113
print_header = False
59114

60-
def print_suite(suite):
115+
def parse_log_file(log_file):
116+
with open(log_file, encoding='utf-8') as f:
117+
content = f.read()
118+
119+
ut_name = os.path.splitext(os.path.basename(log_file))[0]
120+
summary = {
121+
'Category': determine_category(ut_name),
122+
'UT': ut_name,
123+
'Test cases': 0,
124+
'Passed': 0,
125+
'Skipped': 0,
126+
'Failures': 0,
127+
'Errors': 0,
128+
'Source': 'Log'
129+
}
130+
131+
# Extract test counts
132+
test_run_match = re.search(r"Ran (\d+) tests in [\d.]+s", content)
133+
if test_run_match:
134+
summary['Test cases'] = int(test_run_match.group(1))
135+
136+
# Extract skipped case number
137+
skipped_match = re.search(r"skipped[ =](\d+)", content, re.IGNORECASE)
138+
if skipped_match:
139+
summary['Skipped'] = int(skipped_match.group(1))
140+
else:
141+
skipped_match = re.search(r"skipped (\d+) cases?", content, re.IGNORECASE)
142+
if skipped_match:
143+
summary['Skipped'] = int(skipped_match.group(1))
144+
145+
# Extract failures
146+
failure_blocks = re.findall(r"(FAIL:.*?)(?:\n\n|\n=+\n|\Z)", content, re.DOTALL)
147+
exist_test_names = set()
148+
failures_number = 0
149+
150+
for block in failure_blocks:
151+
case_match = re.match(r"FAIL: (\w+) \(__mp_main__\.(\w+)\)", block)
152+
if not case_match:
153+
continue
154+
155+
test_name = case_match.group(1)
156+
if test_name in exist_test_names:
157+
continue
158+
exist_test_names.add(test_name)
159+
160+
error_msg = []
161+
error_pattern = r"(" + "|".join(error_types) + r"):.*?(?=\n\S|\n\n|\n=+\n|\Z)"
162+
error_matches = re.finditer(error_pattern, block, re.DOTALL)
163+
if not error_matches and "Traceback" in block:
164+
error_msg.append("Unknown error (see traceback)")
165+
else:
166+
for match in error_matches:
167+
error_msg.append(match.group(0).strip())
168+
169+
failures.append({
170+
'classname': ut_name,
171+
'name': f"{case_match.group(2)}:{test_name}",
172+
'error': " ".join(error_msg),
173+
'status': 'failed',
174+
'source': 'Log'
175+
})
176+
failures_number += 1
177+
178+
if failures_number > summary['Failures']:
179+
summary['Failures'] = failures_number
180+
summary['Passed'] = summary['Test cases'] - summary['Failures'] - summary['Skipped']
181+
182+
return summary
183+
184+
def determine_category(ut):
185+
if ut == 'op_regression':
186+
return 'op_regression'
187+
elif ut == 'op_regression_dev1':
188+
return 'op_regression_dev1'
189+
elif ut == 'op_extended':
190+
return 'op_extended'
191+
elif 'op_ut' in ut:
192+
return 'op_ut'
193+
else:
194+
return 'unknown'
195+
196+
def process_log_file(log_file):
197+
try:
198+
summary = parse_log_file(log_file)
199+
summaries.append(summary)
200+
except Exception as e:
201+
print(f"Error processing {log_file}: {e}", file=sys.stderr)
202+
203+
def process_xml_file(xml_file):
204+
try:
205+
xml = JUnitXml.fromfile(xml_file)
206+
ut = os.path.basename(xml_file).split('.')[0]
207+
category = determine_category(ut)
208+
209+
for suite in xml:
210+
suite_summary = {
211+
'Category': category,
212+
'UT': ut,
213+
'Test cases': suite.tests,
214+
'Passed': suite.tests - suite.skipped - suite.failures - suite.errors,
215+
'Skipped': suite.skipped,
216+
'Failures': suite.failures,
217+
'Errors': suite.errors,
218+
'Source': 'XML'
219+
}
220+
summaries.append(suite_summary)
221+
222+
for case in suite:
223+
if get_result(case) not in ["passed", "skipped"]:
224+
failures.append(case)
225+
except Exception as e:
226+
print(f"Error processing {xml_file}: {e}", file=sys.stderr)
227+
228+
def print_summary():
229+
print("### Results Summary")
61230
print_header = True
62-
for suite in suites:
63-
ut = args.junitxml[0]
64-
del(args.junitxml[0])
65-
ut = os.path.basename(ut).split('.')[0]
66-
tests = suite.tests
67-
skipped = suite.skipped
68-
failures = suite.failures
69-
errors = suite.errors
70-
if ut == 'op_regression':
71-
category = 'op_regression'
72-
elif ut == 'op_regression_dev1':
73-
category = 'op_regression_dev1'
74-
elif ut == 'op_extended':
75-
category = 'op_extended'
76-
elif 'op_ut' in ut:
77-
category = 'op_ut'
78-
row = {
79-
'Category': category,
80-
'UT': ut,
81-
'Test cases': tests,
82-
'Passed': tests-skipped-failures-errors,
83-
'Skipped': skipped,
84-
'Failures': failures,
85-
'Errors': errors,
86-
}
87-
print_md_row(row, print_header)
231+
232+
for summary in summaries:
233+
print_md_row({
234+
'Category': summary['Category'],
235+
'UT': summary['UT'],
236+
'Test cases': summary['Test cases'],
237+
'Passed': summary['Passed'],
238+
'Skipped': summary['Skipped'],
239+
'Failures': summary['Failures'],
240+
'Errors': summary['Errors'],
241+
'Source': summary['Source']
242+
}, print_header)
88243
print_header = False
89244

90-
xmls = [ JUnitXml.fromfile(f) for f in args.junitxml ]
91-
for idx, xml in enumerate(xmls):
92-
for suite in xml:
93-
suites.append(suite)
94-
for case in suite:
95-
classname = get_classname(case)
96-
name = get_name(case)
97-
result = get_result(case)
98-
if result not in ["passed", "skipped"]:
99-
failures.append(case)
100-
101-
printed = False
102-
def print_break(needed):
103-
if needed:
104-
print("")
105-
106-
if failures:
107-
print_break(printed)
108-
print("### Failures")
109-
print_cases(failures)
110-
printed = True
111-
112-
print("### Results Summary")
113-
print_suite(suites)
114-
115-
sys.exit(0)
245+
def main():
246+
for input_file in args.input_files:
247+
if input_file.endswith('.log'):
248+
process_log_file(input_file)
249+
elif input_file.endswith('.xml'):
250+
process_xml_file(input_file)
251+
else:
252+
print(f"Skipping unknown file type: {input_file}", file=sys.stderr)
253+
254+
print_failures()
255+
print_summary()
256+
257+
258+
if __name__ == "__main__":
259+
main()

.github/scripts/env.sh

Lines changed: 1 addition & 15 deletions
Original file line number | Diff line number | Diff line change
@@ -1,21 +1,7 @@
11
#!/bin/bash
22

33
source /opt/intel/oneapi/compiler/latest/env/vars.sh
4-
source /opt/intel/oneapi/umf/latest/env/vars.sh
54
source /opt/intel/oneapi/pti/latest/env/vars.sh
5+
source /opt/intel/oneapi/umf/latest/env/vars.sh
66
source /opt/intel/oneapi/ccl/latest/env/vars.sh
77
source /opt/intel/oneapi/mpi/latest/env/vars.sh
8-
source /opt/intel/oneapi/mkl/latest/env/vars.sh
9-
export PYTORCH_EXTRA_INSTALL_REQUIREMENTS="\
10-
intel-cmplr-lib-rt==2025.0.5 |\
11-
intel-cmplr-lib-ur==2025.0.5 |\
12-
intel-cmplr-lic-rt==2025.0.5 |\
13-
intel-sycl-rt==2025.0.5 |\
14-
impi-devel==2021.14.2 |\
15-
oneccl-devel==2021.14.1 |\
16-
mkl-devel==2025.0.1 |\
17-
onemkl-sycl-blas==2025.0.1 |\
18-
onemkl-sycl-dft==2025.0.1 |\
19-
onemkl-sycl-lapack==2025.0.1 |\
20-
tcmlib==1.2.0 | umf==0.9.1 | intel-pti==0.10.2 \
21-
"

0 commit comments

Comments (0)