import subprocess
from dataclasses import dataclass
from pathlib import Path
from typing import List
from typing import Dict
import urllib.error
import urllib.request
import time
import hashlib
import json
import ast
import copy
import glob
import os

root_scala_versions = ["2.11.12", "2.12.19", "2.13.14", "3.1.3", "3.2.2", "3.3.3", "3.4.3", "3.5.0"]
scala_test_version = "3.2.9"
scala_fmt_version = "3.0.0"

@dataclass
class MavenCoordinates:
    group: str
    artifact: str
    version: str
    coordinate: str

@dataclass
class ResolvedArtifact:
    coordinates: MavenCoordinates
    checksum: str
    direct_dependencies: List[MavenCoordinates]

def select_root_artifacts(scala_version) -> List[str]:
    scala_major = ".".join(scala_version.split(".")[:2])
    scala_test_major = "3" if scala_major >= "3.0" else scala_major
    scala_fmt_major = "2.13" if scala_major >= "3.0" else scala_major
    kind_projector_version = "0.13.2" if scala_major < "2.13" else "0.13.3"

    common_root_artifacts = [
        f"org.scalatest:scalatest_{scala_test_major}:{scala_test_version}",
        f"org.scalameta:scalafmt-core_{scala_fmt_major}:{'2.7.5' if scala_major == '2.11' else scala_fmt_version}"
    ]

    scala_artifacts = [
        f'org.scala-lang:scala3-library_3:{scala_version}',
        f'org.scala-lang:scala3-compiler_3:{scala_version}',
        f'org.scala-lang:scala3-interfaces:{scala_version}',
        f'org.scala-lang:tasty-core_3:{scala_version}'
    ] if scala_major[0] == "3" else [
        f'org.scala-lang:scala-library:{scala_version}',
        f'org.scala-lang:scala-compiler:{scala_version}',
        f'org.scala-lang:scala-reflect:{scala_version}',
        f'org.scalameta:semanticdb-scalac_{scala_version}:4.9.9',
        f'org.typelevel:kind-projector_{scala_version}:{kind_projector_version}'
    ]

    return common_root_artifacts + scala_artifacts

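# Illustrative result (derived from the constants above, not executed here):
# select_root_artifacts("3.3.3") returns
#   ["org.scalatest:scalatest_3:3.2.9",
#    "org.scalameta:scalafmt-core_2.13:3.0.0",
#    "org.scala-lang:scala3-library_3:3.3.3",
#    "org.scala-lang:scala3-compiler_3:3.3.3",
#    "org.scala-lang:scala3-interfaces:3.3.3",
#    "org.scala-lang:tasty-core_3:3.3.3"]
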
def get_maven_coordinates(artifact) -> MavenCoordinates:
    splitted = artifact.split(':')
    # Plain coordinates are "group:artifact:version"; when an extra non-numeric field
    # precedes the version, the version is the fourth field instead.
    version = splitted[2] if splitted[2][0].isnumeric() else splitted[3]
    return MavenCoordinates(splitted[0], splitted[1], version, artifact)

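# Illustrative example (not executed): a plain three-part coordinate maps straight through:
#   get_maven_coordinates("org.scalatest:scalatest_3:3.2.9")
#   == MavenCoordinates(group="org.scalatest", artifact="scalatest_3",
#                       version="3.2.9", coordinate="org.scalatest:scalatest_3:3.2.9")
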
def get_mavens_coordinates_from_json(artifacts) -> List[MavenCoordinates]:
    return [get_maven_coordinates(artifact) for artifact in artifacts]

def get_artifact_checksum(artifact) -> str:
    output = subprocess.run(f'cs fetch {artifact}', capture_output=True, text=True, shell=True).stdout.splitlines()
    # coursier prints the cached file's path, which embeds ".../https/<host>/..."; slice
    # from "https" and restore the scheme to recover the original download URL.
    possible_url = [o for o in output if "https" in o][0]
    possible_url = possible_url[possible_url.find("https"):].replace('https/', 'https://')
    try:
        with urllib.request.urlopen(possible_url) as value:
            body = value.read()
            return hashlib.sha256(body).hexdigest()
    except urllib.error.HTTPError:
        # No checksum is returned (None) when the artifact cannot be downloaded.
        print(f'RESOURCES NOT FOUND: {possible_url}')

def get_json_dependencies(artifact) -> List[MavenCoordinates]:
    # out.json is the coursier --json-output-file report written by map_to_resolved_artifacts.
    # The walrus expression keeps the matching dependency entry once any() finds it.
    with open('out.json') as file:
        data = json.load(file)
        return (get_mavens_coordinates_from_json(dependency["directDependencies"])
                if any((dependency := d)["coord"] == artifact for d in data["dependencies"])
                else [])

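# Sketch of the out.json shape this relies on (only the fields read above are shown,
# everything else in coursier's report is omitted):
#   {
#       "dependencies": [
#           {"coord": "<group:artifact:version>",
#            "directDependencies": ["<group:artifact:version>", ...]},
#           ...
#       ]
#   }
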
def get_label(coordinate) -> str:
    group = coordinate.group
    artifact = coordinate.artifact

    if (("org.scala-lang" in group or "org.scalatest" in group or "org.scalactic" in group
         or "com.twitter" in group or "javax.annotation" in group)
            and "scala-collection" not in artifact and "scalap" not in artifact):
        return "io_bazel_rules_scala_" + artifact.split('_')[0].replace('-', '_')
    elif ("org.openjdk.jmh" in group or "org.ow2.asm" in group or "net.sf.jopt-simple" in group
          or "org.apache.commons" in group or "junit" in group or "org.hamcrest" in group
          or "org.specs2" in group):
        return ("io_bazel_rules_scala_" + group.replace('.', '_').replace('-', '_')
                + '_' + artifact.split('_')[0].replace('-', '_'))
    elif "mustache" in group or "guava" in group or "scopt" in group:
        return "io_bazel_rules_scala_" + group.split('.')[-1]
    elif "com.thesamet.scalapb" in group or "io." in group or "com.google.guava" in group:
        return "scala_proto_rules_" + artifact.split('_')[0].replace('-', '_')
    else:
        return (group.replace('.', '_').replace('-', '_')
                + '_' + artifact.split('_')[0].replace('-', '_')).replace('_v2', '')

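# Illustrative label mappings, following the branches above (not executed here):
#   org.scala-lang:scala-compiler  ->  "io_bazel_rules_scala_scala_compiler"
#   junit:junit                    ->  "io_bazel_rules_scala_junit_junit"
#   com.google.guava:guava         ->  "io_bazel_rules_scala_guava"
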
def map_to_resolved_artifacts(output) -> List[ResolvedArtifact]:
    resolved_artifacts = []
    # Write the full dependency report to out.json so get_json_dependencies() can look up
    # each artifact's direct dependencies.
    subprocess.call(f'cs fetch {" ".join(output)} --json-output-file out.json', shell=True)
    for o in output:
        replaced = o.replace(':default', '')
        coordinates = get_maven_coordinates(replaced)
        checksum = get_artifact_checksum(replaced)
        direct_dependencies = get_json_dependencies(replaced)
        resolved_artifacts.append(ResolvedArtifact(coordinates, checksum, direct_dependencies))
    return resolved_artifacts

def resolve_artifacts_with_checksums_and_direct_dependencies(root_artifacts) -> List[ResolvedArtifact]:
    command = f'cs resolve {" ".join(root_artifacts)}'
    output = subprocess.run(command, capture_output=True, text=True, shell=True).stdout.splitlines()
    return map_to_resolved_artifacts(output)

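# The parsing above assumes every `cs resolve` output line is a Maven coordinate, possibly
# carrying a ":default" configuration suffix that map_to_resolved_artifacts strips, e.g.
#   org.scala-lang:scala-library:2.13.14:default
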
def to_rules_scala_compatible_dict(artifacts, version) -> Dict[str, Dict]:
    temp = {}

    for a in artifacts:
        label = (get_label(a.coordinates)
                 .replace('scala3_', 'scala_')
                 .replace('scala_tasty_core', 'scala_scala_tasty_core'))
        # Dependencies of scala3-library_3 get a "_2" suffix on their labels.
        deps = [f'@{get_label(dep)}_2' if "scala3-library_3" in a.coordinates.artifact else f'@{get_label(dep)}'
                for dep in a.direct_dependencies]

        # "deps:" (with a trailing colon) is a temporary key; create_file() copies its
        # filtered contents into the final "deps" entry.
        temp[label] = {
            "artifact": f"{a.coordinates.coordinate}",
            "sha256": f"{a.checksum}",
        } if not deps else {
            "artifact": f"{a.coordinates.coordinate}",
            "sha256": f"{a.checksum}",
            "deps:": deps,
        }

    return temp

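# Illustrative shape of one generated entry (actual values come from the resolution above):
#   "<label from get_label()>": {
#       "artifact": "<group:artifact:version>",
#       "sha256": "<sha256 of the downloaded artifact>",
#       "deps:": ["@<label of each direct dependency>"],
#   }
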
def is_that_trailing_coma(content, char, indice) -> bool:
    return (content[indice] == char and content[indice + 1] != ','
            and content[indice + 1] != ':' and content[indice + 1] != '@'
            and not content[indice + 1].isalnum())

def get_with_trailing_commas(content) -> str:
    copied = copy.deepcopy(content)
    content_length = len(copied)
    i = 0

    while i < content_length - 1:
        if is_that_trailing_coma(copied, '"', i):
            copied = copied[:i] + '",' + copied[i + 1:]
            content_length = content_length + 1
            i = i + 2
        elif is_that_trailing_coma(copied, ']', i):
            copied = copied[:i] + '],' + copied[i + 1:]
            content_length = content_length + 1
            i = i + 2
        elif is_that_trailing_coma(copied, '}', i):
            copied = copied[:i] + '},' + copied[i + 1:]
            content_length = content_length + 1
            i = i + 2
        else:
            i = i + 1

    return copied

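# A small worked example (illustrative): applied to json.dumps output such as
#   {
#       "deps": [
#           "@x"
#       ]
#   }
# the function adds trailing commas after the inner values and brackets:
#   {
#       "deps": [
#           "@x",
#       ],
#   }
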
def write_to_file(artifact_dict, version, file):
    # json.dumps emits JSON booleans; rewrite them as Starlark/Python literals.
    formatted = json.dumps(artifact_dict, indent=4).replace('true', 'True').replace('false', 'False')
    with file.open('w') as data:
        data.write(f'scala_version = "{version}"\n')
        data.write('\nartifacts = ')
        data.write(f'{get_with_trailing_commas(formatted)}\n')

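# The generated file then starts roughly like this (illustrative):
#   scala_version = "2.13.14"
#
#   artifacts = {
#       ...
#   }
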
def create_file(version):
    path = os.getcwd().replace('/scripts', '/third_party/repositories')
    file = Path(f'{path}/scala_{"_".join(version.split(".")[:2])}.bzl')

    # Seed a brand-new file from the lexicographically last existing .bzl so its label set
    # can be updated in place below.
    if not file.exists():
        file_to_copy = Path(sorted(glob.glob(f'{path}/*.bzl'))[-1])
        with file.open('w+') as data, file_to_copy.open('r') as data_to_copy:
            for line in data_to_copy:
                data.write(line)

    with file.open('r+') as data:
        excluded_artifacts = ["org.scala-lang.modules:scala-parser-combinators_2.11:1.0.4"]
        root_artifacts = select_root_artifacts(version)
        read_data = data.read()
        # The existing artifacts dict literal starts at the first '{' of the file.
        replaced_data = read_data[read_data.find('{'):]

        original_artifact_dict = ast.literal_eval(replaced_data)
        labels = original_artifact_dict.keys()

        transitive_artifacts: List[ResolvedArtifact] = resolve_artifacts_with_checksums_and_direct_dependencies(root_artifacts)
        generated_artifact_dict = to_rules_scala_compatible_dict(transitive_artifacts, version)
        generated_labels = generated_artifact_dict.keys()

        # Only update labels that already exist in the checked-in file; skip excluded
        # artifacts and drop unknown or runtime dependencies.
        for label in labels:
            if label in generated_labels and generated_artifact_dict[label]["artifact"] not in excluded_artifacts:
                artifact = generated_artifact_dict[label]["artifact"]
                sha = generated_artifact_dict[label]["sha256"]
                deps = generated_artifact_dict[label]["deps:"] if "deps:" in generated_artifact_dict[label] else []
                original_artifact_dict[label]["artifact"] = artifact
                original_artifact_dict[label]["sha256"] = sha
                if deps:
                    dependencies = [d for d in deps if d[1:] in labels and "runtime" not in d and "runtime" not in artifact]
                    if dependencies:
                        original_artifact_dict[label]["deps"] = dependencies

        write_to_file(original_artifact_dict, version, file)

for version in root_scala_versions:
    create_file(version)