
Commit c549009

Assembler for IrvingOne
1 parent 3cb9fc4 commit c549009

File tree

2 files changed, +231 -0 lines changed


python/snake

+18
@@ -0,0 +1,18 @@
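; Roughly: r0 holds the step length, r1 a turn increment, r3 the constant -1.
; Each pass through 'loop' does two turn/move steps, bumping r2 after each
; and shrinking r0 by one, until bne sees r0 equal to r4 and execution falls
; through to halt.
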
start:
trace 1
load r0 4
load r1 1
load r3 -1

loop:
turn r2
move r0
add r2 r1
turn r2
move r0
add r0 r3
add r2 r1
bne r0 r4 loop

stop:
halt

python/turtle-assembler.py

+213
@@ -0,0 +1,213 @@
import re

# Test the 'Ri' (register) variants before the 'd'/'c' (constant) ones,
# since the looser \w+ patterns would also match register names like 'r2'.
label_tk = re.compile(r'\s*(?P<name>\w+):')
move1_tk = re.compile(r'\s*move\s+(?P<Ri>r\d)')
move2_tk = re.compile(r'\s*move\s+(?P<c>-?\w+)')
turn1_tk = re.compile(r'\s*turn\s+(?P<Ri>r\d)')
turn2_tk = re.compile(r'\s*turn\s+(?P<d>-?\w+)')
load_tk = re.compile(r'\s*load\s+(?P<Ri>r\d)\s+(?P<n>-?\w+)')
add_tk = re.compile(r'\s*add\s+(?P<Ri>r\d)\s+(?P<Rj>r\d)')
trace1_tk = re.compile(r'\s*trace\s+(?P<Ri>r\d)')
trace2_tk = re.compile(r'\s*trace\s+(?P<d>-?\w+)')
beq_tk = re.compile(r'\s*beq\s+(?P<Ri>r\d)\s+(?P<Rj>r\d)\s+(?P<a>\w+)')
bne_tk = re.compile(r'\s*bne\s+(?P<Ri>r\d)\s+(?P<Rj>r\d)\s+(?P<a>\w+)')
bge_tk = re.compile(r'\s*bge\s+(?P<Ri>r\d)\s+(?P<Rj>r\d)\s+(?P<a>\w+)')
bgt_tk = re.compile(r'\s*bgt\s+(?P<Ri>r\d)\s+(?P<Rj>r\d)\s+(?P<a>\w+)')
halt_tk = re.compile(r'\s*halt')

def tokenize_line(line):
    # Drop comments
    line = re.sub(r';.*', '', line)

    # Ignore lines with only whitespace
    if re.match(r'^\s*$', line):
        return

    # Tokenize
    m = label_tk.match(line)
    if m:
        return dict(type='label', name=m.group('name'))

    m = move1_tk.match(line)
    if m:
        return dict(type='instr', opcode='move_Ri',
                    Ri=m.group('Ri'))

    m = move2_tk.match(line)
    if m:
        return dict(type='instr', opcode='move_c',
                    c=m.group('c'))

    m = turn1_tk.match(line)
    if m:
        return dict(type='instr', opcode='turn_Ri',
                    Ri=m.group('Ri'))

    m = turn2_tk.match(line)
    if m:
        return dict(type='instr', opcode='turn_d',
                    d=m.group('d'))

    m = load_tk.match(line)
    if m:
        return dict(type='instr', opcode='load',
                    Ri=m.group('Ri'), n=m.group('n'))

    m = add_tk.match(line)
    if m:
        return dict(type='instr', opcode='add',
                    Ri=m.group('Ri'), Rj=m.group('Rj'))

    m = trace1_tk.match(line)
    if m:
        return dict(type='instr', opcode='trace_Ri',
                    Ri=m.group('Ri'))

    m = trace2_tk.match(line)
    if m:
        return dict(type='instr', opcode='trace_d',
                    d=m.group('d'))

    m = beq_tk.match(line)
    if m:
        return dict(type='instr', opcode='beq',
                    Ri=m.group('Ri'), Rj=m.group('Rj'),
                    a=m.group('a'))

    m = bne_tk.match(line)
    if m:
        return dict(type='instr', opcode='bne',
                    Ri=m.group('Ri'), Rj=m.group('Rj'),
                    a=m.group('a'))

    m = bge_tk.match(line)
    if m:
        return dict(type='instr', opcode='bge',
                    Ri=m.group('Ri'), Rj=m.group('Rj'),
                    a=m.group('a'))

    m = bgt_tk.match(line)
    if m:
        return dict(type='instr', opcode='bgt',
                    Ri=m.group('Ri'), Rj=m.group('Rj'),
                    a=m.group('a'))

    m = halt_tk.match(line)
    if m:
        return dict(type='instr', opcode='halt')

    raise SyntaxError('Syntax error on line %s' % line)

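# Illustrative examples of the token dicts produced above:
#   tokenize_line('load r0 4') -> {'type': 'instr', 'opcode': 'load',
#                                  'Ri': 'r0', 'n': '4'}
#   tokenize_line('loop:')     -> {'type': 'label', 'name': 'loop'}
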
def tokenize_file(filename):
    with open(filename, "r") as f:
        r = []
        for line in f:
            l = tokenize_line(line)
            if l:
                r.append(l)
    return r

def num(s, bits=6):
    n = int(s, base=0)
    b = 2**bits
    # Accept anything representable in a 'bits'-wide field: 0..2**bits-1
    # unsigned, or down to -2**(bits-1) for negative constants, which are
    # truncated to two's complement when the fields are packed.
    if -(b // 2) <= n < b:
        return n
    else:
        raise ValueError('%s does not fit in %s bits' % (n, bits))

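# For example, num('4') and num('0x3F') both fit in the default 6 bits,
# while num('-1') comes back as -1 and ends up as 0b111111 once pack()
# masks the field.
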
def reg(s):
    r = int(s[1], base=10)
    if 0 <= r <= 7:
        return r
    else:
        raise ValueError('Unknown register %s' % s)

def genlabels(lines):
    labels = dict()
    pc = 0
    instrs = []
    for l in lines:
        if l['type'] == 'label':
            labels[l['name']] = pc
        else:
            instrs.append(l)
            pc += 1
    return (instrs, labels)

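# Illustration: for the snake program in this commit, genlabels() produces
# labels = {'start': 0, 'loop': 4, 'stop': 12}; each label maps to the
# address of the instruction that follows it, since labels occupy no slot.
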
def gencode(instrs, labels):
    codes = []
    for pc, i in enumerate(instrs):
        if i['opcode'] == 'move_c':
            codes.append((0x0, 0, 0, num(i['c'], 6)))

        elif i['opcode'] == 'move_Ri':
            codes.append((0x1, reg(i['Ri']), 0, 0))

        elif i['opcode'] == 'turn_d':
            codes.append((0x2, 0, 0, num(i['d'], 2)))

        elif i['opcode'] == 'turn_Ri':
            codes.append((0x3, reg(i['Ri']), 0, 0))

        elif i['opcode'] == 'load':
            codes.append((0x4, reg(i['Ri']), 0, num(i['n'], 6)))

        elif i['opcode'] == 'add':
            codes.append((0x5, reg(i['Ri']), reg(i['Rj']), 0))

        elif i['opcode'] == 'trace_d':
            codes.append((0x6, 0, 0, num(i['d'], 1)))

        elif i['opcode'] == 'trace_Ri':
            codes.append((0x7, 0, 0, reg(i['Ri'])))

        elif i['opcode'] == 'beq':
            codes.append((0x8, reg(i['Ri']), reg(i['Rj']),
                          labels[i['a']] - pc - 1))

        elif i['opcode'] == 'bne':
            codes.append((0x9, reg(i['Ri']), reg(i['Rj']),
                          labels[i['a']] - pc - 1))

        elif i['opcode'] == 'bge':
            codes.append((0xA, reg(i['Ri']), reg(i['Rj']),
                          labels[i['a']] - pc - 1))

        elif i['opcode'] == 'bgt':
            codes.append((0xB, reg(i['Ri']), reg(i['Rj']),
                          labels[i['a']] - pc - 1))

        elif i['opcode'] == 'halt':
            codes.append((0xF, 0, 0, 0))

    return codes

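# Illustration of the 4-tuples emitted above: 'load r0 4' becomes
# (0x4, 0, 0, 4); the snake program's 'bne r0 r4 loop' sits at pc 11, so it
# becomes (0x9, 0, 4, -8), since labels['loop'] - 11 - 1 = -8 (a backward
# branch, encoded relative to the next instruction).
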
# Pack each code into a 16-bit word: 4-bit opcode, two 3-bit register
# fields and a 6-bit constant/offset field.
def pack(codes):
    r = ""
    for c in codes:
        # %04X keeps every word exactly four hex digits wide.
        r += "%04X" % ((c[0] & 0b1111) << 12
                       | (c[1] & 0b111) << 9
                       | (c[2] & 0b111) << 6
                       | (c[3] & 0b111111))
    return r

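# Worked example: (0x4, 0, 0, 4) packs to 0x4 << 12 | 0 << 9 | 0 << 6 | 4
# = 0x4004 and is emitted as the four hex digits "4004".
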
def run():
    import sys
    f = sys.argv[1]
    print(pack(gencode(*genlabels(tokenize_file(f)))))

if __name__ == '__main__': run()

# Debug
# print(tokenize_file("snake"))
# print(gencode(*genlabels(tokenize_file("snake"))))
# print(pack(gencode(*genlabels(tokenize_file("snake")))))
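
To try the assembler on the example program, run python3 python/turtle-assembler.py python/snake (assuming a Python 3 interpreter; the script takes the source file as its only argument and prints the packed hex words to stdout).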
