"""
A Python-based toolflow to build a Vivado
project from a Simulink design, using the
CASPER XPS library.
A work in progress.
"""
import logging
import os
import casper_platform as platform
import yellow_blocks.yellow_block as yellow_block
import blockdesign
import verilog
from constraints import PortConstraint, ClockConstraint, GenClockConstraint, \
ClockGroupConstraint, InputDelayConstraint, OutputDelayConstraint, MaxDelayConstraint, \
MinDelayConstraint, FalsePathConstraint, MultiCycleConstraint, RawConstraint
import castro
import helpers
import yaml
import glob
import time
import hashlib # Added to calculate md5hash of .bin bitstream and add it to the .fpg header
import pickle # Used to dump the pickle of the generated VerilogModule to the build directory for debugging
import struct # Used to append a binary checksum to a bitstream
import csv # read core_info.tab to populate device tree nodes in VitisBackend
# For xml2vhdl generation from Oxford
import xml.dom.minidom
import xml.etree.ElementTree as ET
#JH: I don't know what this is, but I suspect here is a better place for it than constraints.py
MAX_IMAGE_CHUNK_SIZE = 1988
try:
from katversion import get_version as kat_get_version
except ImportError:
kat_get_version = None
class Toolflow(object):
"""
A class embodying the main functionality of the toolflow.
This class is responsible for generating a complete
top-level verilog description of a project from a 'peripherals file'
which encodes information about which IP a user wants instantiated.
The toolflow class can parse such a file, and use it to generate verilog,
a list of source files, and a list of constraints.
These can be passed off to a toolflow backend to be turned into some
vendor-specific platform and compiled. At least, that's the plan...
"""
def __init__(self, frontend='simulink', compile_dir='/tmp',
frontend_target='/tmp/test.slx', jobs=8):
"""
Initialize the toolflow.
:param frontend: Name of the toolflow frontend to use.
Currently only ``simulink`` is supported
:type frontend: str
:param compile_dir: Compile directory where build files and logs
should go.
"""
# Set up a logger (the logger named 'jasper' should already
        # have been configured beforehand)
self.logger = logging.getLogger('jasper.toolflow')
self.jobs = jobs
self.logger.info('Starting Toolflow!')
self.logger.info('Frontend is %s' % frontend)
self.compile_dir = compile_dir.rstrip('/')
self.output_dir = self.compile_dir + '/outputs'
self.logger.info('Setting compile directory: %s' % self.compile_dir)
os.system('mkdir -p %s' % self.compile_dir)
os.system('mkdir -p %s' % self.output_dir)
# compile parameters which can be set straight away
self.start_time = time.localtime()
self.periph_file = self.compile_dir + '/jasper.per'
self.git_info_file = self.compile_dir + '/git_info.tab'
self.frontend_target = frontend_target
self.modelname = frontend_target.split('/')[-1][:-4] # strip off extension
self.frontend_target_base = os.path.basename(frontend_target)
self.cores = None
self.topfile = None
self.top = None
self.periph_objs = None
self.constraints = None
if frontend == 'simulink':
self.frontend = SimulinkFrontend(compile_dir=self.compile_dir,
target=frontend_target)
else:
            self.logger.error('Unsupported toolflow frontend: %s' % frontend)
raise Exception('Unsupported toolflow frontend: %s' % frontend)
self.backend = None
# if backend == 'vivado':
# self.backend = VivadoBackend(compile_dir=self.compile_dir)
# elif backend == 'ise':
# self.backend = ISEBackend(compile_dir=self.compile_dir)
# else:
# self.logger.error('Unsupported toolflow backend: %s'%backend)
# raise Exception('Unsupported toolflow backend: %s'%backend)
self.sources = []
self.ips = []
self.tcl_sources = []
self.const_files = []
# compile directories for xml2vhdl
self.xml_source_dir = self.compile_dir + '/xml2vhdl_source'
self.xml_output_dir = self.compile_dir + '/xml2vhdl_xml_output'
self.hdl_output_dir = self.compile_dir + '/xml2vhdl_hdl_output'
def exec_flow(self, gen_per=True, frontend_compile=True):
"""
Execute a compile.
:param gen_per: Have the toolflow frontend generate a fresh
peripherals file
:type gen_per: bool
        :param frontend_compile: Run the frontend compiler (e.g. System
Generator)
:type frontend_compile: bool
"""
if gen_per:
self.frontend.gen_periph_file(fname=self.periph_file)
self.frontend.write_git_info_file(fname=self.git_info_file)
# Have the toolflow parse the information from the
# frontend and generate the YellowBlock objects
self.logger.info('Generating peripheral objects')
self.gen_periph_objs()
        # Copy the platform's top-level HDL file
# and begin modifying it based on the yellow
# block objects.
self.logger.info('Generating HDL')
self.build_top()
self.generate_hdl()
self.check_templates()
# Generate constraints (not yet xilinx standard)
self.generate_consts()
# Generate software cores file
self.write_core_info()
self.write_core_jam_info()
# print 'Initializing backend project'
# self.backend.initialize(self.plat)
self.constraints_rule_check()
if frontend_compile:
# Run system generator (maybe flow-wise
# it would make sense to run this sooner,
# but since it's the longest single step
# it's nice to run it at the end, so there's
# an opportunity to catch toolflow errors
            # before waiting for it)
self.logger.info('Running frontend compile')
# skip this step if you don't want to wait for sysgen in testing
self.frontend.compile_user_ip(update=True)
self.logger.info('frontend complete')
self.dump_castro(self.compile_dir+'/castro.yml')
# binary = self.backend.binary_loc
# os.system('cp %s %s/top.bin'%(binary, self.compile_dir))
# mkbof_cmd = '%s/jasper_library/mkbof_64 -o %s/%s -s %s/core_info.ta' \
# 'b -t 3 %s/top.bin' % (os.getenv('MLIB_DEVEL_PATH'),
# self.output_dir, self.output,
# self.compile_dir, self.compile_dir)
# os.system(mkbof_cmd)
# self.logger.info(mkbof_cmd)
def check_attr_exists(self, thing, generator):
"""
Lots of methods in this class require that certain attributes
have been set by other methods before proceeding. This is probably
a symptom of the code being terribly structured. This method
checks if an attribute exists and throws an error message if not.
In principle it could automatically run the necessary missing steps,
but that seems pretty suspect.
:param thing: Attribute to check.
:type thing: str
:param generator: Method which can be used to set thing (used for
error message only)
:type generator: str
"""
if self.__getattribute__(thing) is None:
errmsg = '%s is not defined. Have you run %s yet?' % (
thing, generator)
self.logger.error(errmsg)
raise AttributeError(errmsg)
def _add_external_tcl(self):
"""
        Add tcl commands from the frontend.
        Deprecated: this method raises immediately, so the code below is never reached.
"""
raise DeprecationWarning
for fname in self.tcl_sources:
with open(fname, 'r') as fh:
self.backend.add_tcl_cmd(fh.read())
def generate_hdl(self):
"""
Generates a top file for the target platform
based on the peripherals file.
Internally, calls:
* ``instantiate_periphs``: call each yellow block's mod_top method
* ``instantiate_user_ip``: add ports to top module based on port entries in peripheral file
* ``regenerate_top``: rewrite top.v
"""
self.logger.info('instantiating user peripherals')
self._instantiate_periphs()
self.logger.info('instantiating user_ip')
self._instantiate_user_ip()
self.logger.info('Finalizing top-level design')
self._finalize_top()
self.logger.info('regenerating top')
self.regenerate_top()
self.logger.info('generating auxiliary HDL')
self.generate_peripheral_hdl()
def generate_peripheral_hdl(self):
"""
        Create each yellow block's custom HDL files and add them to the project's sources.
"""
self.logger.info('Generating yellow block custom hdl files')
for obj in self.periph_objs:
c = obj.gen_custom_hdl()
for key, val in c.items():
# create file and write the source string to it
filename = '%s/%s' % (self.compile_dir, key)
with open(filename, 'w') as fh:
fh.write(val)
self.sources += [fh.name]
# Also add other sources the yellow blocks
# think we should have
for files in obj.add_build_dir_source():
self.sources += glob.glob(os.path.join(self.compile_dir, files['files']))
def check_templates(self):
"""
Check for any yellow blocks marked with a non-None value of the
`template_project` attribute.
        a) Blocks must not have conflicting `template_project` values.
b) If any of the `template_project` values are non-None, the
specified `template_project` should be a valid file.
"""
self.template_project = None
for block in self.periph_objs:
if block.template_project is None:
continue
else:
if not os.path.exists(block.template_project):
self.logger.error("Missing template project %s, required"
" by yellow block %s" % (block.template_project, block.name))
raise RuntimeError
if self.template_project is None:
self.template_project = block.template_project
self.logger.info("Block %s specifies template project %s" % (block.name, block.template_project))
if self.template_project != block.template_project:
self.logger.error("Incompatible template project specified: (%s by block %s)" % (block.template_project, block.name))
def _parse_periph_file(self):
"""
        Open the peripherals file and parse its
        contents using the PyYAML package.
        Write the resulting yellow_blocks
        and user_modules dictionaries to
        attributes.
"""
if not os.path.exists(self.periph_file):
self.logger.error('Peripherals file doesn\'t exist!')
raise Exception('Peripherals file doesn\'t exist!')
with open(self.periph_file, 'r') as fh:
yaml_dict = yaml.load(fh, Loader=yaml.Loader)
self.peripherals = yaml_dict['yellow_blocks']
self.user_modules = yaml_dict['user_modules']
def _extract_plat_info(self):
"""
Extract platform information from the
yellow_block attributes.
Use this to instantiate the appropriate
device from the Platform class.
"""
for key in list(self.peripherals.keys()):
if self.peripherals[key]['tag'] == 'xps:xsg':
# self.plat = platform.Platform.get_loader(
# self.peripherals[key]['hw_sys'])
self.plat = platform.Platform(
self.peripherals[key]['hw_sys'].split(':')[0])
# self.backend.plat = self.plat
self.clk_src = self.peripherals[key]['clk_src']
# in MHz
self.clk_rate = float(self.peripherals[key]['clk_rate'])
return
raise Exception('self.peripherals does not contain anything '
'tagged xps:xsg')
def _drc(self):
"""
Get the provisions of the active platform and yellow blocks
and compare with the current requirements of blocks in the design.
"""
provisions = self._get_provisions()
# check all requirements and exclusive reqs are provided
for obj in self.periph_objs:
for req_list in [obj.requires, obj.exc_requires]:
for req in req_list:
self.logger.debug('%s requires %s' % (obj.name, req))
if req not in provisions:
self.logger.error('NOT SATISFIED: %s requires %s' % (
obj.name, req))
raise Exception('DRC FAIL! %s (required by %s) not '
'provided by platform or any '
'peripheral' % (req, obj.name))
# check for overallocation of resources
used = []
for obj in self.periph_objs:
for req in obj.exc_requires:
self.logger.debug('%s requires %s exclusively' % (
obj.name, req))
if req in used:
raise Exception('DRC FAIL! %s requires %s, but it has '
'already been used by another block.'
'' % (obj.name, req))
else:
used.append(req)
def _get_provisions(self):
"""
Get and return all the provisions of the active platform and
yellow blocks.
"""
provisions = []
for obj in self.periph_objs:
provisions += obj.provides
provisions += self.plat.provides
return provisions
def build_top(self):
"""
Copies the base top-level verilog file (which is platform
dependent) to the compile directory.
Constructs an associated VerilogModule instance ready to be
modified.
"""
#TODO: These weird try/except clauses seem to do odd SKARAB-specific stuff
# and probably shouldn't be here. Why not in the SKARAB yellow block?
try:
            # generate multiboot, golden or toolflow image based on yaml file
self.hdl_filename = '%s/skarab_infr/%s_parameters.vhd' % (os.getenv('HDL_ROOT'), self.plat.name)
# check to see if parameter file exists. Some platforms may not use this.
if os.path.isfile(self.hdl_filename):
self._gen_hdl_version(filename_hdl=self.hdl_filename)
except KeyError:
s = "" #?!
# check to see if entity file exists. Some platforms may not use this. This function overwrites incorrectly
# generated sysgen hdl files
#if self.platform.conf['bit_reversal']==True:
try:
# return the sysgen entity declarations file
self.hdl_sysgen_filename = '%s/sysgen/hdl_netlist/%s.srcs/sources_1/imports/sysgen/%s_entity_declarations.vhd' \
% (self.compile_dir, self.modelname, self.modelname)
if os.path.isfile(self.hdl_sysgen_filename):
self._gen_hdl_simulink(hdl_sysgen_filename=self.hdl_sysgen_filename)
# just ignore if key is not present as only some platforms will have the key.
except KeyError:
s = "" #?!
self.topfile = self.compile_dir+'/top.v'
# delete top.v file if it exists, otherwise synthesis will fail
if os.path.exists(self.topfile):
os.remove(self.topfile)
# os.system('cp %s %s'%(basetopfile, self.topfile))
self.sources.append(self.topfile)
for source in self.plat.sources:
self.sources.append(os.getenv('HDL_ROOT')+'/'+source)
for source in self.plat.consts:
self.const_files.append(os.getenv('HDL_ROOT') + '/%s/%s' % (
self.plat.name, source))
if os.path.exists(self.topfile):
self.top = verilog.VerilogModule(name='top', topfile=self.topfile)
else:
self.top = verilog.VerilogModule(name='top')
def gen_periph_objs(self):
"""
Generate a list of yellow blocks from the current peripheral file.
Internally, calls:
* ``_parse_periph_file``: parses .per file
* ``_extract_plat_info``: instantiates platform instance
Then calls each yellow block's constructor.
Runs a system-wide drc before returning.
"""
self._parse_periph_file()
self._extract_plat_info()
self.periph_objs = []
for pk in list(sorted(self.peripherals.keys())):
self.logger.debug('Generating Yellow Block: %s' % pk)
self.periph_objs.append(yellow_block.YellowBlock.make_block(
self.peripherals[pk], self.plat))
self._expand_children(self.periph_objs)
self._drc()
def _expand_children(self, population, parents=None, recursive=True):
"""
:param population: yellow blocks to which children will be added
:type population: list
:param parents: yellow blocks which will be invited to procreate.
If parents = None, the population will be used as the initial
parents argument
:type parents: list
:param recursive: if True, this method is called recursively, with children
passed as the new parents argument. The population list
will continue to grow until no child yellow blocks wish
to procreate any further.
:type recursive: bool
"""
parents = parents or population
children = []
for parent in parents:
self.logger.debug('Inviting block %r to procreate' % parent)
children += parent.gen_children()
if not children:
return
else:
population += children
if not recursive:
return
else:
self._expand_children(population, children)
return
def _instantiate_periphs(self):
"""
Calls each yellow block's modify_top method against the class'
top VerilogModule instance
"""
self.logger.info('top: %s' % self.topfile)
for obj in self.periph_objs:
self.logger.debug('modifying top for obj %s' % obj.name)
# self.top.set_cur_blk(obj.fullname)
if '/' in obj.fullpath:
obj.fullpath = obj.fullpath.partition('/')[2]
self.top.set_cur_blk('%s: %s'%(obj.tag.split(':')[1], obj.fullpath))
obj.modify_top(self.top)
self.sources += obj.sources
self.ips += obj.ips
        # add AXI4-Lite architecture-specific stuff, which must be called after all yellow blocks have modified top.
if 'AXI4-Lite' in self.plat.mmbus_architecture:
# Make an AXI4-Lite interconnect yellow block and let it modify top
axi4lite_interconnect = yellow_block.YellowBlock.make_block(
{'tag': 'xps:axi4lite_interconnect', 'name': 'axi4lite_interconnect',
'fullpath': list(sorted(self.user_modules.keys()))[0] +'/axi4lite_interconnect'}, self.plat)
axi4lite_interconnect.modify_top(self.top)
self.sources += axi4lite_interconnect.sources
self.ips += axi4lite_interconnect.ips
# Generate xml2vhdl
self.xml2vhdl()
# add the AXI4lite yellowblock to the peripherals manually
self.periph_objs.append(axi4lite_interconnect)
def _finalize_top(self):
"""
Call every Yellow Block's `finalize_top` method, in case
any of them want to modify the design now all the peripherals
and user IP have been instantiated.
"""
for obj in self.periph_objs:
self.top = obj.finalize_top(self.top)
def _instantiate_user_ip(self):
"""
        Adds the VerilogInstances and ports associated with user IP to the class's top
        VerilogModule instance.
"""
for name, usermodule in list(self.user_modules.items()):
inst = self.top.get_instance(entity=name, name='%s_inst' % name)
self.top.set_cur_blk('usermodule: %s'%name)
# internal = False --> we assume that other yellow
# blocks have set up appropriate signals in top.v
# (we can't add them here anyway, because we don't
# know the port widths)
if 'clock' in list(sorted(usermodule.keys())):
inst.add_port(name=usermodule['clock'], signal='user_clk',
parent_sig=False)
if 'clock_enable' in list(sorted(usermodule.keys())):
inst.add_port(name=usermodule['clock_enable'], signal='1\'b1',
parent_sig=False)
for port in usermodule['ports']:
inst.add_port(name=port, signal=port, parent_sig=False)
if usermodule['sources'] is not None:
for source in usermodule['sources']:
self.sources += glob.glob(source)
# if usermodule['tcl_sources'] is not None:
# for source in usermodule['tcl_sources']:
# self.tcl_sources += glob.glob(source)
def write_core_info(self):
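        """
        Collect the memory-mapped devices in the design and write a core_info.tab
        file to the compile directory, listing each core's register name, access
        mode, base address and size (plus any aliased registers in its memory map).
        """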
self.cores = []
if 'AXI4-Lite' in self.plat.mmbus_architecture:
# get list of all axi4lite_devices in self.top.memory_map dict
for val in list(self.top.memory_map.values()):
self.cores += val['axi4lite_devices']
for val in self.top.rfdc_devices:
self.cores += [val]
if 'wishbone' in self.plat.mmbus_architecture:
self.cores += self.top.wb_devices
for val in self.top.xil_axi4lite_devices:
self.cores += [val]
basefile = '%s/%s/core_info.tab' % (os.getenv('HDL_ROOT'),
self.plat.name)
newfile = '%s/core_info.tab' % self.compile_dir
self.logger.debug('Opening %s' % basefile)
modemap = {'rw': 3, 'r': 1, 'w': 2}
try:
with open(basefile, 'r') as fh:
s = fh.read()
# If there isn't a basefile, just plow on
except IOError:
s = ''
if len(self.cores) != 0:
longest_name = max([len(core.regname) for core in self.cores])
format_str = '{0:%d} {1:1} {2:<16x} {3:<16x}\n' % longest_name
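            # Each entry is: <regname> <mode digit> <base address, hex> <size in bytes, hex>,
            # e.g. (illustrative): "sys_clkcounter 1 10000 4", with the name column
            # padded to the longest register name in the design.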
for core in self.cores:
self.logger.debug('Adding core_info.tab entry for '
'%s' % core.regname)
s += format_str.format(core.regname, modemap[core.mode],
core.base_addr, core.nbytes)
# add aliases if the WB Devices have them
# Add the core's register name as a prefix, because memory map
# names need not be unique!
for reg in core.memory_map:
s += format_str.format(core.regname + '_' + reg.name, modemap[reg.mode],
core.base_addr + reg.offset, reg.nbytes)
        self.logger.debug('Writing %s' % newfile)
with open(newfile, 'w') as fh:
fh.write(s)
def write_core_jam_info(self):
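        """
        As write_core_info, but write core_info.jam.tab (which additionally records
        each core's typecode) and then generate binary (.bin) and Xilinx-style (.mem)
        versions of the table.
        """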
self.cores = []
if 'AXI4-Lite' in self.plat.mmbus_architecture:
# get list of all axi4lite_devices in self.top.memory_map dict
for val in list(self.top.memory_map.values()):
self.cores += val['axi4lite_devices']
if 'wishbone' in self.plat.mmbus_architecture:
self.cores += self.top.wb_devices
for val in self.top.xil_axi4lite_devices:
self.cores += [val]
basefile = '%s/%s/core_info.jam.tab' % (os.getenv('HDL_ROOT'), self.plat.name)
newfile = '%s/core_info.jam.tab' % self.compile_dir
self.logger.debug('Opening %s' % basefile)
modemap = {'rw': 3, 'r': 1, 'w': 2}
try:
with open(basefile, 'r') as fh:
s = fh.read()
# If there isn't a basefile, just plow on
except IOError:
s = ''
if len(self.cores) != 0:
longest_name = max([len(core.regname) for core in self.cores])
format_str = '{0:%d} {1:1} {2:<16x} {3:<16x} {4:<2x}\n' % longest_name
for core in self.cores:
self.logger.debug('Adding core_info.jam.tab entry for %s' % core.regname)
s += format_str.format(core.regname, modemap[core.mode], core.base_addr, core.nbytes, core.typecode)
# add aliases if the WB Devices have them
for reg in core.memory_map:
s += format_str.format(core.regname + '_' + reg.name, modemap[reg.mode], core.base_addr + reg.offset, reg.nbytes, core.typecode)
        self.logger.debug('Writing %s' % newfile)
with open(newfile, 'w') as fh:
fh.write(s)
# generate the binary and xilinx-style .mem versions of this table,
# using Python script [TODO convert to a callable function?].
ret = os.system('python %s/jasper_library/cit2csl.py -b %s > %s.bin' % (os.getenv('MLIB_DEVEL_PATH'), newfile, newfile))
if ret != 0:
errmsg = 'Failed to generate binary file {}.bin, error code {}.'.format(newfile,ret)
self.logger.error(errmsg)
raise Exception(errmsg)
ret = os.system('python %s/jasper_library/cit2csl.py %s > %s.mem' % (os.getenv('MLIB_DEVEL_PATH'), newfile, newfile))
if ret != 0:
errmsg = 'Failed to generate xilinx-style file {}.mem, error code {}.'.format(newfile,ret)
self.logger.error(errmsg)
raise Exception(errmsg)
def regenerate_top(self):
"""
Generate the verilog for the modified top
module. This involves computing the wishbone
interconnect / addressing and generating new
code for yellow block instances.
"""
# Decide if we're going to use a hierarchical arbiter.
self.logger.debug("Looking for a max_devices_per_arbiter spec")
if 'max_devices_per_arbiter' in self.plat.conf:
self.top.max_devices_per_arb = self.plat.conf['max_devices_per_arbiter']
self.logger.debug("Found max_devices_per_arbiter: %s" % self.top.max_devices_per_arb)
# Check for memory map bus architecture, added to support AXI4-Lite
if 'AXI4-Lite' in self.plat.mmbus_architecture:
pass
if 'wishbone' in self.plat.mmbus_architecture:
self.top.wb_compute(self.plat.dsp_wb_base_address,
self.plat.dsp_wb_base_address_alignment)
# Write top module file
self.top.gen_module_file(filename=self.compile_dir+'/top.v')
# Write any submodule files required for the compile. This is probably
# only the hierarchical WB arbiter, or nothing at all
for key, val in self.top.generated_sub_modules.items():
self.logger.info("Writing sub module file %s.v" % key)
with open(self.compile_dir+'/%s.v'%key, 'w') as fh:
fh.write(val)
self.sources.append(fh.name)
self.logger.info("Dumping pickle of top-level Verilog module")
pickle.dump(self.top, open('%s/top.pickle' % self.compile_dir,'wb'))
def generate_consts(self):
"""
Compose a list of constraints from each yellow block.
Use platform information to generate the appropriate
physical realisation of each constraint.
"""
self.logger.info('Extracting constraints from peripherals')
self.check_attr_exists('periph_objs', 'gen_periph_objs()')
self.constraints = []
for obj in self.periph_objs:
self.logger.info('Getting constraints for block %s' % obj.name)
constraints = obj.gen_constraints()
if constraints is None:
# If there are no constraints, move on
continue
for constraint in constraints:
#if isinstance(constraint, PortConstraint) and self.template_project is not None:
# self.logger.info('Skipping PortConstraint because this is a PR run')
# # Partial reconfiguration projects have pin constraints
# # defined at the top-level. If we're in PR mode, skip them
# continue
self.constraints += [constraint]
self.logger.info('Generating physical constraints')
for constraint in self.constraints:
try:
constraint.gen_physical_const(self.plat)
except AttributeError:
pass # some constraints don't have this method
def constraints_rule_check(self):
"""
Check pin constraints against top level signals.
Warn about missing constraints.
"""
self.logger.info('Carrying out constraints rule check')
port_constraints = []
for const in self.constraints:
if isinstance(const, PortConstraint):
port_constraints += [const.portname]
for key in list(sorted(self.top.ports.keys())):
for port in self.top.ports[key]:
if port not in port_constraints:
self.logger.warning('Port %s (instantiated by %s) has no constraints!' % (port, key))
self.logger.info('Constraint rule check complete')
def dump_castro(self, filename):
"""
Build a 'standard' Castro object, which is the
interface between the toolflow and the backends.
"""
import castro
c = castro.Castro(self.top.name, self.sources, self.ips, template_project=self.template_project)
# build castro standard pin constraints
pin_constraints = []
clk_constraints = []
gen_clk_constraints = []
clk_grp_constraints = []
input_delay_constraints = []
output_delay_constraints = []
max_delay_constraints = []
min_delay_constraints = []
false_path_constraints = []
multi_cycle_constraints = []
raw_constraints = []
for const in self.constraints:
if isinstance(const, PortConstraint):
pin_constraints += [castro.PinConstraint(
portname=const.portname,
symbolic_name=const.iogroup,
portname_indices=const.port_index,
symbolic_indices=const.iogroup_index,
io_standard=const.iostd,
drive_strength=const.drive_strength,
location=const.loc,
diff_term=const.diff_term
)]
elif isinstance(const, ClockConstraint):
clk_constraints += [castro.ClkConstraint(
portname=const.signal,
freq_mhz=const.freq,
period_ns=const.period,
clkname=const.name,
waveform_min_ns=const.waveform_min,
waveform_max_ns=const.waveform_max,
port_en=const.port_en,
virtual_en=const.virtual_en
)]
elif isinstance(const, GenClockConstraint):
gen_clk_constraints += [castro.GenClkConstraint(
pinname=const.signal,
clkname=const.name,
divide_by=const.divide_by,
clksource=const.clock_source
)]
elif isinstance(const, ClockGroupConstraint):
clk_grp_constraints += [castro.ClkGrpConstraint(
clknamegrp1=const.clock_name_group_1,
clknamegrp2=const.clock_name_group_2,
clkdomaintype=const.clock_domain_relationship
)]
elif isinstance(const, InputDelayConstraint):
input_delay_constraints += [castro.InDelayConstraint(
clkname=const.clkname,
consttype=const.consttype,
constdelay_ns=const.constdelay_ns,
add_delay_en=const.add_delay_en,
portname=const.portname
)]
elif isinstance(const, OutputDelayConstraint):
output_delay_constraints += [castro.OutDelayConstraint(
clkname=const.clkname,
consttype=const.consttype,
constdelay_ns=const.constdelay_ns,
add_delay_en=const.add_delay_en,
portname=const.portname
)]
elif isinstance(const, MaxDelayConstraint):
max_delay_constraints += [castro.MaxDelayConstraint(
sourcepath=const.sourcepath,
destpath=const.destpath,
constdelay_ns=const.constdelay_ns
)]
elif isinstance(const, MinDelayConstraint):
min_delay_constraints += [castro.MinDelayConstraint(
sourcepath=const.sourcepath,
destpath=const.destpath,
constdelay_ns=const.constdelay_ns
)]
elif isinstance(const, FalsePathConstraint):
false_path_constraints += [castro.FalsePthConstraint(
sourcepath=const.sourcepath,
destpath=const.destpath
)]
elif isinstance(const, MultiCycleConstraint):
multi_cycle_constraints += [castro.MultiCycConstraint(
multicycletype=const.multicycletype,
sourcepath=const.sourcepath,
destpath=const.destpath,
multicycledelay=const.multicycledelay
)]
elif isinstance(const, RawConstraint):
raw_constraints += [castro.RawConstraint(
const.raw)]
c.synthesis = castro.Synthesis()
c.synthesis.pin_constraints = pin_constraints
c.synthesis.clk_constraints = clk_constraints
c.synthesis.gen_clk_constraints = gen_clk_constraints
c.synthesis.clk_grp_constraints = clk_grp_constraints
c.synthesis.input_delay_constraints = input_delay_constraints
c.synthesis.output_delay_constraints = output_delay_constraints
c.synthesis.max_delay_constraints = max_delay_constraints
c.synthesis.min_delay_constraints = min_delay_constraints
c.synthesis.false_path_constraints = false_path_constraints
c.synthesis.multi_cycle_constraints = multi_cycle_constraints
c.synthesis.raw_constraints = raw_constraints
c.synthesis.platform_name = self.plat.name
c.synthesis.fpga_manufacturer = self.plat.manufacturer
c.synthesis.fpga_model = self.plat.fpga
c.synthesis.pin_map = self.plat._pins
mm_slaves = []
if 'AXI4-Lite' in self.plat.mmbus_architecture:
for dev in self.top.axi4lite_devices:
if dev.mode == 'rw':
mode = 3
elif dev.mode == 'r':
mode = 1
elif dev.mode == 'w':
mode = 2
else:
mode = 1
mm_slaves += [castro.mm_slave(dev.regname, mode, dev.base_addr,
dev.nbytes)]
if 'wishbone' in self.plat.mmbus_architecture:
for dev in self.top.wb_devices:
if dev.mode == 'rw':
mode = 3
elif dev.mode == 'r':
mode = 1
elif dev.mode == 'w':
mode = 2
else:
mode = 1
mm_slaves += [castro.mm_slave(dev.regname, mode, dev.base_addr,
dev.nbytes)]
c.mm_slaves = mm_slaves
with open(filename, 'w') as fh:
fh.write(yaml.dump(c))
def _gen_hdl_version(self, filename_hdl):
"""
        This function reads the existing version information from the HDL file and rewrites it,
        setting the leading version digit to "8" (golden), "4" (multiboot) or "0" (toolflow).
        :param filename_hdl: Path to the HDL file that contains the original FPGA version
            information. This file is overwritten with the new multiboot, toolflow or
            golden image info before being imported into the compile directory.
        :type filename_hdl: str
"""
stringToMatch = 'constant C_VERSION'
lines = []
        self.logger.debug('Opening original HDL file %s' % filename_hdl)
        # read version info from the original file and append the updated version info to a
        # list that will be written back to the file
with open(filename_hdl, 'r') as fh1:
for line in fh1:
if stringToMatch in line:
if self.plat.boot_image == 'golden':
linesub = line[:line.find('X')+2] +'8'+ line[line.find('X')+3:]
lines.append(linesub)
elif self.plat.boot_image == 'multiboot':
linesub = line[:line.find('X')+2] +'4'+ line[line.find('X')+3:]
lines.append(linesub)
else:
linesub = line[:line.find('X')+2] +'0'+ line[line.find('X')+3:]
lines.append(linesub)
else:
lines.append(line)
#print (lines)
fh1.close()
# write new version info to the same file that will be imported to the correct folder
with open(filename_hdl, 'w') as fh2:
fh2.writelines(lines)
fh2.close()
def generate_xml_memory_map(self, memory_map):
"""
Generate xml memory map files that represent each AXI4-Lite interface for Oxford's xml2vhdl.
"""
# Generate memory map xml file for each interface in memory_map
for interface in list(sorted(memory_map.keys())):
xml_root = ET.Element('node')
xml_root.set('id', interface)
# fill xml node with slave info from memory map
for reg in memory_map[interface]['memory_map']:
# add a child to parent node
node = ET.SubElement(xml_root, 'node')
node.set('id', reg.name)
node.set('address', "%s" % hex(reg.offset))
# toolflow only currently supports 32-bit registers
node.set('mask', hex(0xFFFFFFFF))
# node.set('size', str(reg.nbytes))
node.set('permission', reg.mode)
node.set('axi4lite_mode', reg.axi4lite_mode)
if reg.mode == 'r':
if int(reg.default_val,16) != 0:
# Populate defaults of readable registers which
# Aren't driven by the fabric. I.e., static compile-time
# registers.
node.set('hw_rst', reg.default_val)
else:
# "Normal" read-only registers get written to from the
# fabric every cycle.
# To get to this clause it is important that simulink read-only
# software registers aren't given a default value. (Which wouldn't
# make sense)
node.set('hw_permission', 'w')
else:
# Only for a From Processor register (control)
node.set('hw_rst', reg.default_val)
# Best we can currently do for a description...? haha
node.set('description', str(interface + "_" + reg.name))
# set bram size and
if hasattr(reg, 'ram') and reg.ram==True:
node.set('hw_dp_ram', 'yes')
node.set('size', str(reg.nbytes//4)) # this needs to be in words not bytes!!! Dammit Janet
# Need to make special mention of the bitwidth (data width) here
# - Reading from xml2slave.py - need the key 'hw_dp_ram_width'
node.set('hw_dp_ram_width', str(reg.data_width))
# output xml file describing memory map as input for xml2vhdl
myxml = xml.dom.minidom.parseString(ET.tostring(xml_root))
xml_base_name = interface + "_memory_map.xml"
xml_file_name = os.path.join(self.xml_source_dir, xml_base_name)
xml_file = open(xml_file_name, "w")
xml_text = myxml.toprettyxml()
xml_text += "<!-- This file has been automatically generated by generate_xml_memory_map function." + " /!-->\n"
xml_file.write(xml_text)
xml_file.close()
def generate_xml_ic(self, memory_map):
"""
        Generate an xml interconnect file that represents the top-level AXI4-Lite interconnect for Oxford's xml2vhdl.
"""
#TODO: Fix the above docstring to be more descriptive. And maybe give some hint about what the heck
# `memory_map` is supposed to be.
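        # The generated interconnect XML looks roughly like this (illustrative):
        #   <node id="axi4lite_top" address="<axi_ic_base_address>" hw_type="ic">
        #     <node id="<interface>" address="<relative_address>"
        #           link="<interface>_memory_map_output.xml"/>
        #   </node>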
# loop over interfaces, sort by address, make interconnect
xml_root = ET.Element('node')
xml_root.set('id', 'axi4lite_top')
xml_root.set('address', hex(self.plat.axi_ic_base_address))
xml_root.set('hw_type', 'ic')
for interface in list(sorted(memory_map.keys())):
# add a child to parent node
node = ET.SubElement(xml_root, 'node')
node.set('id', interface)
node.set('address', "%s" % memory_map[interface]['relative_address'])
node.set('link', "%s" % interface + "_memory_map_output.xml")
# output xml file describing interconnect as input for xml2vhdl
myxml = xml.dom.minidom.parseString(ET.tostring(xml_root))
xml_base_name = "axi4lite_top_ic_memory_map.xml"
xml_file_name = os.path.join(self.xml_source_dir, xml_base_name)
xml_file = open(xml_file_name, "w")
xml_text = myxml.toprettyxml()
xml_text += "<!-- This file has been automatically generated by generate_xml_memory_map function." + " /!-->\n"
xml_file.write(xml_text)
xml_file.close()
def xml2vhdl(self):
"""
Function to call Oxford's python code to generate AXI4-Lite VHDL register
interfaces from an XML memory map specification.
Obtained from: https://bitbucket.org/ricch/xml2vhdl/src/master/
"""
from xml2vhdl.xml2vhdl import Xml2VhdlGenerate, helper
# make input and output directories
if not os.path.exists(self.xml_source_dir):
os.makedirs(self.xml_source_dir)
if not os.path.exists(self.xml_output_dir):
os.makedirs(self.xml_output_dir)
if not os.path.exists(self.hdl_output_dir):
os.makedirs(self.hdl_output_dir)
# generate xml memory maps for input
self.generate_xml_memory_map(self.top.memory_map)
# generate xml interconnect for input
self.generate_xml_ic(self.top.memory_map)
# execute xml2vhdl generation
try:
# Xml2VhdlGenerate takes arguments as attributes of an args class
args = helper.arguments.Arguments()
# see the help of the xml2vhdl.py script
args.input_folder = [self.xml_source_dir] # Needs to be a list (can be multiple directories)
args.vhdl_output = self.hdl_output_dir
args.xml_output = self.xml_output_dir
args.bus_library = "xil_defaultlib"
args.slave_library = "xil_defaultlib"
self.logger.info("Trying to generate AXI HDL from XML")
self.logger.info(" Input directory: %s" % args.input_folder)
self.logger.info(" Output XML directory: %s" % args.xml_output)
self.logger.info(" Output directory: %s" % args.vhdl_output)
self.logger.info(" Slave library: %s" % args.slave_library)
self.logger.info(" Bus library: %s" % args.bus_library)
Xml2VhdlGenerate(args)
except:
self.logger.error("Failed to generate AXI HDL from XML!")
# Throw whatever error was caught
raise
def _gen_hdl_simulink(self, hdl_sysgen_filename):
"""
        This function replaces incorrectly generated Simulink System Generator code with corrected code. In this case, the
        dual-port RAM latency is generated incorrectly when using Vivado 2018.2 or 2018.2.2. The code is only replaced if
        a dual-port RAM is used and version 2018.2 or 2018.2.2 is detected.