package types

import (
	"math"
	"slices"

	"cmd/compile/internal/base"
	"cmd/internal/src"
	"internal/buildcfg"
	"internal/types/errors"
)

// PtrSize is the size in bytes of a pointer on the target architecture.
var PtrSize int

// RegSize is the size in bytes of a general-purpose register on the
// target architecture.
var RegSize int

// Slices in the runtime are represented by three components:
//
//	type slice struct {
//		ptr unsafe.Pointer
//		len int
//		cap int
//	}
//
// Strings in the runtime are represented by two components:
//
//	type string struct {
//		ptr unsafe.Pointer
//		len int
//	}
//
// These variables are the offsets of fields and sizes of these structs.
var (
	SlicePtrOffset int64
	SliceLenOffset int64
	SliceCapOffset int64

	SliceSize  int64
	StringSize int64
)

// SkipSizeForTracing is used to avoid calling CalcSize while
// typecheck tracing (base.EnableTrace) is enabled; see CalcSize.
var SkipSizeForTracing bool

// typePos returns the position associated with t.
// This is where t was declared or where it appeared as a type expression.
func typePos(t *Type) src.XPos {
	if pos := t.Pos(); pos.IsKnown() {
		return pos
	}
	base.Fatalf("bad type: %v", t)
	panic("unreachable")
}

// MaxWidth is the maximum size in bytes of a value on the target
// architecture.
var MaxWidth int64

// CalcSizeDisabled indicates whether it is safe
// to calculate Types' widths and alignments. See CalcSize.
var CalcSizeDisabled bool

// defercalc is the nesting depth of DeferCheckSize calls. While it is
// nonzero, CheckSize queues types on deferredTypeStack instead of
// sizing them immediately.
var defercalc int

// RoundUp rounds o to a multiple of r, where r is a power of 2 no
// larger than 8.
func RoundUp(o int64, r int64) int64 {
	if r < 1 || r > 8 || r&(r-1) != 0 {
		base.Fatalf("Round %d", r)
	}
	return (o + r - 1) &^ (r - 1)
}
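
// As a quick sketch of the arithmetic (an illustration, not compiler
// code): for a power-of-two r, (o + r - 1) &^ (r - 1) clears the low
// bits of the first value at or above o, so
//
//	RoundUp(0, 8)  == 0
//	RoundUp(1, 8)  == 8
//	RoundUp(8, 8)  == 8
//	RoundUp(13, 4) == 16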

// expandiface computes the method set for interface type t by
// expanding embedded interfaces.
func expandiface(t *Type) {
	seen := make(map[*Sym]*Field)
	var methods []*Field

	addMethod := func(m *Field, explicit bool) {
		switch prev := seen[m.Sym]; {
		case prev == nil:
			seen[m.Sym] = m
		case !explicit && Identical(m.Type, prev.Type):
			return
		default:
			base.ErrorfAt(m.Pos, errors.DuplicateDecl, "duplicate method %s", m.Sym.Name)
		}
		methods = append(methods, m)
	}

	{
		methods := t.Methods()
		slices.SortStableFunc(methods, func(a, b *Field) int {
			// Sort embedded types by type name (if any).
			if a.Sym == nil && b.Sym == nil {
				return CompareSyms(a.Type.Sym(), b.Type.Sym())
			}

			// Sort methods before embedded types.
			if a.Sym == nil {
				return -1
			} else if b.Sym == nil {
				return +1
			}

			// Sort methods by symbol name.
			return CompareSyms(a.Sym, b.Sym)
		})
	}

	// Add explicit methods first.
	for _, m := range t.Methods() {
		if m.Sym == nil {
			continue
		}

		CheckSize(m.Type)
		addMethod(m, true)
	}

	// Then expand embedded types.
	for _, m := range t.Methods() {
		if m.Sym != nil || m.Type == nil {
			continue
		}

		// Only embedded interfaces contribute methods. (Other types
		// may be embedded in constraint interfaces; they add no
		// methods.)
		if !m.Type.IsInterface() {
			continue
		}

		// Embedded interface: duplicate all methods
		// and add them to t's method set.
		for _, t1 := range m.Type.AllMethods() {
			f := NewField(m.Pos, t1.Sym, t1.Type)
			addMethod(f, false)

			// Clear the position of the embedded method, as it
			// doesn't have a "true" position of its own.
			f.Pos = src.NoXPos
		}

		// Likewise for the embedded interface itself.
		m.Pos = src.NoXPos
	}

	slices.SortFunc(methods, CompareFields)

	if int64(len(methods)) >= MaxWidth/int64(PtrSize) {
		base.ErrorfAt(typePos(t), 0, "interface too large")
	}
	for i, m := range methods {
		m.Offset = int64(i) * int64(PtrSize)
	}

	t.SetAllMethods(methods)
}
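
// For illustration (a sketch of source-level behavior, not compiler
// code): given
//
//	type ReadCloser interface {
//		Read(p []byte) (int, error)
//		Closer
//	}
//
// expandiface copies Closer's methods into ReadCloser's method set,
// reporting duplicates that conflict, sorts the combined set, and
// assigns each method a pointer-sized slot offset used for
// method-table layout.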

// calcStructOffset computes the offsets of a sequence of fields,
// starting at the given offset. It returns the resulting offset, just
// past the last field.
func calcStructOffset(t *Type, fields []*Field, offset int64) int64 {
	for _, f := range fields {
		CalcSize(f.Type)
		offset = RoundUp(offset, int64(f.Type.align))

		if t.IsStruct() {
			f.Offset = offset

			// If any field is marked not-in-heap, the struct must be
			// not-in-heap too: otherwise one could heap-allocate t
			// and take the address of the field, yielding a heap
			// pointer to a not-in-heap type.
			if f.Type.NotInHeap() {
				t.SetNotInHeap(true)
			}
		}

		offset += f.Type.width

		maxwidth := MaxWidth
		// On 32-bit systems, reflect tables impose an additional
		// constraint that each field offset must fit in 31 bits.
		if maxwidth < 1<<32 {
			maxwidth = 1<<31 - 1
		}
		if offset >= maxwidth {
			base.ErrorfAt(typePos(t), 0, "type %L too large", t)
			offset = 8 // small but nonzero
		}
	}

	return offset
}
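
// A worked example (assuming a 64-bit target, PtrSize = RegSize = 8):
// for the fields of
//
//	struct {
//		a byte
//		b int64
//		c byte
//	}
//
// the loop assigns offsets 0, 8, and 16 (b is rounded up to its 8-byte
// alignment) and returns 17, just past c. The trailing padding that
// rounds the struct up to size 24 is added later, in CalcStructSize.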

// isAtomicStdPkg reports whether p is the sync/atomic or
// internal/runtime/atomic package, whose align64 marker type is given
// 8-byte alignment so that structs embedding it are 8-byte aligned.
func isAtomicStdPkg(p *Pkg) bool {
	if p.Prefix == `""` {
		panic("bad package prefix")
	}
	return p.Prefix == "sync/atomic" || p.Prefix == "internal/runtime/atomic"
}

// CalcSize calculates and stores the size, alignment, eq/hash
// algorithm, and pointer-data size for t.
// If CalcSizeDisabled is set, and the size/alignment
// have not already been calculated, it calls base.Fatalf.
// This is used to prevent data races in the back end.
func CalcSize(t *Type) {
	// Calling CalcSize when typecheck tracing is enabled is not safe.
	// See issue #33658.
	if base.EnableTrace && SkipSizeForTracing {
		return
	}
	if PtrSize == 0 {
		// Assume this is a test; the target isn't configured yet.
		return
	}

	if t == nil {
		return
	}

	if t.width == -2 {
		// -2 marks a type whose size is currently being calculated;
		// seeing it again means t refers to itself by value.
		t.width = 0
		t.align = 1
		base.Fatalf("invalid recursive type %v", t)
		return
	}

	if t.widthCalculated() {
		return
	}

	if CalcSizeDisabled {
		base.Fatalf("width not calculated: %v", t)
	}

	// Defer CheckSize calls until we're done with this type.
	DeferCheckSize()

	lno := base.Pos
	if pos := t.Pos(); pos.IsKnown() {
		base.Pos = pos
	}

	t.width = -2 // mark in progress; see above
	t.align = 0
	t.alg = AMEM // default
	if t.Noalg() {
		t.setAlg(ANOALG)
	}

	et := t.Kind()
	switch et {
	case TFUNC, TCHAN, TMAP, TSTRING:
		break

	default:
		// Use the simplified machine type, if one is registered.
		if SimType[t.Kind()] != 0 {
			et = SimType[t.Kind()]
		}
	}

	var w int64
	switch et {
	default:
		base.Fatalf("CalcSize: unknown type: %v", t)

	case TINT8, TUINT8, TBOOL:
		// bool is int8
		w = 1
		t.intRegs = 1

	case TINT16, TUINT16:
		w = 2
		t.intRegs = 1

	case TINT32, TUINT32:
		w = 4
		t.intRegs = 1

	case TINT64, TUINT64:
		w = 8
		t.align = uint8(RegSize)
		t.intRegs = uint8(8 / RegSize)

	case TFLOAT32:
		w = 4
		t.floatRegs = 1
		t.setAlg(AFLOAT32)

	case TFLOAT64:
		w = 8
		t.align = uint8(RegSize)
		t.floatRegs = 1
		t.setAlg(AFLOAT64)

	case TCOMPLEX64:
		w = 8
		t.align = 4
		t.floatRegs = 2
		t.setAlg(ACPLX64)

	case TCOMPLEX128:
		w = 16
		t.align = uint8(RegSize)
		t.floatRegs = 2
		t.setAlg(ACPLX128)

	case TPTR:
		w = int64(PtrSize)
		t.intRegs = 1
		CheckSize(t.Elem())
		t.ptrBytes = int64(PtrSize) // pointers to not-in-heap types are corrected in PtrDataSize

	case TUNSAFEPTR:
		w = int64(PtrSize)
		t.intRegs = 1
		t.ptrBytes = int64(PtrSize)

	case TINTER: // implemented as 2 pointers
		w = 2 * int64(PtrSize)
		t.align = uint8(PtrSize)
		t.intRegs = 2
		expandiface(t)
		if len(t.allMethods.Slice()) == 0 {
			t.setAlg(ANILINTER)
		} else {
			t.setAlg(AINTER)
		}
		t.ptrBytes = int64(2 * PtrSize)

	case TCHAN: // implemented as pointer
		w = int64(PtrSize)
		t.intRegs = 1
		t.ptrBytes = int64(PtrSize)

		CheckSize(t.Elem())

		// Make a fake type to trigger the channel element size check
		// once the element's size is known.
		t1 := NewChanArgs(t)
		CheckSize(t1)

	case TCHANARGS:
		t1 := t.ChanArgs()
		CalcSize(t1) // just in case

		// Make sure the size of t1's element is calculated, then
		// enforce the limit on channel element size.
		CalcSize(t1.Elem())
		if t1.Elem().width >= 1<<16 {
			base.Errorf("channel element type too large (>64kB)")
		}
		w = 1 // anything will do

	case TMAP: // implemented as pointer
		w = int64(PtrSize)
		t.intRegs = 1
		CheckSize(t.Elem())
		CheckSize(t.Key())
		t.setAlg(ANOEQ)
		t.ptrBytes = int64(PtrSize)

	case TFORW: // should have been filled in
		base.Fatalf("invalid recursive type %v", t)

	case TANY: // not a real type; should be replaced before use
		base.Fatalf("CalcSize any")

	case TSTRING:
		if StringSize == 0 {
			base.Fatalf("early CalcSize string")
		}
		w = StringSize
		t.align = uint8(PtrSize)
		t.intRegs = 2
		t.setAlg(ASTRING)
		t.ptrBytes = int64(PtrSize) // just the data pointer

	case TARRAY:
		if t.Elem() == nil {
			break
		}
		CalcArraySize(t)
		w = t.width

	case TSLICE:
		if t.Elem() == nil {
			break
		}
		w = SliceSize
		CheckSize(t.Elem())
		t.align = uint8(PtrSize)
		t.intRegs = 3
		t.setAlg(ANOEQ)
		// Slices of not-in-heap elements carry a data pointer the GC
		// must not scan, so they count as pointer-free.
		if !t.Elem().NotInHeap() {
			t.ptrBytes = int64(PtrSize)
		}

	case TSTRUCT:
		if t.IsFuncArgStruct() {
			base.Fatalf("CalcSize fn struct %v", t)
		}
		CalcStructSize(t)
		w = t.width

	// Make a fake type to check later, to trigger
	// function argument size computation.
	case TFUNC:
		t1 := NewFuncArgs(t)
		CheckSize(t1)
		w = int64(PtrSize) // width of func type is pointer
		t.intRegs = 1
		t.setAlg(ANOEQ)
		t.ptrBytes = int64(PtrSize)

	// A function's arguments are laid out as three concatenated
	// struct regions: receiver, parameters, and results.
	// Compute their widths as a side effect.
	case TFUNCARGS:
		t1 := t.FuncArgs()
		w = calcStructOffset(t1, t1.Recvs(), 0)
		w = calcStructOffset(t1, t1.Params(), w)
		w = RoundUp(w, int64(RegSize))
		w = calcStructOffset(t1, t1.Results(), w)
		w = RoundUp(w, int64(RegSize))
		t1.extra.(*Func).Argwid = w
		t.align = 1
	}

	// On 32-bit targets, sizes must fit in int32.
	if PtrSize == 4 && w != int64(int32(w)) {
		base.Errorf("type %v too large", t)
	}

	t.width = w
	if t.align == 0 {
		// Natural alignment: the width itself, which must be a
		// small power of two.
		if w == 0 || w > 8 || w&(w-1) != 0 {
			base.Fatalf("invalid alignment for %v", t)
		}
		t.align = uint8(w)
	}

	base.Pos = lno

	ResumeCheckSize()
}
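
// The widths computed here match what the language reports via
// unsafe.Sizeof. For example, on a typical 64-bit target
// (PtrSize = RegSize = 8):
//
//	bool        width 1,  align 1
//	int64       width 8,  align 8
//	complex64   width 8,  align 4
//	string      width 16, align 8
//	[]int       width 24, align 8
//	interface{} width 16, align 8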

// simdify marks st as a SIMD type, which is aligned like a float64.
// Tag types (isTag) are zero-width marker structs embedded as the
// first field of SIMD vector types; actual vector values occupy one
// floating-point register.
func simdify(st *Type, isTag bool) {
	st.align = 8
	st.alg = ANOALG
	st.intRegs = 0
	st.isSIMD = true
	if isTag {
		st.width = 0
		st.isSIMDTag = true
		st.floatRegs = 0
	} else {
		st.floatRegs = 1
	}
}

// CalcStructSize calculates the size of t,
// filling in t.width, t.align, t.alg, t.ptrBytes,
// t.intRegs, and t.floatRegs,
// even if size calculation is otherwise disabled.
func CalcStructSize(t *Type) {
	var maxAlign uint8 = 1

	// Recognize special types whose layout is fixed by name rather
	// than by their fields.
	if sym := t.Sym(); sym != nil {
		switch {
		case sym.Name == "align64" && isAtomicStdPkg(sym.Pkg):
			maxAlign = 8

		case buildcfg.Experiment.SIMD && (sym.Pkg.Path == "simd/archsimd") && len(t.Fields()) >= 1:
			// The SIMD vector tag types: zero-width markers whose
			// presence as a first field makes a struct a SIMD type.
			switch sym.Name {
			case "v128":
				simdify(t, true)
				return
			case "v256":
				simdify(t, true)
				return
			case "v512":
				simdify(t, true)
				return
			}
		}
	}

	fields := t.Fields()

	size := calcStructOffset(t, fields, 0)

	// For non-zero-sized structs which end in a zero-sized field, we
	// add an extra byte of padding to the type. This padding ensures
	// that taking the address of the zero-sized field can't
	// manufacture a pointer to the next object in the heap.
	// See issue 9401.
	if size > 0 && fields[len(fields)-1].Type.width == 0 {
		size++
	}

	var intRegs, floatRegs uint64
	for _, field := range fields {
		typ := field.Type

		// The alignment of a struct type is the maximum alignment of
		// its field types.
		if align := typ.align; align > maxAlign {
			maxAlign = align
		}

		// Sum the register counts in uint64 to avoid overflowing the
		// per-type uint8 counters.
		intRegs += uint64(typ.intRegs)
		floatRegs += uint64(typ.floatRegs)
	}

	// Final size includes trailing padding.
	size = RoundUp(size, int64(maxAlign))

	if intRegs > math.MaxUint8 || floatRegs > math.MaxUint8 {
		intRegs = math.MaxUint8
		floatRegs = math.MaxUint8
	}

	t.width = size
	t.align = maxAlign
	t.intRegs = uint8(intRegs)
	t.floatRegs = uint8(floatRegs)

	// Compute the eq/hash algorithm type.
	t.alg = AMEM // default
	if t.Noalg() {
		t.setAlg(ANOALG)
	}
	if len(fields) == 1 && !fields[0].Sym.IsBlank() {
		// One-field struct is same as that one field alone.
		t.setAlg(fields[0].Type.alg)
	} else {
		for i, f := range fields {
			a := f.Type.alg
			switch a {
			case ANOEQ, ANOALG:
			case AMEM:
				// Blank fields and padded fields need a special compare.
				if f.Sym.IsBlank() || IsPaddedField(t, i) {
					a = ASPECIAL
				}
			default:
				// Fields with non-memory equality need a special compare.
				a = ASPECIAL
			}
			t.setAlg(a)
		}
	}

	// Pointer data extends through the last field that contains
	// pointers.
	for i := len(fields) - 1; i >= 0; i-- {
		f := fields[i]
		if size := PtrDataSize(f.Type); size > 0 {
			t.ptrBytes = f.Offset + size
			break
		}
	}

	if len(t.Fields()) >= 1 && t.Fields()[0].Type.isSIMDTag {
		// A struct whose first field is one of the SIMD tag types
		// above is itself a SIMD type.
		simdify(t, false)
	}
}
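
// A worked example of the trailing zero-sized field rule above
// (assuming a 64-bit target): for
//
//	struct {
//		a int32
//		b struct{}
//	}
//
// calcStructOffset places b at offset 4 with width 0; the size is then
// bumped to 5 so that &b cannot point one past the object, and
// rounding to a's 4-byte alignment gives a final size of 8.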

// CalcArraySize calculates the size of t,
// filling in t.width, t.align, t.alg, and t.ptrBytes,
// even if size calculation is otherwise disabled.
func CalcArraySize(t *Type) {
	elem := t.Elem()
	n := t.NumElem()
	CalcSize(elem)
	t.SetNotInHeap(elem.NotInHeap())
	if elem.width != 0 {
		cap := (uint64(MaxWidth) - 1) / uint64(elem.width)
		if uint64(n) > cap {
			base.Errorf("type %L larger than address space", t)
		}
	}

	t.width = elem.width * n
	t.align = elem.align

	// Arrays of more than one element cannot be passed in registers;
	// saturate the counts so the ABI keeps them in memory.
	switch n {
	case 0:
		t.intRegs = 0
		t.floatRegs = 0
	case 1:
		t.intRegs = elem.intRegs
		t.floatRegs = elem.floatRegs
	default:
		t.intRegs = math.MaxUint8
		t.floatRegs = math.MaxUint8
	}
	t.alg = AMEM // default
	if t.Noalg() {
		t.setAlg(ANOALG)
	}
	switch a := elem.alg; a {
	case AMEM, ANOEQ, ANOALG:
		t.setAlg(a)
	default:
		switch n {
		case 0:
			// No elements to compare, so equality is trivially
			// memory equality.
			t.setAlg(AMEM)
		case 1:
			// Single-element array is same as that one element alone.
			t.setAlg(a)
		default:
			t.setAlg(ASPECIAL)
		}
	}
	if n > 0 {
		x := PtrDataSize(elem)
		if x > 0 {
			// Pointer data stops after the pointers in the last element.
			t.ptrBytes = elem.width*(n-1) + x
		}
	}
}
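
// To illustrate the ptrBytes computation above (a sketch, assuming a
// 64-bit target): for
//
//	[3]struct {
//		p *byte
//		x int
//	}
//
// each element is 16 bytes with its only pointer in the first 8, so
// ptrBytes = 16*(3-1) + 8 = 40; the scalar tail of the final element
// needs no GC scanning.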

// widthCalculated reports whether t's width has been calculated.
func (t *Type) widthCalculated() bool {
	return t.align > 0
}

// When a type's size must be known, the front end calls CheckSize.
// During a declaration like
//
//	type T *struct{ next T }
//
// the size of the struct cannot be calculated until T has been
// initialized to be a pointer to that struct. Similarly, during
// import processing structs may be used before their definitions.
// In those situations, DeferCheckSize is called first, which makes
// CheckSize push types onto deferredTypeStack instead of sizing them
// immediately; ResumeCheckSize computes the deferred sizes once all
// inputs have been processed.
//
// deferredTypeStack is the queue of types whose sizing was deferred.
var deferredTypeStack []*Type

// CheckSize computes the size of t, or queues it for later if size
// calculation is currently deferred.
func CheckSize(t *Type) {
	if t == nil {
		return
	}

	// Function arg structs should not be checked
	// outside of the enclosing function.
	if t.IsFuncArgStruct() {
		base.Fatalf("CheckSize %v", t)
	}

	if defercalc == 0 {
		CalcSize(t)
		return
	}

	// Queue t, unless it is already queued.
	if !t.Deferwidth() {
		t.SetDeferwidth(true)
		deferredTypeStack = append(deferredTypeStack, t)
	}
}

// DeferCheckSize enters a region in which CheckSize calls are
// deferred; see the comment on deferredTypeStack. Calls may nest.
func DeferCheckSize() {
	defercalc++
}

// ResumeCheckSize exits one level of deferral. On leaving the
// outermost level, it drains deferredTypeStack, sizing each queued
// type.
func ResumeCheckSize() {
	if defercalc == 1 {
		for len(deferredTypeStack) > 0 {
			t := deferredTypeStack[len(deferredTypeStack)-1]
			deferredTypeStack = deferredTypeStack[:len(deferredTypeStack)-1]
			t.SetDeferwidth(false)
			CalcSize(t)
		}
	}

	defercalc--
}
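
// A sketch of the intended calling pattern (not a verbatim caller):
//
//	DeferCheckSize()
//	// ... construct types, calling CheckSize as their sizes are
//	// needed; recursive types are queued rather than sized ...
//	ResumeCheckSize() // drains the queue once construction is done
//
// Calls may nest; only the outermost ResumeCheckSize drains the queue.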

// PtrDataSize returns the length in bytes of the prefix of t
// containing pointer data. Anything after this offset is scalar data
// that the garbage collector does not need to scan.
func PtrDataSize(t *Type) int64 {
	CalcSize(t)
	x := t.ptrBytes
	if t.Kind() == TPTR && t.Elem().NotInHeap() {
		// A pointer to a not-in-heap type is a scalar as far as the
		// GC is concerned. This is corrected here, rather than where
		// ptrBytes is set, because the element's not-in-heap property
		// may not be known at that point.
		x = 0
	}
	return x
}
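
// For example (assuming a 64-bit target): for
//
//	struct {
//		x int
//		p *byte
//		y int
//	}
//
// PtrDataSize reports 16, covering x and the pointer field p at
// offset 8; y, at offset 16 and beyond, is scalar data the GC can skip.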