5 package types
6
7 import (
8 "math"
9 "slices"
10
11 "cmd/compile/internal/base"
12 "cmd/internal/src"
13 "internal/types/errors"
14 )
15
// PtrSize is the size in bytes of a pointer on the target
// architecture. It must be set before sizes are computed;
// CalcSize returns early while PtrSize == 0.
var PtrSize int

// RegSize is the size in bytes of a general-purpose register on the
// target architecture (used for 64-bit value alignment and argument
// frame rounding below).
var RegSize int
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
// Layout parameters (in bytes) for slice and string values on the
// target architecture. StringSize and SliceSize are used as the
// widths of TSTRING and TSLICE in CalcSize.
var (
	SlicePtrOffset int64
	SliceLenOffset int64
	SliceCapOffset int64

	SliceSize  int64
	StringSize int64
)

// SkipSizeForTracing, together with base.EnableTrace, makes CalcSize
// a no-op (used to suppress size computation while tracing).
var SkipSizeForTracing bool
46
47
48
49 func typePos(t *Type) src.XPos {
50 if pos := t.Pos(); pos.IsKnown() {
51 return pos
52 }
53 base.Fatalf("bad type: %v", t)
54 panic("unreachable")
55 }
56
57
// MaxWidth is the maximum size in bytes of a value on the target
// architecture; larger types are rejected (see calcStructOffset,
// expandiface, and CalcArraySize).
var MaxWidth int64

// CalcSizeDisabled, when set, turns sizing a not-yet-calculated type
// into a fatal error — presumably to catch size calculations at
// compiler phases where they are not expected (TODO confirm).
var CalcSizeDisabled bool

// defercalc is the nesting depth of DeferCheckSize/ResumeCheckSize;
// while nonzero, CheckSize queues types instead of sizing them.
var defercalc int
67
68
69 func RoundUp(o int64, r int64) int64 {
70 if r < 1 || r > 8 || r&(r-1) != 0 {
71 base.Fatalf("Round %d", r)
72 }
73 return (o + r - 1) &^ (r - 1)
74 }
75
76
77
// expandiface computes the full method set of interface type t by
// flattening embedded interfaces, diagnosing duplicate methods, and
// assigning each method a pointer-sized slot offset in the method
// table.
func expandiface(t *Type) {
	seen := make(map[*Sym]*Field)
	var methods []*Field

	// addMethod records m in the method set. An implicit (embedded)
	// re-declaration that is identical to a previous one is silently
	// dropped; any other duplicate is reported as an error but still
	// appended.
	addMethod := func(m *Field, explicit bool) {
		switch prev := seen[m.Sym]; {
		case prev == nil:
			seen[m.Sym] = m
		case !explicit && Identical(m.Type, prev.Type):
			return
		default:
			base.ErrorfAt(m.Pos, errors.DuplicateDecl, "duplicate method %s", m.Sym.Name)
		}
		methods = append(methods, m)
	}

	{
		// Sort the declared entries: embedded interfaces (Sym == nil)
		// first, so they are expanded in a deterministic order.
		methods := t.Methods()
		slices.SortStableFunc(methods, func(a, b *Field) int {
			// Both embedded: order by the embedded type's symbol.
			if a.Sym == nil && b.Sym == nil {
				return CompareSyms(a.Type.Sym(), b.Type.Sym())
			}

			// Embedded entries sort before named methods.
			if a.Sym == nil {
				return -1
			} else if b.Sym == nil {
				return +1
			}

			// Named methods are ordered by name.
			return CompareSyms(a.Sym, b.Sym)
		})
	}

	// First pass: explicitly declared methods.
	for _, m := range t.Methods() {
		if m.Sym == nil {
			continue
		}

		CheckSize(m.Type)
		addMethod(m, true)
	}

	// Second pass: methods inherited from embedded interfaces.
	for _, m := range t.Methods() {
		if m.Sym != nil || m.Type == nil {
			continue
		}

		// Only interface embeddings contribute methods here; other
		// embedded types are skipped.
		if !m.Type.IsInterface() {
			continue
		}

		for _, t1 := range m.Type.AllMethods() {
			// Use the embedding's position for duplicate-method
			// diagnostics in addMethod, then clear it.
			f := NewField(m.Pos, t1.Sym, t1.Type)
			addMethod(f, false)

			f.Pos = src.NoXPos
		}

		// Position no longer needed once expansion is done.
		m.Pos = src.NoXPos
	}

	slices.SortFunc(methods, CompareFields)

	// Each method occupies one pointer-sized slot, so the total
	// method table must fit within MaxWidth.
	if int64(len(methods)) >= MaxWidth/int64(PtrSize) {
		base.ErrorfAt(typePos(t), 0, "interface too large")
	}
	for i, m := range methods {
		m.Offset = int64(i) * int64(PtrSize)
	}

	t.SetAllMethods(methods)
}
160
161
162
163
// calcStructOffset lays out fields sequentially starting at offset,
// aligning each field to its type's alignment, and returns the end
// offset (before any trailing struct padding). Field offsets are
// recorded only when t is a genuine struct (the TFUNCARGS case in
// CalcSize reuses this helper with a function type).
func calcStructOffset(t *Type, fields []*Field, offset int64) int64 {
	for _, f := range fields {
		CalcSize(f.Type)
		offset = RoundUp(offset, int64(f.Type.align))

		if t.IsStruct() {
			f.Offset = offset

			// A struct containing a not-in-heap field is itself
			// not-in-heap.
			if f.Type.NotInHeap() {
				t.SetNotInHeap(true)
			}
		}

		offset += f.Type.width

		maxwidth := MaxWidth
		// On 32-bit targets, cap offsets so every field offset fits
		// in 31 bits (NOTE(review): presumably a reflect-metadata
		// constraint — confirm).
		if maxwidth < 1<<32 {
			maxwidth = 1<<31 - 1
		}
		if offset >= maxwidth {
			base.ErrorfAt(typePos(t), 0, "type %L too large", t)
			offset = 8 // small-but-nonzero value to avoid cascading errors
		}
	}

	return offset
}
197
198 func isAtomicStdPkg(p *Pkg) bool {
199 if p.Prefix == `""` {
200 panic("bad package prefix")
201 }
202 return p.Prefix == "sync/atomic" || p.Prefix == "internal/runtime/atomic"
203 }
204
205
206
207
208
209
// CalcSize computes and records the layout of t: size (t.width),
// alignment (t.align), register classification (t.intRegs,
// t.floatRegs), equality algorithm (t.alg), and pointer prefix
// length (t.ptrBytes). It is idempotent for already-sized types and
// a fatal error for invalid recursive types.
func CalcSize(t *Type) {
	// Optionally skip all size work while tracing.
	if base.EnableTrace && SkipSizeForTracing {
		return
	}
	if PtrSize == 0 {
		// Target architecture parameters not set up yet.
		return
	}

	if t == nil {
		return
	}

	if t.width == -2 {
		// width == -2 marks a type currently being laid out;
		// re-entering means t contains itself without indirection.
		t.width = 0
		t.align = 1
		base.Fatalf("invalid recursive type %v", t)
		return
	}

	if t.widthCalculated() {
		return
	}

	if CalcSizeDisabled {
		base.Fatalf("width not calculated: %v", t)
	}

	// Defer nested CheckSize calls until t's own layout finishes, so
	// mutually referential types do not recurse indefinitely.
	DeferCheckSize()

	// Report any errors at t's declaration position; restored below.
	lno := base.Pos
	if pos := t.Pos(); pos.IsKnown() {
		base.Pos = pos
	}

	// Mark t in-progress (see the width == -2 check above).
	t.width = -2
	t.align = 0
	t.alg = AMEM

	if t.Noalg() {
		t.setAlg(ANOALG)
	}

	// Map t's kind to its architecture-simplified kind, except for
	// kinds whose layout logic below must see the original kind.
	et := t.Kind()
	switch et {
	case TFUNC, TCHAN, TMAP, TSTRING:
		break

	// SimType == 0 means the kind is not simplified.
	default:
		if SimType[t.Kind()] != 0 {
			et = SimType[t.Kind()]
		}
	}

	var w int64
	switch et {
	default:
		base.Fatalf("CalcSize: unknown type: %v", t)

	case TINT8, TUINT8, TBOOL:
		// bool is laid out as int8.
		w = 1
		t.intRegs = 1

	case TINT16, TUINT16:
		w = 2
		t.intRegs = 1

	case TINT32, TUINT32:
		w = 4
		t.intRegs = 1

	case TINT64, TUINT64:
		w = 8
		t.align = uint8(RegSize)
		// Two registers when RegSize == 4, one when RegSize == 8.
		t.intRegs = uint8(8 / RegSize)

	case TFLOAT32:
		w = 4
		t.floatRegs = 1
		t.setAlg(AFLOAT32)

	case TFLOAT64:
		w = 8
		t.align = uint8(RegSize)
		t.floatRegs = 1
		t.setAlg(AFLOAT64)

	case TCOMPLEX64:
		w = 8
		t.align = 4
		t.floatRegs = 2
		t.setAlg(ACPLX64)

	case TCOMPLEX128:
		w = 16
		t.align = uint8(RegSize)
		t.floatRegs = 2
		t.setAlg(ACPLX128)

	case TPTR:
		w = int64(PtrSize)
		t.intRegs = 1
		CheckSize(t.Elem())
		t.ptrBytes = int64(PtrSize)

	case TUNSAFEPTR:
		w = int64(PtrSize)
		t.intRegs = 1
		t.ptrBytes = int64(PtrSize)

	case TINTER:
		// Interface values are two pointer-sized words, both of
		// which are pointers for GC purposes.
		w = 2 * int64(PtrSize)
		t.align = uint8(PtrSize)
		t.intRegs = 2
		expandiface(t)
		if len(t.allMethods.Slice()) == 0 {
			t.setAlg(ANILINTER)
		} else {
			t.setAlg(AINTER)
		}
		t.ptrBytes = int64(2 * PtrSize)

	case TCHAN:
		// A channel value is a single pointer.
		w = int64(PtrSize)
		t.intRegs = 1
		t.ptrBytes = int64(PtrSize)

		CheckSize(t.Elem())

		// Queue a fake TCHANARGS type so the element-size limit is
		// checked once the element's size is known.
		t1 := NewChanArgs(t)
		CheckSize(t1)

	case TCHANARGS:
		t1 := t.ChanArgs()
		CalcSize(t1) // just in case

		// Ensure the element size is known, then enforce the 64kB
		// channel element limit.
		CalcSize(t1.Elem())
		if t1.Elem().width >= 1<<16 {
			base.Errorf("channel element type too large (>64kB)")
		}
		w = 1 // anything will do

	case TMAP:
		// A map value is a single pointer.
		w = int64(PtrSize)
		t.intRegs = 1
		CheckSize(t.Elem())
		CheckSize(t.Key())
		t.setAlg(ANOEQ)
		t.ptrBytes = int64(PtrSize)

	case TFORW:
		// Should have been resolved by now.
		base.Fatalf("invalid recursive type %v", t)

	case TANY:
		base.Fatalf("CalcSize any")

	case TSTRING:
		if StringSize == 0 {
			base.Fatalf("early CalcSize string")
		}
		w = StringSize
		t.align = uint8(PtrSize)
		t.intRegs = 2
		t.setAlg(ASTRING)
		// Only the leading data pointer is a GC pointer.
		t.ptrBytes = int64(PtrSize)

	case TARRAY:
		if t.Elem() == nil {
			break
		}
		CalcArraySize(t)
		w = t.width

	case TSLICE:
		if t.Elem() == nil {
			break
		}
		w = SliceSize
		CheckSize(t.Elem())
		t.align = uint8(PtrSize)
		t.intRegs = 3
		t.setAlg(ANOEQ)
		// The data pointer counts as a GC pointer only for
		// heap-allocatable element types.
		if !t.Elem().NotInHeap() {
			t.ptrBytes = int64(PtrSize)
		}

	case TSTRUCT:
		if t.IsFuncArgStruct() {
			// Argument structs are laid out under TFUNCARGS below.
			base.Fatalf("CalcSize fn struct %v", t)
		}
		CalcStructSize(t)
		w = t.width

	// A function value is a single pointer; the argument frame is
	// sized separately via a fake TFUNCARGS type.
	case TFUNC:
		t1 := NewFuncArgs(t)
		CheckSize(t1)
		w = int64(PtrSize)
		t.intRegs = 1
		t.setAlg(ANOEQ)
		t.ptrBytes = int64(PtrSize)

	// Argument frame layout: receiver, then parameters, then
	// results, with register-size rounding between groups.
	case TFUNCARGS:
		t1 := t.FuncArgs()

		w = calcStructOffset(t1, t1.Recvs(), 0)
		w = calcStructOffset(t1, t1.Params(), w)
		w = RoundUp(w, int64(RegSize))
		w = calcStructOffset(t1, t1.Results(), w)
		w = RoundUp(w, int64(RegSize))
		t1.extra.(*Func).Argwid = w
		t.align = 1
	}

	// With 32-bit pointers the total size must fit in int32.
	if PtrSize == 4 && w != int64(int32(w)) {
		base.Errorf("type %v too large", t)
	}

	t.width = w
	if t.align == 0 {
		// Default alignment is the size itself, which is only valid
		// for power-of-two sizes up to 8 bytes.
		if w == 0 || w > 8 || w&(w-1) != 0 {
			base.Fatalf("invalid alignment for %v", t)
		}
		t.align = uint8(w)
	}

	base.Pos = lno

	ResumeCheckSize()
}
454
455
456
457
// CalcStructSize computes the layout of struct type t: size,
// alignment, register counts, equality algorithm, and pointer prefix
// length (t.ptrBytes).
func CalcStructSize(t *Type) {
	var maxAlign uint8 = 1

	// The "align64" type in the standard atomic packages is forced
	// to 8-byte alignment.
	if sym := t.Sym(); sym != nil {
		switch {
		case sym.Name == "align64" && isAtomicStdPkg(sym.Pkg):
			maxAlign = 8
		}
	}

	fields := t.Fields()
	size := calcStructOffset(t, fields, 0)

	// A nonzero-sized struct ending in a zero-sized field gets one
	// byte of padding (NOTE(review): presumably so the address of
	// that final field cannot point past the end of the struct —
	// confirm against runtime GC requirements).
	if size > 0 && fields[len(fields)-1].Type.width == 0 {
		size++
	}

	var intRegs, floatRegs uint64
	for _, field := range fields {
		typ := field.Type

		// Struct alignment is the maximum field alignment (never
		// below maxAlign's starting value, see align64 above).
		if align := typ.align; align > maxAlign {
			maxAlign = align
		}

		// Accumulate the registers needed across all fields.
		intRegs += uint64(typ.intRegs)
		floatRegs += uint64(typ.floatRegs)
	}

	// Final size includes trailing padding.
	size = RoundUp(size, int64(maxAlign))

	// Saturate the register counts at MaxUint8 on overflow.
	if intRegs > math.MaxUint8 || floatRegs > math.MaxUint8 {
		intRegs = math.MaxUint8
		floatRegs = math.MaxUint8
	}

	t.width = size
	t.align = maxAlign
	t.intRegs = uint8(intRegs)
	t.floatRegs = uint8(floatRegs)

	// Compute the equality algorithm.
	t.alg = AMEM
	if t.Noalg() {
		t.setAlg(ANOALG)
	}
	if len(fields) == 1 && !fields[0].Sym.IsBlank() {
		// A single named field inherits its type's algorithm.
		t.setAlg(fields[0].Type.alg)
	} else {
		for i, f := range fields {
			a := f.Type.alg
			switch a {
			case ANOEQ, ANOALG:
			case AMEM:
				// Blank fields and padding after a field rule out a
				// plain memory comparison.
				if f.Sym.IsBlank() || IsPaddedField(t, i) {
					a = ASPECIAL
				}
			default:
				// Fields with non-memory equality force a generated
				// comparison routine.
				a = ASPECIAL
			}
			t.setAlg(a)
		}
	}

	// ptrBytes extends through the last field that contains pointers.
	for i := len(fields) - 1; i >= 0; i-- {
		f := fields[i]
		if size := PtrDataSize(f.Type); size > 0 {
			t.ptrBytes = f.Offset + size
			break
		}
	}
}
544
545
546
547
// CalcArraySize computes the layout of array type t (size, alignment,
// register counts, equality algorithm, pointer prefix) from its
// element type.
func CalcArraySize(t *Type) {
	elem := t.Elem()
	n := t.NumElem()
	CalcSize(elem)
	t.SetNotInHeap(elem.NotInHeap())
	// Reject arrays whose total size would exceed the address space.
	if elem.width != 0 {
		cap := (uint64(MaxWidth) - 1) / uint64(elem.width)
		if uint64(n) > cap {
			base.Errorf("type %L larger than address space", t)
		}
	}

	t.width = elem.width * n
	t.align = elem.align

	// Register classification: empty arrays need none, one-element
	// arrays pass like their element, anything larger saturates the
	// counts (NOTE(review): MaxUint8 presumably means "pass in
	// memory" — confirm with the ABI code).
	switch n {
	case 0:
		t.intRegs = 0
		t.floatRegs = 0
	case 1:
		t.intRegs = elem.intRegs
		t.floatRegs = elem.floatRegs
	default:
		t.intRegs = math.MaxUint8
		t.floatRegs = math.MaxUint8
	}
	t.alg = AMEM
	if t.Noalg() {
		t.setAlg(ANOALG)
	}
	switch a := elem.alg; a {
	case AMEM, ANOEQ, ANOALG:
		t.setAlg(a)
	default:
		switch n {
		case 0:
			// Empty arrays compare trivially equal.
			t.setAlg(AMEM)
		case 1:
			// A one-element array compares like its element.
			t.setAlg(a)
		default:
			t.setAlg(ASPECIAL)
		}
	}
	// Pointer data ends after the pointers within the last element.
	if n > 0 {
		x := PtrDataSize(elem)
		if x > 0 {
			t.ptrBytes = elem.width*(n-1) + x
		}
	}
}
601
602 func (t *Type) widthCalculated() bool {
603 return t.align > 0
604 }
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
// deferredTypeStack holds types whose sizing was queued by CheckSize
// while defercalc was nonzero; ResumeCheckSize drains it LIFO.
var deferredTypeStack []*Type
623
624 func CheckSize(t *Type) {
625 if t == nil {
626 return
627 }
628
629
630
631 if t.IsFuncArgStruct() {
632 base.Fatalf("CheckSize %v", t)
633 }
634
635 if defercalc == 0 {
636 CalcSize(t)
637 return
638 }
639
640
641 if !t.Deferwidth() {
642 t.SetDeferwidth(true)
643 deferredTypeStack = append(deferredTypeStack, t)
644 }
645 }
646
// DeferCheckSize begins a phase in which CheckSize queues types
// instead of sizing them; it must be paired with ResumeCheckSize.
func DeferCheckSize() {
	defercalc++
}
650
// ResumeCheckSize ends a DeferCheckSize phase. When the outermost
// deferral ends, all queued types are sized. The CalcSize calls below
// may push more types onto the stack, so its length is re-read on
// every iteration.
func ResumeCheckSize() {
	if defercalc == 1 {
		for len(deferredTypeStack) > 0 {
			t := deferredTypeStack[len(deferredTypeStack)-1]
			deferredTypeStack = deferredTypeStack[:len(deferredTypeStack)-1]
			t.SetDeferwidth(false)
			CalcSize(t)
		}
	}

	defercalc--
}
663
664
665
666
667
668
669 func PtrDataSize(t *Type) int64 {
670 CalcSize(t)
671 x := t.ptrBytes
672 if t.Kind() == TPTR && t.Elem().NotInHeap() {
673
674
675
676 x = 0
677 }
678 return x
679 }
680