Source file
src/reflect/type.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16 package reflect
17
18 import (
19 "internal/abi"
20 "internal/goarch"
21 "strconv"
22 "sync"
23 "unicode"
24 "unicode/utf8"
25 "unsafe"
26 )
27
28
29
30
31
32
33
34
35
36
37
38
// Type is the representation of a Go type.
//
// Not all methods apply to all kinds of types. Restrictions,
// if any, are noted in the documentation for each method.
// Kind-specific methods panic when called on a Type of the wrong kind.
// Type values are comparable, so they may be used as map keys.
type Type interface {
	// Methods applicable to all types.

	// Align returns the alignment in bytes of a value of
	// this type when allocated in memory.
	Align() int

	// FieldAlign returns the alignment in bytes of a value of
	// this type when used as a field in a struct.
	FieldAlign() int

	// Method returns the i'th method in the type's method set.
	// It panics if i is not in the range [0, NumMethod()).
	//
	// For a non-interface type T or *T, the returned Method's Type and Func
	// fields describe a function whose first argument is the receiver.
	//
	// For an interface type, the returned Method's Type field gives the
	// method signature, without a receiver, and the Func field is nil.
	//
	// Methods are sorted in lexicographic order.
	Method(int) Method

	// MethodByName returns the method with that name in the type's
	// method set and a boolean indicating if the method was found.
	MethodByName(string) (Method, bool)

	// NumMethod returns the number of methods accessible using Method.
	//
	// For a non-interface type, it returns the number of exported methods.
	// For an interface type, it returns the number of exported and
	// unexported methods.
	NumMethod() int

	// Name returns the type's name within its package for a defined type.
	// For other (non-defined) types it returns the empty string.
	Name() string

	// PkgPath returns a defined type's package path, that is, the import path
	// that uniquely identifies the package, such as "encoding/base64".
	// If the type was predeclared (string, error) or not defined (*T, struct{},
	// []int, or A where A is an alias for a non-defined type), the package
	// path will be the empty string.
	PkgPath() string

	// Size returns the number of bytes needed to store
	// a value of the given type; it is analogous to unsafe.Sizeof.
	Size() uintptr

	// String returns a string representation of the type.
	// The string representation may use shortened package names
	// (e.g., base64 instead of "encoding/base64") and is not
	// guaranteed to be unique among types.
	String() string

	// Kind returns the specific kind of this type.
	Kind() Kind

	// Implements reports whether the type implements the interface type u.
	Implements(u Type) bool

	// AssignableTo reports whether a value of the type is assignable to type u.
	AssignableTo(u Type) bool

	// ConvertibleTo reports whether a value of the type is convertible to
	// type u.
	ConvertibleTo(u Type) bool

	// Comparable reports whether values of this type are comparable.
	Comparable() bool

	// Methods applicable only to some types, depending on Kind.

	// Bits returns the size of the type in bits.
	// It panics if the type's Kind is not one of the
	// sized or unsized Int, Uint, Float, or Complex kinds.
	Bits() int

	// ChanDir returns a channel type's direction.
	// It panics if the type's Kind is not Chan.
	ChanDir() ChanDir

	// IsVariadic reports whether a function type's final input parameter
	// is a "..." parameter. If so, t.In(t.NumIn() - 1) returns the
	// parameter's implicit actual type []T.
	// IsVariadic panics if the type's Kind is not Func.
	IsVariadic() bool

	// Elem returns a type's element type.
	// It panics if the type's Kind is not Array, Chan, Map, Pointer, or Slice.
	Elem() Type

	// Field returns a struct type's i'th field.
	// It panics if the type's Kind is not Struct or i is out of range.
	Field(i int) StructField

	// FieldByIndex returns the nested field corresponding
	// to the index sequence. It is equivalent to calling Field
	// successively for each index i.
	// It panics if the type's Kind is not Struct.
	FieldByIndex(index []int) StructField

	// FieldByName returns the struct field with the given name
	// and a boolean indicating if the field was found.
	FieldByName(name string) (StructField, bool)

	// FieldByNameFunc returns the struct field with a name
	// that satisfies the match function and a boolean indicating if
	// the field was found.
	//
	// FieldByNameFunc considers the fields in the struct itself
	// and then the fields in any embedded structs, in breadth first order,
	// stopping at the shallowest nesting depth containing one or more
	// fields satisfying the match function. If multiple fields at that
	// depth satisfy the match function, they cancel each other
	// and FieldByNameFunc returns no match.
	FieldByNameFunc(match func(string) bool) (StructField, bool)

	// In returns the type of a function type's i'th input parameter.
	// It panics if the type's Kind is not Func or i is out of range.
	In(i int) Type

	// Key returns a map type's key type.
	// It panics if the type's Kind is not Map.
	Key() Type

	// Len returns an array type's length.
	// It panics if the type's Kind is not Array.
	Len() int

	// NumField returns a struct type's field count.
	// It panics if the type's Kind is not Struct.
	NumField() int

	// NumIn returns a function type's input parameter count.
	// It panics if the type's Kind is not Func.
	NumIn() int

	// NumOut returns a function type's output parameter count.
	// It panics if the type's Kind is not Func.
	NumOut() int

	// Out returns the type of a function type's i'th output parameter.
	// It panics if the type's Kind is not Func or i is out of range.
	Out(i int) Type

	// OverflowComplex reports whether the complex128 x cannot be
	// represented by the type.
	// It panics if the type's Kind is not Complex64 or Complex128.
	OverflowComplex(x complex128) bool

	// OverflowFloat reports whether the float64 x cannot be
	// represented by the type.
	// It panics if the type's Kind is not Float32 or Float64.
	OverflowFloat(x float64) bool

	// OverflowInt reports whether the int64 x cannot be represented
	// by the type.
	// It panics if the type's Kind is not Int, Int8, Int16, Int32, or Int64.
	OverflowInt(x int64) bool

	// OverflowUint reports whether the uint64 x cannot be represented
	// by the type.
	// It panics if the type's Kind is not Uint, Uintptr, Uint8, Uint16,
	// Uint32, or Uint64.
	OverflowUint(x uint64) bool

	// Internal accessors to the shared runtime type representation.
	common() *abi.Type
	uncommon() *uncommonType
}
247
248
249
250
251
252
253
254
255
256
261
262
263
// A Kind represents the specific kind of type that a Type represents.
// The zero Kind is not a valid kind.
type Kind uint

// The enumerated kinds. The ordering mirrors internal/abi's kind codes
// (see rtype.Kind, which converts directly), so it must not be changed.
const (
	Invalid Kind = iota
	Bool
	Int
	Int8
	Int16
	Int32
	Int64
	Uint
	Uint8
	Uint16
	Uint32
	Uint64
	Uintptr
	Float32
	Float64
	Complex64
	Complex128
	Array
	Chan
	Func
	Interface
	Map
	Pointer
	Slice
	String
	Struct
	UnsafePointer
)

// Ptr is the old name for the Pointer kind, kept for compatibility.
const Ptr = Pointer
298
299
300
301
302
// uncommonType is present only for defined types or types with methods.
// It is shared with the runtime via internal/abi.
type uncommonType = abi.UncommonType

// common embeds the shared runtime type descriptor.
type common struct {
	abi.Type
}

// rtype is the common implementation of most values.
// It is embedded in other struct types and wraps the abi.Type
// that the compiler and runtime share.
//
// rtype must be kept in sync with ../runtime/type.go:/^type._type.
type rtype struct {
	t abi.Type
}

// common returns the shared runtime descriptor for t.
func (t *rtype) common() *abi.Type {
	return &t.t
}

// uncommon returns t's uncommon data, or nil if t has none.
func (t *rtype) uncommon() *abi.UncommonType {
	return t.t.Uncommon()
}

// Offset aliases into the module data, shared with internal/abi.
type aNameOff = abi.NameOff // offset to a name
type aTypeOff = abi.TypeOff // offset to an *rtype
type aTextOff = abi.TextOff // offset from top of text section
327
328
// ChanDir represents a channel type's direction.
type ChanDir int

const (
	RecvDir ChanDir             = 1 << iota // <-chan
	SendDir                                 // chan<-
	BothDir = RecvDir | SendDir             // chan
)

// arrayType represents a fixed array type.
type arrayType = abi.ArrayType

// chanType represents a channel type.
type chanType = abi.ChanType

// funcType represents a function type.
//
// The abi.FuncType's trailing storage holds a []*rtype with the
// in and out parameter types, accessed via InSlice/OutSlice.
type funcType = abi.FuncType
355
356
// interfaceType represents an interface type.
type interfaceType struct {
	abi.InterfaceType
}

// nameOff resolves a name offset relative to t's module data.
func (t *interfaceType) nameOff(off aNameOff) abi.Name {
	return toRType(&t.Type).nameOff(off)
}

// nameOffFor resolves a name offset relative to an arbitrary *abi.Type.
func nameOffFor(t *abi.Type, off aNameOff) abi.Name {
	return toRType(t).nameOff(off)
}

// typeOffFor resolves a type offset relative to an arbitrary *abi.Type.
func typeOffFor(t *abi.Type, off aTypeOff) *abi.Type {
	return toRType(t).typeOff(off)
}

// typeOff resolves a type offset relative to t's module data.
func (t *interfaceType) typeOff(off aTypeOff) *abi.Type {
	return toRType(&t.Type).typeOff(off)
}

func (t *interfaceType) common() *abi.Type {
	return &t.Type
}

func (t *interfaceType) uncommon() *abi.UncommonType {
	return t.Uncommon()
}

// mapType represents a map type.
type mapType struct {
	abi.MapType
}

// ptrType represents a pointer type.
type ptrType struct {
	abi.PtrType
}

// sliceType represents a slice type.
type sliceType struct {
	abi.SliceType
}

// structField is a field in a struct type's descriptor.
type structField = abi.StructField

// structType represents a struct type.
type structType struct {
	abi.StructType
}
407
// pkgPath returns the package path encoded in n, or "" if n carries none.
//
// The encoded name layout is: one flag byte, a varint-prefixed name,
// an optional varint-prefixed tag, then (when flag bit 1<<2 is set)
// a 4-byte type-offset to the package path name.
func pkgPath(n abi.Name) string {
	// Bit 1<<2 of the flag byte marks a name that carries a package path.
	if n.Bytes == nil || *n.DataChecked(0, "name flag field")&(1<<2) == 0 {
		return ""
	}
	// Skip over the varint length prefix plus the name bytes.
	i, l := n.ReadVarint(1)
	off := 1 + i + l
	if n.HasTag() {
		// Skip over the tag's varint length prefix plus the tag bytes.
		i2, l2 := n.ReadVarint(off)
		off += i2 + l2
	}
	var nameOff int32
	// Note that this field may not be aligned in memory,
	// so we cannot use a direct int32 assignment here.
	copy((*[4]byte)(unsafe.Pointer(&nameOff))[:], (*[4]byte)(unsafe.Pointer(n.DataChecked(off, "name offset field")))[:])
	pkgPathName := abi.Name{Bytes: (*byte)(resolveTypeOff(unsafe.Pointer(n.Bytes), nameOff))}
	return pkgPathName.Name()
}

// newName encodes n, tag, and the exported/embedded flags into an abi.Name.
func newName(n, tag string, exported, embedded bool) abi.Name {
	return abi.NewName(n, tag, exported, embedded)
}
429
430
434
435
// Method represents a single method.
type Method struct {
	// Name is the method name.
	Name string

	// PkgPath is the package path that qualifies a lower case (unexported)
	// method name. It is empty for upper case (exported) method names.
	// The combination of PkgPath and Name uniquely identifies a method
	// in a method set.
	PkgPath string

	Type  Type  // method type
	Func  Value // func with receiver as first argument
	Index int   // index for Type.Method
}

// IsExported reports whether the method is exported.
func (m Method) IsExported() bool {
	// Only unexported methods record a qualifying package path.
	return m.PkgPath == ""
}
456
457
// String returns the name of k, or a "kindN" placeholder for a Kind
// value outside the defined range.
func (k Kind) String() string {
	if uint(k) < uint(len(kindNames)) {
		return kindNames[uint(k)]
	}
	return "kind" + strconv.Itoa(int(k))
}

// kindNames maps each Kind to its printed name, indexed by Kind value.
var kindNames = []string{
	Invalid:       "invalid",
	Bool:          "bool",
	Int:           "int",
	Int8:          "int8",
	Int16:         "int16",
	Int32:         "int32",
	Int64:         "int64",
	Uint:          "uint",
	Uint8:         "uint8",
	Uint16:        "uint16",
	Uint32:        "uint32",
	Uint64:        "uint64",
	Uintptr:       "uintptr",
	Float32:       "float32",
	Float64:       "float64",
	Complex64:     "complex64",
	Complex128:    "complex128",
	Array:         "array",
	Chan:          "chan",
	Func:          "func",
	Interface:     "interface",
	Map:           "map",
	Pointer:       "ptr",
	Slice:         "slice",
	String:        "string",
	Struct:        "struct",
	UnsafePointer: "unsafe.Pointer",
}
494
495
496
497
498
499
// resolveNameOff resolves a name offset from a base pointer.
// The (*rtype).nameOff method is a convenience wrapper for this function.
// Implemented in the runtime package.
func resolveNameOff(ptrInModule unsafe.Pointer, off int32) unsafe.Pointer

// resolveTypeOff resolves an *rtype offset from a base type.
// The (*rtype).typeOff method is a convenience wrapper for this function.
// Implemented in the runtime package.
func resolveTypeOff(rtype unsafe.Pointer, off int32) unsafe.Pointer

// resolveTextOff resolves a function pointer offset from a base type.
// The (*rtype).textOff method is a convenience wrapper for this function.
// Implemented in the runtime package.
func resolveTextOff(rtype unsafe.Pointer, off int32) unsafe.Pointer

// addReflectOff adds a pointer to the reflection lookup map in the runtime.
// It returns a new ID that can be used as a typeOff or textOff, and will
// be resolved correctly. Implemented in the runtime package.
func addReflectOff(ptr unsafe.Pointer) int32

// resolveReflectName adds a name to the reflection lookup map in the
// runtime. It returns a new nameOff that can be used to refer to the name.
func resolveReflectName(n abi.Name) aNameOff {
	return aNameOff(addReflectOff(unsafe.Pointer(n.Bytes)))
}

// resolveReflectType adds a *rtype to the reflection lookup map in the
// runtime. It returns a new typeOff that can be used to refer to the type.
func resolveReflectType(t *abi.Type) aTypeOff {
	return aTypeOff(addReflectOff(unsafe.Pointer(t)))
}

// resolveReflectText adds a function pointer to the reflection lookup map
// in the runtime. It returns a new textOff that can be used to refer to
// the pointer.
func resolveReflectText(ptr unsafe.Pointer) aTextOff {
	return aTextOff(addReflectOff(ptr))
}
541
// nameOff resolves a name offset relative to t.
func (t *rtype) nameOff(off aNameOff) abi.Name {
	return abi.Name{Bytes: (*byte)(resolveNameOff(unsafe.Pointer(t), int32(off)))}
}

// typeOff resolves a type offset relative to t.
func (t *rtype) typeOff(off aTypeOff) *abi.Type {
	return (*abi.Type)(resolveTypeOff(unsafe.Pointer(t), int32(off)))
}

// textOff resolves a text (code) offset relative to t.
func (t *rtype) textOff(off aTextOff) unsafe.Pointer {
	return resolveTextOff(unsafe.Pointer(t), int32(off))
}

// textOffFor resolves a text offset relative to an arbitrary *abi.Type.
func textOffFor(t *abi.Type, off aTextOff) unsafe.Pointer {
	return toRType(t).textOff(off)
}

// String returns t's string representation, stripping the leading '*'
// that TFlagExtraStar marks as an encoding artifact.
func (t *rtype) String() string {
	s := t.nameOff(t.t.Str).Name()
	if t.t.TFlag&abi.TFlagExtraStar != 0 {
		return s[1:]
	}
	return s
}
565
// Size returns the number of bytes needed to store a value of type t.
func (t *rtype) Size() uintptr { return t.t.Size() }

// Bits returns the size of t in bits.
// It panics for non-arithmetic kinds (anything outside Int..Complex128).
func (t *rtype) Bits() int {
	if t == nil {
		panic("reflect: Bits of nil Type")
	}
	k := t.Kind()
	if k < Int || k > Complex128 {
		panic("reflect: Bits of non-arithmetic Type " + t.String())
	}
	return int(t.t.Size_) * 8
}

// Align returns t's alignment when allocated in memory.
func (t *rtype) Align() int { return t.t.Align() }

// FieldAlign returns t's alignment when used as a struct field.
func (t *rtype) FieldAlign() int { return t.t.FieldAlign() }

// Kind converts the abi kind code directly to a reflect Kind;
// the two enumerations share values.
func (t *rtype) Kind() Kind { return Kind(t.t.Kind()) }
584
// exportedMethods returns t's exported methods, or nil if t has no
// uncommon data (and therefore no methods).
func (t *rtype) exportedMethods() []abi.Method {
	ut := t.uncommon()
	if ut == nil {
		return nil
	}
	return ut.ExportedMethods()
}

// NumMethod returns the number of methods in t's method set:
// all methods for interfaces, exported methods only otherwise.
func (t *rtype) NumMethod() int {
	if t.Kind() == Interface {
		tt := (*interfaceType)(unsafe.Pointer(t))
		return tt.NumMethod()
	}
	return len(t.exportedMethods())
}
600
// Method returns t's i'th exported method.
// For non-interface types it builds a Method whose Type has the receiver
// prepended as the first parameter and whose Func invokes the method.
func (t *rtype) Method(i int) (m Method) {
	if t.Kind() == Interface {
		tt := (*interfaceType)(unsafe.Pointer(t))
		return tt.Method(i)
	}
	methods := t.exportedMethods()
	if i < 0 || i >= len(methods) {
		panic("reflect: Method index out of range")
	}
	p := methods[i]
	pname := t.nameOff(p.Name)
	m.Name = pname.Name()
	fl := flag(Func)
	mtyp := t.typeOff(p.Mtyp)
	ft := (*funcType)(unsafe.Pointer(mtyp))
	// Rebuild the signature with the receiver as the first input.
	in := make([]Type, 0, 1+ft.NumIn())
	in = append(in, t)
	for _, arg := range ft.InSlice() {
		in = append(in, toRType(arg))
	}
	out := make([]Type, 0, ft.NumOut())
	for _, ret := range ft.OutSlice() {
		out = append(out, toRType(ret))
	}
	mt := FuncOf(in, out, ft.IsVariadic())
	m.Type = mt
	// The method value's code pointer is stored indirectly: fn points at
	// a word holding the resolved text offset.
	tfn := t.textOff(p.Tfn)
	fn := unsafe.Pointer(&tfn)
	m.Func = Value{&mt.(*rtype).t, fn, fl}

	m.Index = i
	return m
}
634
// MethodByName returns the method with the given name in t's method set,
// and whether it was found.
func (t *rtype) MethodByName(name string) (m Method, ok bool) {
	if t.Kind() == Interface {
		tt := (*interfaceType)(unsafe.Pointer(t))
		return tt.MethodByName(name)
	}
	ut := t.uncommon()
	if ut == nil {
		return Method{}, false
	}

	methods := ut.ExportedMethods()

	// Binary search for the smallest index i such that
	// methods[i].Name >= name; exported methods are sorted by name.
	i, j := 0, len(methods)
	for i < j {
		h := int(uint(i+j) >> 1)
		// i <= h < j
		if !(t.nameOff(methods[h].Name).Name() >= name) {
			i = h + 1
		} else {
			j = h
		}
	}

	if i < len(methods) && name == t.nameOff(methods[i].Name).Name() {
		return t.Method(i), true
	}

	return Method{}, false
}
666
// PkgPath returns the import path of t's defining package,
// or "" for predeclared and non-defined types.
func (t *rtype) PkgPath() string {
	if t.t.TFlag&abi.TFlagNamed == 0 {
		return ""
	}
	ut := t.uncommon()
	if ut == nil {
		return ""
	}
	return t.nameOff(ut.PkgPath).Name()
}

// pkgPathFor is the *abi.Type form of (*rtype).PkgPath.
func pkgPathFor(t *abi.Type) string {
	return toRType(t).PkgPath()
}

// Name returns t's name within its package, or "" for unnamed types.
// It strips the package qualifier from t's string form, scanning
// backwards for the last '.' that is not inside the square brackets
// of a generic type's type-argument list.
func (t *rtype) Name() string {
	if !t.t.HasName() {
		return ""
	}
	s := t.String()
	i := len(s) - 1
	sqBrackets := 0
	for i >= 0 && (s[i] != '.' || sqBrackets != 0) {
		switch s[i] {
		case ']':
			sqBrackets++
		case '[':
			sqBrackets--
		}
		i--
	}
	return s[i+1:]
}

// nameFor is the *abi.Type form of (*rtype).Name.
func nameFor(t *abi.Type) string {
	return toRType(t).Name()
}
704
// ChanDir returns t's channel direction.
// It panics if t's Kind is not Chan.
func (t *rtype) ChanDir() ChanDir {
	if t.Kind() != Chan {
		panic("reflect: ChanDir of non-chan type " + t.String())
	}
	tt := (*abi.ChanType)(unsafe.Pointer(t))
	return ChanDir(tt.Dir)
}

// toRType reinterprets an *abi.Type as an *rtype; the two have
// identical layout (rtype wraps abi.Type as its only field).
func toRType(t *abi.Type) *rtype {
	return (*rtype)(unsafe.Pointer(t))
}

// elem returns t's element type, panicking for kinds without one.
func elem(t *abi.Type) *abi.Type {
	et := t.Elem()
	if et != nil {
		return et
	}
	panic("reflect: Elem of invalid type " + stringFor(t))
}

// Elem returns t's element type.
// It panics if t's Kind is not Array, Chan, Map, Pointer, or Slice.
func (t *rtype) Elem() Type {
	return toType(elem(t.common()))
}
728
// Field returns t's i'th struct field.
// It panics if t's Kind is not Struct.
func (t *rtype) Field(i int) StructField {
	if t.Kind() != Struct {
		panic("reflect: Field of non-struct type " + t.String())
	}
	tt := (*structType)(unsafe.Pointer(t))
	return tt.Field(i)
}

// FieldByIndex returns the nested field corresponding to index.
// It panics if t's Kind is not Struct.
func (t *rtype) FieldByIndex(index []int) StructField {
	if t.Kind() != Struct {
		panic("reflect: FieldByIndex of non-struct type " + t.String())
	}
	tt := (*structType)(unsafe.Pointer(t))
	return tt.FieldByIndex(index)
}

// FieldByName returns the struct field with the given name and whether
// it was found. It panics if t's Kind is not Struct.
func (t *rtype) FieldByName(name string) (StructField, bool) {
	if t.Kind() != Struct {
		panic("reflect: FieldByName of non-struct type " + t.String())
	}
	tt := (*structType)(unsafe.Pointer(t))
	return tt.FieldByName(name)
}

// FieldByNameFunc returns the first struct field whose name satisfies
// match, and whether one was found. It panics if t's Kind is not Struct.
func (t *rtype) FieldByNameFunc(match func(string) bool) (StructField, bool) {
	if t.Kind() != Struct {
		panic("reflect: FieldByNameFunc of non-struct type " + t.String())
	}
	tt := (*structType)(unsafe.Pointer(t))
	return tt.FieldByNameFunc(match)
}

// Key returns t's map key type.
// It panics if t's Kind is not Map.
func (t *rtype) Key() Type {
	if t.Kind() != Map {
		panic("reflect: Key of non-map type " + t.String())
	}
	tt := (*mapType)(unsafe.Pointer(t))
	return toType(tt.Key)
}
768
// Len returns t's array length.
// It panics if t's Kind is not Array.
func (t *rtype) Len() int {
	if t.Kind() != Array {
		panic("reflect: Len of non-array type " + t.String())
	}
	tt := (*arrayType)(unsafe.Pointer(t))
	return int(tt.Len)
}

// NumField returns t's struct field count.
// It panics if t's Kind is not Struct.
func (t *rtype) NumField() int {
	if t.Kind() != Struct {
		panic("reflect: NumField of non-struct type " + t.String())
	}
	tt := (*structType)(unsafe.Pointer(t))
	return len(tt.Fields)
}

// In returns the type of t's i'th input parameter.
// It panics if t's Kind is not Func.
func (t *rtype) In(i int) Type {
	if t.Kind() != Func {
		panic("reflect: In of non-func type " + t.String())
	}
	tt := (*abi.FuncType)(unsafe.Pointer(t))
	return toType(tt.InSlice()[i])
}

// NumIn returns t's input parameter count.
// It panics if t's Kind is not Func.
func (t *rtype) NumIn() int {
	if t.Kind() != Func {
		panic("reflect: NumIn of non-func type " + t.String())
	}
	tt := (*abi.FuncType)(unsafe.Pointer(t))
	return tt.NumIn()
}

// NumOut returns t's output parameter count.
// It panics if t's Kind is not Func.
func (t *rtype) NumOut() int {
	if t.Kind() != Func {
		panic("reflect: NumOut of non-func type " + t.String())
	}
	tt := (*abi.FuncType)(unsafe.Pointer(t))
	return tt.NumOut()
}

// Out returns the type of t's i'th output parameter.
// It panics if t's Kind is not Func.
func (t *rtype) Out(i int) Type {
	if t.Kind() != Func {
		panic("reflect: Out of non-func type " + t.String())
	}
	tt := (*abi.FuncType)(unsafe.Pointer(t))
	return toType(tt.OutSlice()[i])
}

// IsVariadic reports whether t's final input parameter is a "..." parameter.
// It panics if t's Kind is not Func.
func (t *rtype) IsVariadic() bool {
	if t.Kind() != Func {
		panic("reflect: IsVariadic of non-func type " + t.String())
	}
	tt := (*abi.FuncType)(unsafe.Pointer(t))
	return tt.IsVariadic()
}
824
// OverflowComplex reports whether x cannot be represented by type t.
// It panics if t's Kind is not Complex64 or Complex128.
func (t *rtype) OverflowComplex(x complex128) bool {
	k := t.Kind()
	switch k {
	case Complex64:
		// complex64 overflows exactly when either component
		// overflows float32.
		return overflowFloat32(real(x)) || overflowFloat32(imag(x))
	case Complex128:
		return false
	}
	panic("reflect: OverflowComplex of non-complex type " + t.String())
}

// OverflowFloat reports whether x cannot be represented by type t.
// It panics if t's Kind is not Float32 or Float64.
func (t *rtype) OverflowFloat(x float64) bool {
	k := t.Kind()
	switch k {
	case Float32:
		return overflowFloat32(x)
	case Float64:
		return false
	}
	panic("reflect: OverflowFloat of non-float type " + t.String())
}

// OverflowInt reports whether x cannot be represented by type t.
// It panics if t's Kind is not a signed integer kind.
func (t *rtype) OverflowInt(x int64) bool {
	k := t.Kind()
	switch k {
	case Int, Int8, Int16, Int32, Int64:
		// Sign-extend x from t's bit width; x fits iff the
		// round trip is lossless.
		bitSize := t.Size() * 8
		trunc := (x << (64 - bitSize)) >> (64 - bitSize)
		return x != trunc
	}
	panic("reflect: OverflowInt of non-int type " + t.String())
}

// OverflowUint reports whether x cannot be represented by type t.
// It panics if t's Kind is not an unsigned integer kind.
func (t *rtype) OverflowUint(x uint64) bool {
	k := t.Kind()
	switch k {
	case Uint, Uintptr, Uint8, Uint16, Uint32, Uint64:
		// Zero-extend x from t's bit width; x fits iff the
		// round trip is lossless.
		bitSize := t.Size() * 8
		trunc := (x << (64 - bitSize)) >> (64 - bitSize)
		return x != trunc
	}
	panic("reflect: OverflowUint of non-uint type " + t.String())
}
868
869
870
871
872
873
874
875
// add returns p+x.
//
// The whySafe string is ignored, so that the function still inlines
// as efficiently as p+x, but all call sites should use the string to
// record why the addition is safe, which is to say why the addition
// does not cause x to advance to the very end of p's allocation
// and therefore point incorrectly at the next block in memory.
func add(p unsafe.Pointer, x uintptr, whySafe string) unsafe.Pointer {
	return unsafe.Pointer(uintptr(p) + x)
}
879
880 func (d ChanDir) String() string {
881 switch d {
882 case SendDir:
883 return "chan<-"
884 case RecvDir:
885 return "<-chan"
886 case BothDir:
887 return "chan"
888 }
889 return "ChanDir" + strconv.Itoa(int(d))
890 }
891
892
// Method returns the i'th method in the type's method set.
// Out-of-range indexes return the zero Method.
func (t *interfaceType) Method(i int) (m Method) {
	if i < 0 || i >= len(t.Methods) {
		return
	}
	p := &t.Methods[i]
	pname := t.nameOff(p.Name)
	m.Name = pname.Name()
	if !pname.IsExported() {
		// Unexported methods record the package that declared them;
		// fall back to the interface's own package when the name
		// carries none.
		m.PkgPath = pkgPath(pname)
		if m.PkgPath == "" {
			m.PkgPath = t.PkgPath.Name()
		}
	}
	m.Type = toType(t.typeOff(p.Typ))
	m.Index = i
	return
}

// NumMethod returns the number of interface methods in the type's
// method set.
func (t *interfaceType) NumMethod() int { return len(t.Methods) }

// MethodByName returns the method with the given name in the type's
// method set, and whether it was found.
func (t *interfaceType) MethodByName(name string) (m Method, ok bool) {
	if t == nil {
		return
	}
	var p *abi.Imethod
	for i := range t.Methods {
		p = &t.Methods[i]
		if t.nameOff(p.Name).Name() == name {
			return t.Method(i), true
		}
	}
	return
}
928
929
// A StructField describes a single field in a struct.
type StructField struct {
	// Name is the field name.
	Name string

	// PkgPath is the package path that qualifies a lower case (unexported)
	// field name. It is empty for upper case (exported) field names.
	PkgPath string

	Type      Type      // field type
	Tag       StructTag // field tag string
	Offset    uintptr   // offset within struct, in bytes
	Index     []int     // index sequence for Type.FieldByIndex
	Anonymous bool      // is an embedded field
}

// IsExported reports whether the field is exported.
func (f StructField) IsExported() bool {
	// Only unexported fields record a qualifying package path.
	return f.PkgPath == ""
}

// A StructTag is the tag string in a struct field.
//
// By convention, tag strings are a concatenation of optionally
// space-separated key:"value" pairs. Each key is a non-empty string
// consisting of non-control characters other than space (U+0020 ' '),
// quote (U+0022 '"'), and colon (U+003A ':'). Each value is quoted
// using U+0022 '"' characters and Go string literal syntax.
type StructTag string

// Get returns the value associated with key in the tag string.
// If there is no such key in the tag, Get returns the empty string.
// Get does not discriminate between an absent key and a key with an
// empty value; use Lookup for that.
func (tag StructTag) Get(key string) string {
	v, _ := tag.Lookup(key)
	return v
}
970
971
972
973
974
975
976
977 func (tag StructTag) Lookup(key string) (value string, ok bool) {
978
979
980
981 for tag != "" {
982
983 i := 0
984 for i < len(tag) && tag[i] == ' ' {
985 i++
986 }
987 tag = tag[i:]
988 if tag == "" {
989 break
990 }
991
992
993
994
995
996 i = 0
997 for i < len(tag) && tag[i] > ' ' && tag[i] != ':' && tag[i] != '"' && tag[i] != 0x7f {
998 i++
999 }
1000 if i == 0 || i+1 >= len(tag) || tag[i] != ':' || tag[i+1] != '"' {
1001 break
1002 }
1003 name := string(tag[:i])
1004 tag = tag[i+1:]
1005
1006
1007 i = 1
1008 for i < len(tag) && tag[i] != '"' {
1009 if tag[i] == '\\' {
1010 i++
1011 }
1012 i++
1013 }
1014 if i >= len(tag) {
1015 break
1016 }
1017 qvalue := string(tag[:i+1])
1018 tag = tag[i+1:]
1019
1020 if key == name {
1021 value, err := strconv.Unquote(qvalue)
1022 if err != nil {
1023 break
1024 }
1025 return value, true
1026 }
1027 }
1028 return "", false
1029 }
1030
1031
// Field returns the i'th struct field, with Index set to the
// single-element path {i}.
func (t *structType) Field(i int) (f StructField) {
	if i < 0 || i >= len(t.Fields) {
		panic("reflect: Field index out of bounds")
	}
	p := &t.Fields[i]
	f.Type = toType(p.Typ)
	f.Name = p.Name.Name()
	f.Anonymous = p.Embedded()
	if !p.Name.IsExported() {
		// Unexported fields are qualified by the struct's own package.
		f.PkgPath = t.PkgPath.Name()
	}
	if tag := p.Name.Tag(); tag != "" {
		f.Tag = StructTag(tag)
	}
	f.Offset = p.Offset

	// NOTE(rsc): This is the only allocation in the interface
	// presented by a reflect.Type. It would be nice to avoid,
	// but we need to make sure that misbehaving clients of
	// reflect cannot affect other uses of reflect.
	// One possibility is CL 5371098, but we postponed that
	// ugliness until there is a demonstrated
	// need for the performance. This is issue 2320.
	f.Index = []int{i}
	return
}
1058
1059
1060
1061
1062
// FieldByIndex returns the nested field corresponding to index,
// following each index through successive (possibly pointer-to-)
// struct types.
func (t *structType) FieldByIndex(index []int) (f StructField) {
	f.Type = toType(&t.Type)
	for i, x := range index {
		if i > 0 {
			// After the first hop, step through an embedded
			// *struct automatically.
			ft := f.Type
			if ft.Kind() == Pointer && ft.Elem().Kind() == Struct {
				ft = ft.Elem()
			}
			f.Type = ft
		}
		f = f.Type.Field(x)
	}
	return
}

// A fieldScan represents an item on the fieldByNameFunc scan work list.
type fieldScan struct {
	typ   *structType
	index []int
}
1083
1084
1085
// FieldByNameFunc returns the struct field with a name that satisfies the
// match function and a boolean to indicate if the field was found.
func (t *structType) FieldByNameFunc(match func(string) bool) (result StructField, ok bool) {
	// The algorithm is breadth first search, one depth level at a time.

	// The current and next slices are work queues:
	// current lists the fields to visit on this depth level,
	// and next lists the fields on the next lower level.
	current := []fieldScan{}
	next := []fieldScan{{typ: t}}

	// nextCount records the number of times an embedded type has been
	// encountered and considered for queueing in the 'next' slice.
	// We only queue the first one, but we increment the count on each.
	// If a struct type T can be reached more than once at a given depth
	// level, then it annihilates itself and need not be considered at
	// all when we process that next depth level.
	var nextCount map[*structType]int

	// visited records the structs that have been considered already.
	// Embedded pointer fields can create cycles in the graph of
	// reachable embedded types; visited avoids following those cycles.
	// It also avoids duplicated effort: if we didn't find the field in
	// an embedded type T at level 2, we won't find it in one at level 4
	// either.
	visited := map[*structType]bool{}

	for len(next) > 0 {
		current, next = next, current[:0]
		count := nextCount
		nextCount = nil

		// Process all the fields at this depth, now listed in 'current'.
		// The loop queues embedded fields found in 'next', for processing
		// during the next iteration. The multiplicity of the 'current'
		// field counts is recorded in 'count'; the multiplicity of the
		// 'next' field counts is recorded in 'nextCount'.
		for _, scan := range current {
			t := scan.typ
			if visited[t] {
				// We've looked through this type before, at a
				// higher level. That higher level would shadow
				// the lower level we're now at, so this one
				// can't be useful to us. Ignore it.
				continue
			}
			visited[t] = true
			for i := range t.Fields {
				f := &t.Fields[i]
				// Find name and (for embedded field) type for field f.
				fname := f.Name.Name()
				var ntyp *abi.Type
				if f.Embedded() {
					// Embedded field of type T or *T.
					ntyp = f.Typ
					if ntyp.Kind() == abi.Pointer {
						ntyp = ntyp.Elem()
					}
				}

				// Does it match?
				if match(fname) {
					// Potential match
					if count[t] > 1 || ok {
						// Name appeared multiple times at this
						// level: annihilate.
						return StructField{}, false
					}
					result = t.Field(i)
					result.Index = nil
					result.Index = append(result.Index, scan.index...)
					result.Index = append(result.Index, i)
					ok = true
					continue
				}

				// Queue embedded struct fields for processing with next
				// level, but only if we haven't seen a match yet at this
				// level and only if the embedded types haven't already
				// been queued.
				if ok || ntyp == nil || ntyp.Kind() != abi.Struct {
					continue
				}
				styp := (*structType)(unsafe.Pointer(ntyp))
				if nextCount[styp] > 0 {
					nextCount[styp] = 2 // exact multiple doesn't matter
					continue
				}
				if nextCount == nil {
					nextCount = map[*structType]int{}
				}
				nextCount[styp] = 1
				if count[t] > 1 {
					nextCount[styp] = 2 // matches at this level annihilate
				}
				var index []int
				index = append(index, scan.index...)
				index = append(index, i)
				next = append(next, fieldScan{styp, index})
			}
		}
		if ok {
			break
		}
	}
	return
}
1189
1190
1191
// FieldByName returns the struct field with the given name
// and a boolean to indicate if the field was found.
func (t *structType) FieldByName(name string) (f StructField, present bool) {
	// Quick check for top-level name, or struct without embedded fields.
	hasEmbeds := false
	if name != "" {
		for i := range t.Fields {
			tf := &t.Fields[i]
			if tf.Name.Name() == name {
				return t.Field(i), true
			}
			if tf.Embedded() {
				hasEmbeds = true
			}
		}
	}
	if !hasEmbeds {
		return
	}
	// Fall back to the full breadth-first search through embedded fields.
	return t.FieldByNameFunc(func(s string) bool { return s == name })
}
1211
1212
1213
// TypeOf returns the reflection Type that represents the dynamic type of i.
// If i is a nil interface value, TypeOf returns nil.
func TypeOf(i any) Type {
	return toType(abi.TypeOf(i))
}

// rtypeOf directly extracts the *abi.Type of the provided value.
func rtypeOf(i any) *abi.Type {
	return abi.TypeOf(i)
}

// ptrMap is the cache for PointerTo: map[*rtype]*ptrType.
var ptrMap sync.Map

// PtrTo returns the pointer type with element t.
// For example, if t represents type Foo, PtrTo(t) represents *Foo.
//
// PtrTo is the old spelling of PointerTo.
// The two functions behave identically.
func PtrTo(t Type) Type { return PointerTo(t) }
1234
1235
1236
// PointerTo returns the pointer type with element t.
// For example, if t represents type Foo, PointerTo(t) represents *Foo.
func PointerTo(t Type) Type {
	return toRType(t.(*rtype).ptrTo())
}

// ptrTo returns the descriptor for *t, creating and caching one if the
// compiler did not already emit it.
func (t *rtype) ptrTo() *abi.Type {
	at := &t.t
	// Fast path: the compiler recorded the pointer type directly.
	if at.PtrToThis != 0 {
		return t.typeOff(at.PtrToThis)
	}

	// Check the cache.
	if pi, ok := ptrMap.Load(t); ok {
		return &pi.(*ptrType).Type
	}

	// Look in known types.
	s := "*" + t.String()
	for _, tt := range typesByString(s) {
		p := (*ptrType)(unsafe.Pointer(tt))
		if p.Elem != &t.t {
			// String collision with a different element type; keep looking.
			continue
		}
		pi, _ := ptrMap.LoadOrStore(t, p)
		return &pi.(*ptrType).Type
	}

	// Create a new ptrType starting with the description
	// of an *unsafe.Pointer.
	var iptr any = (*unsafe.Pointer)(nil)
	prototype := *(**ptrType)(unsafe.Pointer(&iptr))
	pp := *prototype

	pp.Str = resolveReflectName(newName(s, "", false, false))
	pp.PtrToThis = 0

	// For the type structures linked into the binary, the
	// compiler provides a good hash of the string.
	// Create a good hash for the new string by using
	// the FNV-1 hash's mixing function to combine the
	// old hash and the new "*".
	pp.Hash = fnv1(t.t.Hash, '*')

	pp.Elem = at

	pi, _ := ptrMap.LoadOrStore(t, &pp)
	return &pi.(*ptrType).Type
}

// ptrTo is the *abi.Type form of (*rtype).ptrTo.
func ptrTo(t *abi.Type) *abi.Type {
	return toRType(t).ptrTo()
}
1288
1289
// fnv1 incorporates the list of bytes into the hash x using the FNV-1 hash
// function: multiply by the 32-bit FNV prime, then XOR in the next byte.
func fnv1(x uint32, list ...byte) uint32 {
	h := x
	for i := 0; i < len(list); i++ {
		h = h*16777619 ^ uint32(list[i])
	}
	return h
}
1296
// Implements reports whether t implements the interface type u.
// It panics if u is nil or not an interface type.
func (t *rtype) Implements(u Type) bool {
	if u == nil {
		panic("reflect: nil type passed to Type.Implements")
	}
	if u.Kind() != Interface {
		panic("reflect: non-interface type passed to Type.Implements")
	}
	return implements(u.common(), t.common())
}

// AssignableTo reports whether a value of type t is assignable to type u:
// either directly assignable, or assignable because t implements the
// interface u.
func (t *rtype) AssignableTo(u Type) bool {
	if u == nil {
		panic("reflect: nil type passed to Type.AssignableTo")
	}
	uu := u.common()
	return directlyAssignable(uu, t.common()) || implements(uu, t.common())
}

// ConvertibleTo reports whether a value of type t is convertible to type u.
func (t *rtype) ConvertibleTo(u Type) bool {
	if u == nil {
		panic("reflect: nil type passed to Type.ConvertibleTo")
	}
	return convertOp(u.common(), t.common()) != nil
}

// Comparable reports whether values of type t are comparable:
// the runtime records an equality function only for comparable types.
func (t *rtype) Comparable() bool {
	return t.t.Equal != nil
}
1325
1326
// implements reports whether the type V implements the interface type T.
func implements(T, V *abi.Type) bool {
	if T.Kind() != abi.Interface {
		return false
	}
	t := (*interfaceType)(unsafe.Pointer(T))
	// The empty interface is implemented by everything.
	if len(t.Methods) == 0 {
		return true
	}

	// The same algorithm applies in both cases below, but the
	// method tables for an interface type and a concrete type
	// are permuted differently, so the code is duplicated.
	// In both cases the algorithm is a linear scan over the two
	// lists - T's methods and V's methods - simultaneously.
	// Since method tables are stored in a unique sorted order
	// (alphabetical, with no duplicate method names), the scan
	// through V's methods must hit a match for each of T's
	// methods along the way, or else V does not implement T.
	// This lets us run the scan in overall linear time instead of
	// the quadratic time a naive search would require.
	if V.Kind() == abi.Interface {
		v := (*interfaceType)(unsafe.Pointer(V))
		i := 0
		for j := 0; j < len(v.Methods); j++ {
			tm := &t.Methods[i]
			tmName := t.nameOff(tm.Name)
			vm := &v.Methods[j]
			vmName := nameOffFor(V, vm.Name)
			if vmName.Name() == tmName.Name() && typeOffFor(V, vm.Typ) == t.typeOff(tm.Typ) {
				if !tmName.IsExported() {
					// Unexported methods match only within the
					// same declaring package.
					tmPkgPath := pkgPath(tmName)
					if tmPkgPath == "" {
						tmPkgPath = t.PkgPath.Name()
					}
					vmPkgPath := pkgPath(vmName)
					if vmPkgPath == "" {
						vmPkgPath = v.PkgPath.Name()
					}
					if tmPkgPath != vmPkgPath {
						continue
					}
				}
				if i++; i >= len(t.Methods) {
					return true
				}
			}
		}
		return false
	}

	v := V.Uncommon()
	if v == nil {
		return false
	}
	i := 0
	vmethods := v.Methods()
	for j := 0; j < int(v.Mcount); j++ {
		tm := &t.Methods[i]
		tmName := t.nameOff(tm.Name)
		vm := vmethods[j]
		vmName := nameOffFor(V, vm.Name)
		if vmName.Name() == tmName.Name() && typeOffFor(V, vm.Mtyp) == t.typeOff(tm.Typ) {
			if !tmName.IsExported() {
				// Unexported methods match only within the
				// same declaring package.
				tmPkgPath := pkgPath(tmName)
				if tmPkgPath == "" {
					tmPkgPath = t.PkgPath.Name()
				}
				vmPkgPath := pkgPath(vmName)
				if vmPkgPath == "" {
					vmPkgPath = nameOffFor(V, v.PkgPath).Name()
				}
				if tmPkgPath != vmPkgPath {
					continue
				}
			}
			if i++; i >= len(t.Methods) {
				return true
			}
		}
	}
	return false
}
1410
1411
1412
1413
1414
// specialChannelAssignability reports whether a value x of channel type V
// can be directly assigned (using memmove) to another channel type T.
// https://golang.org/doc/go_spec.html#Assignability
// T and V must be both of Chan kind.
func specialChannelAssignability(T, V *abi.Type) bool {
	// Special case:
	// x is a bidirectional channel value, T is a channel type,
	// x's type V and T have identical element types,
	// and at least one of V or T is not a defined type.
	return V.ChanDir() == abi.BothDir && (nameFor(T) == "" || nameFor(V) == "") && haveIdenticalType(T.Elem(), V.Elem(), true)
}

// directlyAssignable reports whether a value x of type V can be directly
// assigned (using memmove) to a value of type T.
// https://golang.org/doc/go_spec.html#Assignability
// Ignoring the interface rules (implemented elsewhere)
// and the ideal constant rules (no ideal constants at run time).
func directlyAssignable(T, V *abi.Type) bool {
	// x's type V is identical to T?
	if T == V {
		return true
	}

	// Otherwise at least one of T and V must not be defined
	// and they must have the same kind.
	if T.HasName() && V.HasName() || T.Kind() != V.Kind() {
		return false
	}

	if T.Kind() == abi.Chan && specialChannelAssignability(T, V) {
		return true
	}

	// x's type T and V must have identical underlying types.
	return haveIdenticalUnderlyingType(T, V, true)
}
1447
// haveIdenticalType reports whether T and V are identical types.
// When cmpTags is true, identity is pointer equality of the descriptors
// (struct tags participate); otherwise names, kinds, package paths, and
// underlying structure are compared with tags ignored.
func haveIdenticalType(T, V *abi.Type, cmpTags bool) bool {
	if cmpTags {
		return T == V
	}

	if nameFor(T) != nameFor(V) || T.Kind() != V.Kind() || pkgPathFor(T) != pkgPathFor(V) {
		return false
	}

	return haveIdenticalUnderlyingType(T, V, false)
}
1459
// haveIdenticalUnderlyingType reports whether T and V have identical
// underlying types, comparing element/key/field/parameter types
// recursively. When cmpTags is true, struct field tags must also match.
func haveIdenticalUnderlyingType(T, V *abi.Type, cmpTags bool) bool {
	if T == V {
		return true
	}

	kind := Kind(T.Kind())
	if kind != Kind(V.Kind()) {
		return false
	}

	// Non-composite types of equal kind have same underlying type
	// (the predefined instance of the type).
	if Bool <= kind && kind <= Complex128 || kind == String || kind == UnsafePointer {
		return true
	}

	// Composite types.
	switch kind {
	case Array:
		return T.Len() == V.Len() && haveIdenticalType(T.Elem(), V.Elem(), cmpTags)

	case Chan:
		return V.ChanDir() == T.ChanDir() && haveIdenticalType(T.Elem(), V.Elem(), cmpTags)

	case Func:
		t := (*funcType)(unsafe.Pointer(T))
		v := (*funcType)(unsafe.Pointer(V))
		// OutCount's high bit encodes variadicity, so comparing the raw
		// counts also compares whether both are variadic.
		if t.OutCount != v.OutCount || t.InCount != v.InCount {
			return false
		}
		for i := 0; i < t.NumIn(); i++ {
			if !haveIdenticalType(t.In(i), v.In(i), cmpTags) {
				return false
			}
		}
		for i := 0; i < t.NumOut(); i++ {
			if !haveIdenticalType(t.Out(i), v.Out(i), cmpTags) {
				return false
			}
		}
		return true

	case Interface:
		t := (*interfaceType)(unsafe.Pointer(T))
		v := (*interfaceType)(unsafe.Pointer(V))
		if len(t.Methods) == 0 && len(v.Methods) == 0 {
			return true
		}
		// Might have the same methods but still
		// need a run time conversion.
		return false

	case Map:
		return haveIdenticalType(T.Key(), V.Key(), cmpTags) && haveIdenticalType(T.Elem(), V.Elem(), cmpTags)

	case Pointer, Slice:
		return haveIdenticalType(T.Elem(), V.Elem(), cmpTags)

	case Struct:
		t := (*structType)(unsafe.Pointer(T))
		v := (*structType)(unsafe.Pointer(V))
		if len(t.Fields) != len(v.Fields) {
			return false
		}
		if t.PkgPath.Name() != v.PkgPath.Name() {
			return false
		}
		for i := range t.Fields {
			tf := &t.Fields[i]
			vf := &v.Fields[i]
			if tf.Name.Name() != vf.Name.Name() {
				return false
			}
			if !haveIdenticalType(tf.Typ, vf.Typ, cmpTags) {
				return false
			}
			if cmpTags && tf.Name.Tag() != vf.Name.Tag() {
				return false
			}
			if tf.Offset != vf.Offset {
				return false
			}
			if tf.Embedded() != vf.Embedded() {
				return false
			}
		}
		return true
	}

	return false
}
1551
1552
1553
1554
1555
1556
1557
1558
1559
1560
1561
1562
1563
1564
1565
1566
1567
1568
1569
1570
// typelinks is implemented in package runtime.
// It returns a slice of the sections in each module,
// and a slice of *rtype offsets in each module.
//
// The types in each module are sorted by string. That is, the first
// two linked types of the first module are:
//
//	d0 := sections[0]
//	t1 := (*rtype)(add(d0, offset[0][0]))
//	t2 := (*rtype)(add(d0, offset[0][1]))
//
// and
//
//	t1.String() < t2.String()
//
// Note that strings are not unique identifiers for types:
// there can be more than one with a given string.
func typelinks() (sections []unsafe.Pointer, offset [][]int32)

// rtypeOff dereferences a typelink offset within its section.
func rtypeOff(section unsafe.Pointer, off int32) *abi.Type {
	return (*abi.Type)(add(section, uintptr(off), "sizeof(rtype) > 0"))
}
1576
1577
1578
1579
1580
// typesByString returns the subslice of typelinks() whose elements have
// the given string representation. It may be empty (no known types with
// that string) or may have multiple elements (multiple types with that
// string).
func typesByString(s string) []*abi.Type {
	sections, offset := typelinks()
	var ret []*abi.Type

	for offsI, offs := range offset {
		section := sections[offsI]

		// We are looking for the first index i where the string becomes
		// >= s. This is a copy of sort.Search, with f(h) replaced by
		// (*typ[h].String() >= s).
		i, j := 0, len(offs)
		for i < j {
			h := int(uint(i+j) >> 1) // avoid overflow when computing h
			// i <= h < j
			if !(stringFor(rtypeOff(section, offs[h])) >= s) {
				i = h + 1 // preserves f(i-1) == false
			} else {
				j = h // preserves f(j) == true
			}
		}
		// i == j, f(i-1) == false, and f(j) (= f(i)) == true  =>  answer is i.

		// Having found the first, linear scan forward to find the last.
		// We could do a second binary search, but the caller is going
		// to do a linear scan anyway.
		for j := i; j < len(offs); j++ {
			typ := rtypeOff(section, offs[j])
			if stringFor(typ) != s {
				break
			}
			ret = append(ret, typ)
		}
	}
	return ret
}
1615
1616
// The lookupCache caches ArrayOf, ChanOf, MapOf and SliceOf lookups.
var lookupCache sync.Map // map[cacheKey]*rtype

// A cacheKey is the key for use in the lookupCache.
// Four values describe any of the types we are looking for:
// type kind, one or two subtypes, and an extra integer.
type cacheKey struct {
	kind  Kind
	t1    *abi.Type
	t2    *abi.Type
	extra uintptr
}

// The funcLookupCache caches FuncOf lookups.
// FuncOf does not share the common lookupCache since cacheKey is not
// sufficient to represent functions unambiguously.
var funcLookupCache struct {
	sync.Mutex // Guards stores (but not loads) on m.

	// m is a map[uint32][]*rtype keyed by the hash calculated in FuncOf.
	// Elements of m are append-only and thus safe for concurrent reading.
	m sync.Map
}
1639
1640
1641
1642
1643
1644
// ChanOf returns the channel type with the given direction and element type.
// For example, if t represents int, ChanOf(RecvDir, t) represents <-chan int.
//
// The gc runtime imposes a limit of 64 kB on channel element types.
// If t's size is equal to or exceeds this limit, ChanOf panics.
func ChanOf(dir ChanDir, t Type) Type {
	typ := t.common()

	// Look in cache.
	ckey := cacheKey{Chan, typ, nil, uintptr(dir)}
	if ch, ok := lookupCache.Load(ckey); ok {
		return ch.(*rtype)
	}

	// This restriction is imposed by the gc compiler and the runtime.
	if typ.Size_ >= 1<<16 {
		panic("reflect.ChanOf: element size too large")
	}

	// Look in known types.
	var s string
	switch dir {
	default:
		panic("reflect.ChanOf: invalid dir")
	case SendDir:
		s = "chan<- " + stringFor(typ)
	case RecvDir:
		s = "<-chan " + stringFor(typ)
	case BothDir:
		typeStr := stringFor(typ)
		if typeStr[0] == '<' {
			// typeStr is "<-chan T". The parentheses are necessary
			// to disambiguate "chan (<-chan T)" from "chan<- chan T".
			s = "chan (" + typeStr + ")"
		} else {
			s = "chan " + typeStr
		}
	}
	for _, tt := range typesByString(s) {
		ch := (*chanType)(unsafe.Pointer(tt))
		if ch.Elem == typ && ch.Dir == abi.ChanDir(dir) {
			ti, _ := lookupCache.LoadOrStore(ckey, toRType(tt))
			return ti.(Type)
		}
	}

	// Make a channel type, starting with the descriptor for an
	// unsafe.Pointer channel as a prototype.
	var ichan any = (chan unsafe.Pointer)(nil)
	prototype := *(**chanType)(unsafe.Pointer(&ichan))
	ch := *prototype
	ch.TFlag = abi.TFlagRegularMemory
	ch.Dir = abi.ChanDir(dir)
	ch.Str = resolveReflectName(newName(s, "", false, false))
	// Mix the direction into the element hash with the FNV-1 step.
	ch.Hash = fnv1(typ.Hash, 'c', byte(dir))
	ch.Elem = typ

	ti, _ := lookupCache.LoadOrStore(ckey, toRType(&ch.Type))
	return ti.(Type)
}
1701
1702
1703
1704
1705
1706
1707
// MapOf returns the map type with the given key and element types.
// For example, if k represents int and e represents string,
// MapOf(k, e) represents map[int]string.
//
// If the key type is not a valid map key type (that is, if it does
// not implement Go's == operator), MapOf panics.
func MapOf(key, elem Type) Type {
	ktyp := key.common()
	etyp := elem.common()

	// Only comparable types (those with an equality function) are
	// valid map keys.
	if ktyp.Equal == nil {
		panic("reflect.MapOf: invalid key type " + stringFor(ktyp))
	}

	// Look in cache.
	ckey := cacheKey{Map, ktyp, etyp, 0}
	if mt, ok := lookupCache.Load(ckey); ok {
		return mt.(Type)
	}

	// Look in known types.
	s := "map[" + stringFor(ktyp) + "]" + stringFor(etyp)
	for _, tt := range typesByString(s) {
		mt := (*mapType)(unsafe.Pointer(tt))
		if mt.Key == ktyp && mt.Elem == etyp {
			ti, _ := lookupCache.LoadOrStore(ckey, toRType(tt))
			return ti.(Type)
		}
	}

	// Make a map type.
	// Note: flag values must match those used in the TMAP case
	// in ../cmd/compile/internal/reflectdata/reflect.go:writeType.
	var imap any = (map[unsafe.Pointer]unsafe.Pointer)(nil)
	mt := **(**mapType)(unsafe.Pointer(&imap))
	mt.Str = resolveReflectName(newName(s, "", false, false))
	mt.TFlag = 0
	mt.Hash = fnv1(etyp.Hash, 'm', byte(ktyp.Hash>>24), byte(ktyp.Hash>>16), byte(ktyp.Hash>>8), byte(ktyp.Hash))
	mt.Key = ktyp
	mt.Elem = etyp
	mt.Bucket = bucketOf(ktyp, etyp)
	mt.Hasher = func(p unsafe.Pointer, seed uintptr) uintptr {
		return typehash(ktyp, p, seed)
	}
	mt.Flags = 0
	// Oversized keys/elements are stored indirectly (as pointers)
	// in the buckets; the flag bits record that choice.
	if ktyp.Size_ > abi.MapMaxKeyBytes {
		mt.KeySize = uint8(goarch.PtrSize)
		mt.Flags |= 1 // indirect key
	} else {
		mt.KeySize = uint8(ktyp.Size_)
	}
	if etyp.Size_ > abi.MapMaxElemBytes {
		mt.ValueSize = uint8(goarch.PtrSize)
		mt.Flags |= 2 // indirect value
	} else {
		mt.MapType.ValueSize = uint8(etyp.Size_)
	}
	mt.MapType.BucketSize = uint16(mt.Bucket.Size_)
	if isReflexive(ktyp) {
		mt.Flags |= 4 // reflexive key (k == k always)
	}
	if needKeyUpdate(ktyp) {
		mt.Flags |= 8 // overwrite existing key on update
	}
	if hashMightPanic(ktyp) {
		mt.Flags |= 16 // hashing the key can panic
	}
	mt.PtrToThis = 0

	ti, _ := lookupCache.LoadOrStore(ckey, toRType(&mt.Type))
	return ti.(Type)
}
1774
// funcTypes caches, per argument count n, the struct type used as the
// backing allocation for a constructed function type with n parameters.
var funcTypes []Type
var funcTypesMutex sync.Mutex

// initFuncTypes returns (building and caching it first if needed) the
// struct type {FuncType; Args [n]*rtype} used by FuncOf to allocate a
// function descriptor with room for n parameter type pointers.
func initFuncTypes(n int) Type {
	funcTypesMutex.Lock()
	defer funcTypesMutex.Unlock()
	if n >= len(funcTypes) {
		// Grow the cache; existing entries keep their slots.
		newFuncTypes := make([]Type, n+1)
		copy(newFuncTypes, funcTypes)
		funcTypes = newFuncTypes
	}
	if funcTypes[n] != nil {
		return funcTypes[n]
	}

	funcTypes[n] = StructOf([]StructField{
		{
			Name: "FuncType",
			Type: TypeOf(funcType{}),
		},
		{
			Name: "Args",
			Type: ArrayOf(n, TypeOf(&rtype{})),
		},
	})
	return funcTypes[n]
}
1802
1803
1804
1805
1806
1807
1808
1809
// FuncOf returns the function type with the given argument and result types.
// For example if k represents int and e represents string,
// FuncOf([]Type{k}, []Type{e}, false) represents func(int) string.
//
// The variadic argument controls whether the function is variadic. FuncOf
// panics if the in[len(in)-1] does not represent a slice and variadic is
// true.
func FuncOf(in, out []Type, variadic bool) Type {
	if variadic && (len(in) == 0 || in[len(in)-1].Kind() != Slice) {
		panic("reflect.FuncOf: last arg of variadic func must be slice")
	}

	// Make a func type prototype from an existing func value's type.
	var ifunc any = (func())(nil)
	prototype := *(**funcType)(unsafe.Pointer(&ifunc))
	n := len(in) + len(out)

	if n > 128 {
		panic("reflect.FuncOf: too many arguments")
	}

	// Allocate backing storage: a funcType header followed by n *rtype.
	o := New(initFuncTypes(n)).Elem()
	ft := (*funcType)(unsafe.Pointer(o.Field(0).Addr().Pointer()))
	args := unsafe.Slice((**rtype)(unsafe.Pointer(o.Field(1).Addr().Pointer())), n)[0:0:n]
	*ft = *prototype

	// Build a hash over the in/out types and populate the args array.
	var hash uint32
	for _, in := range in {
		t := in.(*rtype)
		args = append(args, t)
		hash = fnv1(hash, byte(t.t.Hash>>24), byte(t.t.Hash>>16), byte(t.t.Hash>>8), byte(t.t.Hash))
	}
	if variadic {
		hash = fnv1(hash, 'v')
	}
	hash = fnv1(hash, '.')
	for _, out := range out {
		t := out.(*rtype)
		args = append(args, t)
		hash = fnv1(hash, byte(t.t.Hash>>24), byte(t.t.Hash>>16), byte(t.t.Hash>>8), byte(t.t.Hash))
	}

	ft.TFlag = 0
	ft.Hash = hash
	ft.InCount = uint16(len(in))
	ft.OutCount = uint16(len(out))
	if variadic {
		// The top bit of OutCount flags a variadic function.
		ft.OutCount |= 1 << 15
	}

	// Look in cache (lock-free fast path).
	if ts, ok := funcLookupCache.m.Load(hash); ok {
		for _, t := range ts.([]*abi.Type) {
			if haveIdenticalUnderlyingType(&ft.Type, t, true) {
				return toRType(t)
			}
		}
	}

	// Not in cache: lock and retry, so only one goroutine builds/stores.
	funcLookupCache.Lock()
	defer funcLookupCache.Unlock()
	if ts, ok := funcLookupCache.m.Load(hash); ok {
		for _, t := range ts.([]*abi.Type) {
			if haveIdenticalUnderlyingType(&ft.Type, t, true) {
				return toRType(t)
			}
		}
	}

	// addToCache appends tt to the hash bucket (append-only slices make
	// concurrent unlocked reads safe).
	addToCache := func(tt *abi.Type) Type {
		var rts []*abi.Type
		if rti, ok := funcLookupCache.m.Load(hash); ok {
			rts = rti.([]*abi.Type)
		}
		funcLookupCache.m.Store(hash, append(rts, tt))
		return toType(tt)
	}

	// Look in known (compiler-generated) types by string representation.
	str := funcStr(ft)
	for _, tt := range typesByString(str) {
		if haveIdenticalUnderlyingType(&ft.Type, tt, true) {
			return addToCache(tt)
		}
	}

	// Populate the remaining fields of ft and store in cache.
	ft.Str = resolveReflectName(newName(str, "", false, false))
	ft.PtrToThis = 0
	return addToCache(&ft.Type)
}
1896 func stringFor(t *abi.Type) string {
1897 return toRType(t).String()
1898 }
1899
1900
// funcStr builds the string representation of a funcType,
// e.g. "func(int, ...string) (bool, error)".
func funcStr(ft *funcType) string {
	repr := make([]byte, 0, 64)
	repr = append(repr, "func("...)
	for i, t := range ft.InSlice() {
		if i > 0 {
			repr = append(repr, ", "...)
		}
		if ft.IsVariadic() && i == int(ft.InCount)-1 {
			// Render the final slice parameter as "...Elem".
			repr = append(repr, "..."...)
			repr = append(repr, stringFor((*sliceType)(unsafe.Pointer(t)).Elem)...)
		} else {
			repr = append(repr, stringFor(t)...)
		}
	}
	repr = append(repr, ')')
	out := ft.OutSlice()
	// A single result is unparenthesized; multiple results are grouped.
	if len(out) == 1 {
		repr = append(repr, ' ')
	} else if len(out) > 1 {
		repr = append(repr, " ("...)
	}
	for i, t := range out {
		if i > 0 {
			repr = append(repr, ", "...)
		}
		repr = append(repr, stringFor(t)...)
	}
	if len(out) > 1 {
		repr = append(repr, ')')
	}
	return string(repr)
}
1933
1934
1935
// isReflexive reports whether the == operation on the type is reflexive.
// That is, x == x for all values x of type t.
func isReflexive(t *abi.Type) bool {
	switch Kind(t.Kind()) {
	case Bool, Int, Int8, Int16, Int32, Int64, Uint, Uint8, Uint16, Uint32, Uint64, Uintptr, Chan, Pointer, String, UnsafePointer:
		return true
	case Float32, Float64, Complex64, Complex128, Interface:
		// Floating point: NaN != NaN. Interfaces may contain floats.
		return false
	case Array:
		// An array is reflexive iff its element type is.
		tt := (*arrayType)(unsafe.Pointer(t))
		return isReflexive(tt.Elem)
	case Struct:
		// A struct is reflexive iff every field type is.
		tt := (*structType)(unsafe.Pointer(t))
		for _, f := range tt.Fields {
			if !isReflexive(f.Typ) {
				return false
			}
		}
		return true
	default:
		// Func, Map, Slice, Invalid: not valid map key types.
		panic("isReflexive called on non-key type " + stringFor(t))
	}
}
1958
1959
// needKeyUpdate reports whether map overwrites require the key to be copied.
func needKeyUpdate(t *abi.Type) bool {
	switch Kind(t.Kind()) {
	case Bool, Int, Int8, Int16, Int32, Int64, Uint, Uint8, Uint16, Uint32, Uint64, Uintptr, Chan, Pointer, UnsafePointer:
		return false
	case Float32, Float64, Complex64, Complex128, Interface, String:
		// Keys that can compare equal without being bit-identical
		// (e.g. +0.0 and -0.0, or equal strings with different backing
		// storage) should be updated on overwrite.
		return true
	case Array:
		tt := (*arrayType)(unsafe.Pointer(t))
		return needKeyUpdate(tt.Elem)
	case Struct:
		tt := (*structType)(unsafe.Pointer(t))
		for _, f := range tt.Fields {
			if needKeyUpdate(f.Typ) {
				return true
			}
		}
		return false
	default:
		// Func, Map, Slice, Invalid: not valid map key types.
		panic("needKeyUpdate called on non-key type " + stringFor(t))
	}
}
1985
1986
1987 func hashMightPanic(t *abi.Type) bool {
1988 switch Kind(t.Kind()) {
1989 case Interface:
1990 return true
1991 case Array:
1992 tt := (*arrayType)(unsafe.Pointer(t))
1993 return hashMightPanic(tt.Elem)
1994 case Struct:
1995 tt := (*structType)(unsafe.Pointer(t))
1996 for _, f := range tt.Fields {
1997 if hashMightPanic(f.Typ) {
1998 return true
1999 }
2000 }
2001 return false
2002 default:
2003 return false
2004 }
2005 }
2006
// bucketOf constructs the GC-only bucket type for a map with the given
// key and element types. The result exists only so the garbage collector
// can scan map buckets; it carries size, alignment, and a pointer mask.
// NOTE(review): the layout here must stay in sync with the runtime's map
// bucket layout — confirm against runtime/map.go before changing.
func bucketOf(ktyp, etyp *abi.Type) *abi.Type {
	// Oversized keys/elements are stored indirectly, so the bucket holds
	// pointers to them instead.
	if ktyp.Size_ > abi.MapMaxKeyBytes {
		ktyp = ptrTo(ktyp)
	}
	if etyp.Size_ > abi.MapMaxElemBytes {
		etyp = ptrTo(etyp)
	}

	// Since keys and elements are bounded in size (see above), they are
	// guaranteed to carry pointer masks rather than GC programs, so the
	// bucket itself can be described by a mask.
	var gcdata *byte
	var ptrdata uintptr

	// Bucket layout: tophash bytes (1 per slot), then keys, then elements,
	// then one trailing overflow pointer.
	size := abi.MapBucketCount*(1+ktyp.Size_+etyp.Size_) + goarch.PtrSize
	if size&uintptr(ktyp.Align_-1) != 0 || size&uintptr(etyp.Align_-1) != 0 {
		panic("reflect: bad size computation in MapOf")
	}

	if ktyp.Pointers() || etyp.Pointers() {
		nptr := (abi.MapBucketCount*(1+ktyp.Size_+etyp.Size_) + goarch.PtrSize) / goarch.PtrSize
		n := (nptr + 7) / 8

		// Runtime needs pointer masks to be a multiple of uintptr in size.
		n = (n + goarch.PtrSize - 1) &^ (goarch.PtrSize - 1)
		mask := make([]byte, n)
		// Skip the tophash words at the start of the bucket.
		base := uintptr(abi.MapBucketCount / goarch.PtrSize)

		if ktyp.Pointers() {
			emitGCMask(mask, base, ktyp, abi.MapBucketCount)
		}
		base += abi.MapBucketCount * ktyp.Size_ / goarch.PtrSize

		if etyp.Pointers() {
			emitGCMask(mask, base, etyp, abi.MapBucketCount)
		}
		base += abi.MapBucketCount * etyp.Size_ / goarch.PtrSize

		// Mark the trailing overflow pointer.
		word := base
		mask[word/8] |= 1 << (word % 8)
		gcdata = &mask[0]
		ptrdata = (word + 1) * goarch.PtrSize

		// The overflow pointer must be the last word of the bucket.
		if ptrdata != size {
			panic("reflect: bad layout computation in MapOf")
		}
	}

	b := &abi.Type{
		Align_:   goarch.PtrSize,
		Size_:    size,
		Kind_:    abi.Struct,
		PtrBytes: ptrdata,
		GCData:   gcdata,
	}
	// Give the bucket a readable name for debugging.
	s := "bucket(" + stringFor(ktyp) + "," + stringFor(etyp) + ")"
	b.Str = resolveReflectName(newName(s, "", false, false))
	return b
}
2069
// gcSlice returns bytes [begin, end) of t's GC metadata, viewed through a
// huge fixed-size array type to obtain a slice without copying.
func (t *rtype) gcSlice(begin, end uintptr) []byte {
	return (*[1 << 30]byte)(unsafe.Pointer(t.t.GCData))[begin:end:end]
}
2073
2074
2075
// emitGCMask writes the GC mask for [n]typ into out, starting at bit
// offset base. typ must carry a plain pointer mask, not a GC program.
func emitGCMask(out []byte, base uintptr, typ *abi.Type, n uintptr) {
	if typ.Kind_&abi.KindGCProg != 0 {
		panic("reflect: unexpected GC program")
	}
	ptrs := typ.PtrBytes / goarch.PtrSize  // pointer-bearing words per element
	words := typ.Size_ / goarch.PtrSize    // total words per element
	mask := typ.GcSlice(0, (ptrs+7)/8)
	// Replicate each set bit of the element's mask across all n elements.
	for j := uintptr(0); j < ptrs; j++ {
		if (mask[j/8]>>(j%8))&1 != 0 {
			for i := uintptr(0); i < n; i++ {
				k := base + i*words + j
				out[k/8] |= 1 << (k % 8)
			}
		}
	}
}
2092
2093
2094
// appendGCProg appends the GC program describing the pointer-bearing
// prefix of typ to dst and returns the extended slice.
func appendGCProg(dst []byte, typ *abi.Type) []byte {
	if typ.Kind_&abi.KindGCProg != 0 {
		// Element already has a GC program; copy its body (the first 4
		// bytes are the program length, the final byte is the terminating
		// 0, both of which we drop here).
		n := uintptr(*(*uint32)(unsafe.Pointer(typ.GCData)))
		prog := typ.GcSlice(4, 4+n-1)
		return append(dst, prog...)
	}

	// Element is small, with a pointer mask; use the mask bytes as
	// literal bits in the program.
	ptrs := typ.PtrBytes / goarch.PtrSize
	mask := typ.GcSlice(0, (ptrs+7)/8)

	// Emit 120-bit chunks of full bytes (the literal-bits opcode caps at
	// 127 bits; 120 keeps us on whole-byte boundaries).
	for ; ptrs > 120; ptrs -= 120 {
		dst = append(dst, 120)
		dst = append(dst, mask[:15]...)
		mask = mask[15:]
	}

	dst = append(dst, byte(ptrs))
	dst = append(dst, mask...)
	return dst
}
2118
2119
2120
// SliceOf returns the slice type with element type t.
// For example, if t represents int, SliceOf(t) represents []int.
func SliceOf(t Type) Type {
	typ := t.common()

	// Look in cache.
	ckey := cacheKey{Slice, typ, nil, 0}
	if slice, ok := lookupCache.Load(ckey); ok {
		return slice.(Type)
	}

	// Look in known (compiler-generated) types by string representation.
	s := "[]" + stringFor(typ)
	for _, tt := range typesByString(s) {
		slice := (*sliceType)(unsafe.Pointer(tt))
		if slice.Elem == typ {
			ti, _ := lookupCache.LoadOrStore(ckey, toRType(tt))
			return ti.(Type)
		}
	}

	// Make a slice type from a prototype.
	var islice any = ([]unsafe.Pointer)(nil)
	prototype := *(**sliceType)(unsafe.Pointer(&islice))
	slice := *prototype
	slice.TFlag = 0
	slice.Str = resolveReflectName(newName(s, "", false, false))
	slice.Hash = fnv1(typ.Hash, '[')
	slice.Elem = typ
	slice.PtrToThis = 0

	ti, _ := lookupCache.LoadOrStore(ckey, toRType(&slice.Type))
	return ti.(Type)
}
2153
2154
2155
2156
// structLookupCache caches StructOf lookups.
// StructOf does not share the common lookupCache since it needs to pin
// the memory associated with the generated struct types.
var structLookupCache struct {
	sync.Mutex // Guards stores (but not loads) to m.

	// m is a map[uint32][]Type keyed by the hash computed in StructOf.
	// The cached slices are append-only and thus safe for concurrent
	// unlocked reads.
	m sync.Map
}

// structTypeUncommon is a structType with trailing space for method
// metadata, used when a generated struct has no methods to store.
type structTypeUncommon struct {
	structType
	u uncommonType
}
2169
2170
// isLetter reports whether a given 'rune' is classified as a Letter.
// For identifier purposes that means an ASCII letter, an underscore, or
// any non-ASCII rune that Unicode classifies as a letter.
func isLetter(ch rune) bool {
	switch {
	case ch == '_':
		return true
	case 'a' <= ch && ch <= 'z', 'A' <= ch && ch <= 'Z':
		return true
	default:
		return ch >= utf8.RuneSelf && unicode.IsLetter(ch)
	}
}
2174
2175
2176
2177
2178
2179
2180
// isValidFieldName checks that a string is a valid (struct) field name or not.
// According to the language spec, a field name should be an identifier.
//
// identifier = letter { letter | unicode_digit } .
// letter = unicode_letter | "_" .
func isValidFieldName(fieldName string) bool {
	if fieldName == "" {
		return false
	}
	for i, c := range fieldName {
		// A letter is '_', an ASCII letter, or a non-ASCII Unicode letter.
		letter := c == '_' ||
			('a' <= c && c <= 'z') ||
			('A' <= c && c <= 'Z') ||
			(c >= utf8.RuneSelf && unicode.IsLetter(c))
		if i == 0 && !letter {
			return false
		}
		if !letter && !unicode.IsDigit(c) {
			return false
		}
	}
	return true
}
2194
2195
// isRegularMemory reports whether t can be compared bit-for-bit as plain
// memory — i.e. it has no special equality cases (floats, strings,
// interfaces), no blank fields, and no padding between fields.
func isRegularMemory(t Type) bool {
	switch t.Kind() {
	case Array:
		elem := t.Elem()
		if isRegularMemory(elem) {
			return true
		}
		// A zero-length array of a merely comparable element type still
		// compares as regular memory (there is nothing to compare).
		return elem.Comparable() && t.Len() == 0
	case Int8, Int16, Int32, Int64, Int, Uint8, Uint16, Uint32, Uint64, Uint, Uintptr, Chan, Pointer, Bool, UnsafePointer:
		return true
	case Struct:
		num := t.NumField()
		switch num {
		case 0:
			return true
		case 1:
			// A single blank field makes the struct non-regular.
			field := t.Field(0)
			if field.Name == "_" {
				return false
			}
			return isRegularMemory(field.Type)
		default:
			// Every field must itself be regular memory, non-blank, and
			// not followed by padding.
			for i := range num {
				field := t.Field(i)
				if field.Name == "_" || !isRegularMemory(field.Type) || isPaddedField(t, i) {
					return false
				}
			}
			return true
		}
	}
	return false
}
2229
2230
2231
2232 func isPaddedField(t Type, i int) bool {
2233 field := t.Field(i)
2234 if i+1 < t.NumField() {
2235 return field.Offset+field.Type.Size() != t.Field(i+1).Offset
2236 }
2237 return field.Offset+field.Type.Size() != t.Size()
2238 }
2239
2240
2241
2242
2243
2244
2245
// StructOf returns the struct type containing fields.
// The Offset and Index fields are ignored and computed as they would be
// by the compiler.
//
// StructOf currently does not support promoted methods of embedded fields
// and panics if passed unexported StructFields.
func StructOf(fields []StructField) Type {
	var (
		hash       = fnv1(0, []byte("struct {")...)
		size       uintptr
		typalign   uint8
		comparable = true
		methods    []abi.Method

		fs   = make([]structField, len(fields))
		repr = make([]byte, 0, 64)
		fset = map[string]struct{}{} // fields' names

		hasGCProg = false // records whether a struct-field type has a GCProg
	)

	lastzero := uintptr(0)
	repr = append(repr, "struct {"...)
	pkgpath := ""
	for i, field := range fields {
		if field.Name == "" {
			panic("reflect.StructOf: field " + strconv.Itoa(i) + " has no name")
		}
		if !isValidFieldName(field.Name) {
			panic("reflect.StructOf: field " + strconv.Itoa(i) + " has invalid name")
		}
		if field.Type == nil {
			panic("reflect.StructOf: field " + strconv.Itoa(i) + " has no type")
		}
		f, fpkgpath := runtimeStructField(field)
		ft := f.Typ
		if ft.Kind_&abi.KindGCProg != 0 {
			hasGCProg = true
		}
		// All unexported fields must share one package path.
		if fpkgpath != "" {
			if pkgpath == "" {
				pkgpath = fpkgpath
			} else if pkgpath != fpkgpath {
				panic("reflect.Struct: fields with different PkgPath " + pkgpath + " and " + fpkgpath)
			}
		}

		// Update string and hash.
		name := f.Name.Name()
		hash = fnv1(hash, []byte(name)...)
		repr = append(repr, (" " + name)...)
		if f.Embedded() {
			// Embedded field: collect promoted methods.
			if f.Typ.Kind() == abi.Pointer {
				// Embedded ** and *interface{} are illegal.
				elem := ft.Elem()
				if k := elem.Kind(); k == abi.Pointer || k == abi.Interface {
					panic("reflect.StructOf: illegal embedded field type " + stringFor(ft))
				}
			}

			switch Kind(f.Typ.Kind()) {
			case Interface:
				ift := (*interfaceType)(unsafe.Pointer(ft))
				for _, m := range ift.Methods {
					if pkgPath(ift.nameOff(m.Name)) != "" {
						// Unexported interface methods are not supported.
						panic("reflect: embedded interface with unexported method(s) not implemented")
					}

					// Install a stub that panics if the promoted method
					// is actually called.
					fnStub := resolveReflectText(unsafe.Pointer(abi.FuncPCABIInternal(embeddedIfaceMethStub)))
					methods = append(methods, abi.Method{
						Name: resolveReflectName(ift.nameOff(m.Name)),
						Mtyp: resolveReflectType(ift.typeOff(m.Typ)),
						Ifn:  fnStub,
						Tfn:  fnStub,
					})
				}
			case Pointer:
				ptr := (*ptrType)(unsafe.Pointer(ft))
				if unt := ptr.Uncommon(); unt != nil {
					if i > 0 && unt.Mcount > 0 {
						// Promotion only works when the embedded type is
						// the first field.
						panic("reflect: embedded type with methods not implemented if type is not first field")
					}
					if len(fields) > 1 {
						panic("reflect: embedded type with methods not implemented if there is more than one field")
					}
					for _, m := range unt.Methods() {
						mname := nameOffFor(ft, m.Name)
						if pkgPath(mname) != "" {
							// Unexported methods are not supported.
							panic("reflect: embedded interface with unexported method(s) not implemented")
						}
						methods = append(methods, abi.Method{
							Name: resolveReflectName(mname),
							Mtyp: resolveReflectType(typeOffFor(ft, m.Mtyp)),
							Ifn:  resolveReflectText(textOffFor(ft, m.Ifn)),
							Tfn:  resolveReflectText(textOffFor(ft, m.Tfn)),
						})
					}
				}
				// Also promote methods declared on the pointee type.
				if unt := ptr.Elem.Uncommon(); unt != nil {
					for _, m := range unt.Methods() {
						mname := nameOffFor(ft, m.Name)
						if pkgPath(mname) != "" {
							// Unexported methods are not supported.
							panic("reflect: embedded interface with unexported method(s) not implemented")
						}
						methods = append(methods, abi.Method{
							Name: resolveReflectName(mname),
							Mtyp: resolveReflectType(typeOffFor(ptr.Elem, m.Mtyp)),
							Ifn:  resolveReflectText(textOffFor(ptr.Elem, m.Ifn)),
							Tfn:  resolveReflectText(textOffFor(ptr.Elem, m.Tfn)),
						})
					}
				}
			default:
				if unt := ft.Uncommon(); unt != nil {
					if i > 0 && unt.Mcount > 0 {
						// Promotion only works when the embedded type is
						// the first field.
						panic("reflect: embedded type with methods not implemented if type is not first field")
					}
					if len(fields) > 1 && ft.Kind_&abi.KindDirectIface != 0 {
						panic("reflect: embedded type with methods not implemented for non-pointer type")
					}
					for _, m := range unt.Methods() {
						mname := nameOffFor(ft, m.Name)
						if pkgPath(mname) != "" {
							// Unexported methods are not supported.
							panic("reflect: embedded interface with unexported method(s) not implemented")
						}
						methods = append(methods, abi.Method{
							Name: resolveReflectName(mname),
							Mtyp: resolveReflectType(typeOffFor(ft, m.Mtyp)),
							Ifn:  resolveReflectText(textOffFor(ft, m.Ifn)),
							Tfn:  resolveReflectText(textOffFor(ft, m.Tfn)),
						})

					}
				}
			}
		}
		// Blank fields may repeat; any other duplicate name is an error.
		if _, dup := fset[name]; dup && name != "_" {
			panic("reflect.StructOf: duplicate field " + name)
		}
		fset[name] = struct{}{}

		hash = fnv1(hash, byte(ft.Hash>>24), byte(ft.Hash>>16), byte(ft.Hash>>8), byte(ft.Hash))

		repr = append(repr, (" " + stringFor(ft))...)
		if f.Name.HasTag() {
			hash = fnv1(hash, []byte(f.Name.Tag())...)
			repr = append(repr, (" " + strconv.Quote(f.Name.Tag()))...)
		}
		if i < len(fields)-1 {
			repr = append(repr, ';')
		}

		comparable = comparable && (ft.Equal != nil)

		// Lay out the field: align, accumulate size, detect overflow.
		offset := align(size, uintptr(ft.Align_))
		if offset < size {
			panic("reflect.StructOf: struct size would exceed virtual address space")
		}
		if ft.Align_ > typalign {
			typalign = ft.Align_
		}
		size = offset + ft.Size_
		if size < offset {
			panic("reflect.StructOf: struct size would exceed virtual address space")
		}
		f.Offset = offset

		if ft.Size_ == 0 {
			lastzero = size
		}

		fs[i] = f
	}

	if size > 0 && lastzero == size {
		// This is a non-zero sized struct that ends in a
		// zero-sized field. We add an extra byte of padding,
		// to ensure that taking the address of that field
		// does not manufacture a pointer to the next object.
		size++
		if size == 0 {
			panic("reflect.StructOf: struct size would exceed virtual address space")
		}
	}

	var typ *structType
	var ut *uncommonType

	if len(methods) == 0 {
		t := new(structTypeUncommon)
		typ = &t.structType
		ut = &t.u
	} else {
		// A *rtype representing a struct is followed directly in memory
		// by an array of method objects representing the methods attached
		// to the struct. To get the same layout for a run-time generated
		// type, we need an array directly following the uncommonType
		// memory.
		tt := New(StructOf([]StructField{
			{Name: "S", Type: TypeOf(structType{})},
			{Name: "U", Type: TypeOf(uncommonType{})},
			{Name: "M", Type: ArrayOf(len(methods), TypeOf(methods[0]))},
		}))

		typ = (*structType)(tt.Elem().Field(0).Addr().UnsafePointer())
		ut = (*uncommonType)(tt.Elem().Field(1).Addr().UnsafePointer())

		copy(tt.Elem().Field(2).Slice(0, len(methods)).Interface().([]abi.Method), methods)
	}
	// NOTE(review): if embedding multiple types ever becomes allowed,
	// methods must be sorted before this insertion.
	ut.Mcount = uint16(len(methods))
	ut.Xcount = ut.Mcount
	ut.Moff = uint32(unsafe.Sizeof(uncommonType{}))

	if len(fs) > 0 {
		repr = append(repr, ' ')
	}
	repr = append(repr, '}')
	hash = fnv1(hash, '}')
	str := string(repr)

	// Round the size up to be a multiple of the alignment.
	s := align(size, uintptr(typalign))
	if s < size {
		panic("reflect.StructOf: struct size would exceed virtual address space")
	}
	size = s

	// Make the struct type from a prototype.
	var istruct any = struct{}{}
	prototype := *(**structType)(unsafe.Pointer(&istruct))
	*typ = *prototype
	typ.Fields = fs
	if pkgpath != "" {
		typ.PkgPath = newName(pkgpath, "", false, false)
	}

	// Look in cache (lock-free fast path).
	if ts, ok := structLookupCache.m.Load(hash); ok {
		for _, st := range ts.([]Type) {
			t := st.common()
			if haveIdenticalUnderlyingType(&typ.Type, t, true) {
				return toType(t)
			}
		}
	}

	// Not in cache: lock and retry.
	structLookupCache.Lock()
	defer structLookupCache.Unlock()
	if ts, ok := structLookupCache.m.Load(hash); ok {
		for _, st := range ts.([]Type) {
			t := st.common()
			if haveIdenticalUnderlyingType(&typ.Type, t, true) {
				return toType(t)
			}
		}
	}

	// addToCache appends t to the hash bucket (append-only slices make
	// unlocked reads safe).
	addToCache := func(t Type) Type {
		var ts []Type
		if ti, ok := structLookupCache.m.Load(hash); ok {
			ts = ti.([]Type)
		}
		structLookupCache.m.Store(hash, append(ts, t))
		return t
	}

	// Look in known (compiler-generated) types by string representation.
	for _, t := range typesByString(str) {
		if haveIdenticalUnderlyingType(&typ.Type, t, true) {
			// Even if 't' wasn't a structType with methods, we should be
			// OK as the trailing uncommonType memory won't be accessed
			// unless TFlagUncommon is set.
			return addToCache(toType(t))
		}
	}

	typ.Str = resolveReflectName(newName(str, "", false, false))
	if isRegularMemory(toType(&typ.Type)) {
		typ.TFlag = abi.TFlagRegularMemory
	} else {
		typ.TFlag = 0
	}
	typ.Hash = hash
	typ.Size_ = size
	typ.PtrBytes = typeptrdata(&typ.Type)
	typ.Align_ = typalign
	typ.FieldAlign_ = typalign
	typ.PtrToThis = 0
	if len(methods) > 0 {
		typ.TFlag |= abi.TFlagUncommon
	}

	if hasGCProg {
		// At least one field has a GC program, so the struct's GC
		// metadata must itself be a program.
		lastPtrField := 0
		for i, ft := range fs {
			if ft.Typ.Pointers() {
				lastPtrField = i
			}
		}
		prog := []byte{0, 0, 0, 0} // first 4 bytes hold the program length
		var off uintptr
		for i, ft := range fs {
			if i > lastPtrField {
				// gcprog should not include anything for any field after
				// the last field that contains pointer data.
				break
			}
			if !ft.Typ.Pointers() {
				// Ignore pointerless fields.
				continue
			}
			// Pad from off to the start of this field with zero bits.
			if ft.Offset > off {
				n := (ft.Offset - off) / goarch.PtrSize
				prog = append(prog, 0x01, 0x00) // emit a 0 bit
				if n > 1 {
					prog = append(prog, 0x81)      // repeat last bit
					prog = appendVarint(prog, n-1) // n-1 times
				}
				off = ft.Offset
			}

			prog = appendGCProg(prog, ft.Typ)
			off += ft.Typ.PtrBytes
		}
		prog = append(prog, 0) // terminator
		*(*uint32)(unsafe.Pointer(&prog[0])) = uint32(len(prog) - 4)
		typ.Kind_ |= abi.KindGCProg
		typ.GCData = &prog[0]
	} else {
		// All fields carry pointer masks; build a combined mask.
		typ.Kind_ &^= abi.KindGCProg
		bv := new(bitVector)
		addTypeBits(bv, 0, &typ.Type)
		if len(bv.data) > 0 {
			typ.GCData = &bv.data[0]
		}
	}
	typ.Equal = nil
	if comparable {
		// Compare field by field using each field type's own equality.
		typ.Equal = func(p, q unsafe.Pointer) bool {
			for _, ft := range typ.Fields {
				pi := add(p, ft.Offset, "&x.field safe")
				qi := add(q, ft.Offset, "&x.field safe")
				if !ft.Typ.Equal(pi, qi) {
					return false
				}
			}
			return true
		}
	}

	switch {
	case len(fs) == 1 && !ifaceIndir(fs[0].Typ):
		// Structs of one direct-interface field can be stored directly.
		typ.Kind_ |= abi.KindDirectIface
	default:
		typ.Kind_ &^= abi.KindDirectIface
	}

	return addToCache(toType(&typ.Type))
}
2617
// embeddedIfaceMethStub is installed by StructOf as the body of methods
// promoted from an embedded interface; calling such a method panics.
func embeddedIfaceMethStub() {
	panic("reflect: StructOf does not support methods of embedded interfaces")
}
2621
2622
2623
2624
// runtimeStructField takes a StructField value passed to StructOf and
// returns both the corresponding internal representation, of type
// structField, and the pkgpath value to use for this field.
func runtimeStructField(field StructField) (structField, string) {
	if field.Anonymous && field.PkgPath != "" {
		panic("reflect.StructOf: field \"" + field.Name + "\" is anonymous but has PkgPath set")
	}

	if field.IsExported() {
		// Best-effort check for misuse: IsExported only means PkgPath is
		// empty, so a name starting with a lowercase letter or '_' here
		// indicates an unexported field that is missing its PkgPath.
		c := field.Name[0]
		if 'a' <= c && c <= 'z' || c == '_' {
			panic("reflect.StructOf: field \"" + field.Name + "\" is unexported but missing PkgPath")
		}
	}

	// Register the field type with the runtime before using it.
	resolveReflectType(field.Type.common())
	f := structField{
		Name:   newName(field.Name, string(field.Tag), field.IsExported(), field.Anonymous),
		Typ:    field.Type.common(),
		Offset: 0,
	}
	return f, field.PkgPath
}
2647
2648
2649
2650
// typeptrdata returns the length in bytes of the prefix of t containing
// pointer data. Anything after this offset is scalar data.
// NOTE(review): presumably kept in sync with the compiler's equivalent
// computation — confirm before changing.
func typeptrdata(t *abi.Type) uintptr {
	switch t.Kind() {
	case abi.Struct:
		st := (*structType)(unsafe.Pointer(t))
		// Find the last field that has pointers.
		field := -1
		for i := range st.Fields {
			ft := st.Fields[i].Typ
			if ft.Pointers() {
				field = i
			}
		}
		if field == -1 {
			return 0
		}
		f := st.Fields[field]
		return f.Offset + f.Typ.PtrBytes

	default:
		panic("reflect.typeptrdata: unexpected type, " + stringFor(t))
	}
}
2673
2674
2675
2676
2677
2678
// ArrayOf returns the array type with the given length and element type.
// For example, if t represents int, ArrayOf(5, t) represents [5]int.
//
// If the resulting type would be larger than the available address space,
// ArrayOf panics.
func ArrayOf(length int, elem Type) Type {
	if length < 0 {
		panic("reflect: negative length passed to ArrayOf")
	}

	typ := elem.common()

	// Look in cache.
	ckey := cacheKey{Array, typ, nil, uintptr(length)}
	if array, ok := lookupCache.Load(ckey); ok {
		return array.(Type)
	}

	// Look in known (compiler-generated) types by string representation.
	s := "[" + strconv.Itoa(length) + "]" + stringFor(typ)
	for _, tt := range typesByString(s) {
		array := (*arrayType)(unsafe.Pointer(tt))
		if array.Elem == typ {
			ti, _ := lookupCache.LoadOrStore(ckey, toRType(tt))
			return ti.(Type)
		}
	}

	// Make an array type from a prototype.
	var iarray any = [1]unsafe.Pointer{}
	prototype := *(**arrayType)(unsafe.Pointer(&iarray))
	array := *prototype
	array.TFlag = typ.TFlag & abi.TFlagRegularMemory
	array.Str = resolveReflectName(newName(s, "", false, false))
	array.Hash = fnv1(typ.Hash, '[')
	// Mix the length into the hash, low byte first.
	for n := uint32(length); n > 0; n >>= 8 {
		array.Hash = fnv1(array.Hash, byte(n))
	}
	array.Hash = fnv1(array.Hash, ']')
	array.Elem = typ
	array.PtrToThis = 0
	// Reject lengths whose total size would overflow uintptr.
	if typ.Size_ > 0 {
		max := ^uintptr(0) / typ.Size_
		if uintptr(length) > max {
			panic("reflect.ArrayOf: array size would exceed virtual address space")
		}
	}
	array.Size_ = typ.Size_ * uintptr(length)
	if length > 0 && typ.Pointers() {
		// Pointer data ends inside the last element.
		array.PtrBytes = typ.Size_*uintptr(length-1) + typ.PtrBytes
	}
	array.Align_ = typ.Align_
	array.FieldAlign_ = typ.FieldAlign_
	array.Len = uintptr(length)
	array.Slice = &(SliceOf(elem).(*rtype).t)

	switch {
	case !typ.Pointers() || array.Size_ == 0:
		// No pointers at all.
		array.GCData = nil
		array.PtrBytes = 0

	case length == 1:
		// In memory, a 1-element array looks just like the element.
		array.Kind_ |= typ.Kind_ & abi.KindGCProg
		array.GCData = typ.GCData
		array.PtrBytes = typ.PtrBytes

	case typ.Kind_&abi.KindGCProg == 0 && array.Size_ <= abi.MaxPtrmaskBytes*8*goarch.PtrSize:
		// Element is small with a pointer mask, and the array is still
		// small enough for a mask: replicate the element mask.
		n := (array.PtrBytes/goarch.PtrSize + 7) / 8
		// Runtime needs pointer masks to be a multiple of uintptr in size.
		n = (n + goarch.PtrSize - 1) &^ (goarch.PtrSize - 1)
		mask := make([]byte, n)
		emitGCMask(mask, 0, typ, array.Len)
		array.GCData = &mask[0]

	default:
		// Create a GC program that emits one element and then repeats it
		// to cover the whole array.
		prog := []byte{0, 0, 0, 0} // first 4 bytes hold the program length
		prog = appendGCProg(prog, typ)
		// Pad from the element's pointer data to its full size.
		elemPtrs := typ.PtrBytes / goarch.PtrSize
		elemWords := typ.Size_ / goarch.PtrSize
		if elemPtrs < elemWords {
			// Emit a literal 0 bit, then repeat it as needed.
			prog = append(prog, 0x01, 0x00)
			if elemPtrs+1 < elemWords {
				prog = append(prog, 0x81)
				prog = appendVarint(prog, elemWords-elemPtrs-1)
			}
		}
		// Repeat the element pattern length-1 more times.
		if elemWords < 0x80 {
			prog = append(prog, byte(elemWords|0x80))
		} else {
			prog = append(prog, 0x80)
			prog = appendVarint(prog, elemWords)
		}
		prog = appendVarint(prog, uintptr(length)-1)
		prog = append(prog, 0) // terminator
		*(*uint32)(unsafe.Pointer(&prog[0])) = uint32(len(prog) - 4)
		array.Kind_ |= abi.KindGCProg
		array.GCData = &prog[0]
		array.PtrBytes = array.Size_ // must match the program above
	}

	etyp := typ
	esize := etyp.Size()

	array.Equal = nil
	if eequal := etyp.Equal; eequal != nil {
		// Arrays compare element by element with the element's equality.
		array.Equal = func(p, q unsafe.Pointer) bool {
			for i := 0; i < length; i++ {
				pi := arrayAt(p, i, esize, "i < length")
				qi := arrayAt(q, i, esize, "i < length")
				if !eequal(pi, qi) {
					return false
				}

			}
			return true
		}
	}

	switch {
	case length == 1 && !ifaceIndir(typ):
		// An array of one direct-interface element can be stored directly.
		array.Kind_ |= abi.KindDirectIface
	default:
		array.Kind_ &^= abi.KindDirectIface
	}

	ti, _ := lookupCache.LoadOrStore(ckey, toRType(&array.Type))
	return ti.(Type)
}
2813
// appendVarint appends v to x in the GC program's varint encoding:
// seven bits per byte, least-significant group first, with the high bit
// set on every byte except the last.
func appendVarint(x []byte, v uintptr) []byte {
	for v >= 0x80 {
		x = append(x, byte(v)|0x80)
		v >>= 7
	}
	return append(x, byte(v))
}
2821
2822
2823
2824
2825
2826
2827 func toType(t *abi.Type) Type {
2828 if t == nil {
2829 return nil
2830 }
2831 return toRType(t)
2832 }
2833
// layoutKey identifies one funcLayout cache entry: a function type
// together with an optional receiver type.
type layoutKey struct {
	ftyp *funcType // function signature
	rcvr *abi.Type // receiver type, or nil if none
}

// layoutType is the cached result of funcLayout.
type layoutType struct {
	t         *abi.Type  // GC-only frame type
	framePool *sync.Pool // pool of allocated frames of type t
	abid      abiDesc    // computed calling-convention description
}

// layoutCache maps layoutKey to layoutType.
var layoutCache sync.Map
2846
2847
2848
2849
2850
2851
2852
2853
// funcLayout computes a struct type representing the layout of the
// stack-assigned function arguments and return values for the function
// type t. If rcvr != nil, rcvr specifies the type of the receiver.
// The returned type exists only for GC, so it only carries size and a
// pointer bitmap, plus a name for possible debugging use.
func funcLayout(t *funcType, rcvr *abi.Type) (frametype *abi.Type, framePool *sync.Pool, abid abiDesc) {
	if t.Kind() != abi.Func {
		panic("reflect: funcLayout of non-func type " + stringFor(&t.Type))
	}
	if rcvr != nil && rcvr.Kind() == abi.Interface {
		panic("reflect: funcLayout with interface receiver " + stringFor(rcvr))
	}
	k := layoutKey{t, rcvr}
	if lti, ok := layoutCache.Load(k); ok {
		lt := lti.(layoutType)
		return lt.t, lt.framePool, lt.abid
	}

	// Compute the ABI description (register/stack assignment).
	abid = newAbiDesc(t, rcvr)

	// Build a GC-only type describing the stack frame.
	x := &abi.Type{
		Align_: goarch.PtrSize,
		// Size covers arguments plus results, rounded to pointer size;
		// spill space lives in reflectcall's own frame, not here.
		Size_:    align(abid.retOffset+abid.ret.stackBytes, goarch.PtrSize),
		PtrBytes: uintptr(abid.stackPtrs.n) * goarch.PtrSize,
	}
	if abid.stackPtrs.n > 0 {
		x.GCData = &abid.stackPtrs.data[0]
	}

	var s string
	if rcvr != nil {
		s = "methodargs(" + stringFor(rcvr) + ")(" + stringFor(&t.Type) + ")"
	} else {
		s = "funcargs(" + stringFor(&t.Type) + ")"
	}
	x.Str = resolveReflectName(newName(s, "", false, false))

	// Cache the result for future callers. LoadOrStore makes concurrent
	// computations converge on a single stored entry.
	framePool = &sync.Pool{New: func() any {
		return unsafe_New(x)
	}}
	lti, _ := layoutCache.LoadOrStore(k, layoutType{
		t:         x,
		framePool: framePool,
		abid:      abid,
	})
	lt := lti.(layoutType)
	return lt.t, lt.framePool, lt.abid
}
2904
2905
2906 func ifaceIndir(t *abi.Type) bool {
2907 return t.Kind_&abi.KindDirectIface == 0
2908 }
2909
2910
// bitVector is a grow-only bit set used to accumulate pointer bitmaps.
type bitVector struct {
	n    uint32 // number of bits
	data []byte // bit storage, grown a pointer-word of bytes at a time
}
2915
2916
2917 func (bv *bitVector) append(bit uint8) {
2918 if bv.n%(8*goarch.PtrSize) == 0 {
2919
2920
2921
2922 for i := 0; i < goarch.PtrSize; i++ {
2923 bv.data = append(bv.data, 0)
2924 }
2925 }
2926 bv.data[bv.n/8] |= bit << (bv.n % 8)
2927 bv.n++
2928 }
2929
// addTypeBits appends the pointer bitmap of t, positioned at byte offset
// within the enclosing object, to bv. Pointer-free types contribute
// nothing.
func addTypeBits(bv *bitVector, offset uintptr, t *abi.Type) {
	if !t.Pointers() {
		return
	}

	switch Kind(t.Kind_ & abi.KindMask) {
	case Chan, Func, Map, Pointer, Slice, String, UnsafePointer:
		// 1 pointer at the start of the representation.
		for bv.n < uint32(offset/uintptr(goarch.PtrSize)) {
			bv.append(0)
		}
		bv.append(1)

	case Interface:
		// 2 pointers (type word and data word).
		for bv.n < uint32(offset/uintptr(goarch.PtrSize)) {
			bv.append(0)
		}
		bv.append(1)
		bv.append(1)

	case Array:
		// Repeat the inner type for each element.
		tt := (*arrayType)(unsafe.Pointer(t))
		for i := 0; i < int(tt.Len); i++ {
			addTypeBits(bv, offset+uintptr(i)*tt.Elem.Size_, tt.Elem)
		}

	case Struct:
		// Apply each field at its own offset.
		tt := (*structType)(unsafe.Pointer(t))
		for i := range tt.Fields {
			f := &tt.Fields[i]
			addTypeBits(bv, offset+f.Offset, f.Typ)
		}
	}
}
2967
2968
2969 func TypeFor[T any]() Type {
2970 var v T
2971 if t := TypeOf(v); t != nil {
2972 return t
2973 }
2974 return TypeOf((*T)(nil)).Elem()
2975 }
2976