Source file
src/reflect/type.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16 package reflect
17
18 import (
19 "internal/abi"
20 "internal/goarch"
21 "strconv"
22 "sync"
23 "unicode"
24 "unicode/utf8"
25 "unsafe"
26 )
27
28
29
30
31
32
33
34
35
36
37
38
// Type is the representation of a Go type.
//
// Not all methods apply to all kinds of types. Restrictions,
// if any, are noted in the documentation for each method.
// Use the Kind method to find out the kind of type before
// calling kind-specific methods. Calling a method
// inappropriate to the kind of type causes a run-time panic.
type Type interface {
	// Methods applicable to all types.

	// Align returns the alignment in bytes of a value of
	// this type when allocated in memory.
	Align() int

	// FieldAlign returns the alignment in bytes of a value of
	// this type when used as a field in a struct.
	FieldAlign() int

	// Method returns the i'th method in the type's method set.
	// It panics if i is not in the range [0, NumMethod()).
	//
	// For a non-interface type, the returned Method's Type and Func
	// fields describe a function whose first argument is the receiver.
	//
	// For an interface type, the returned Method's Type field gives the
	// method signature, without a receiver, and the Func field is nil.
	//
	// Methods are sorted in lexicographic order.
	Method(int) Method

	// MethodByName returns the method with that name in the type's
	// method set and a boolean indicating if the method was found.
	MethodByName(string) (Method, bool)

	// NumMethod returns the number of methods accessible using Method.
	NumMethod() int

	// Name returns the type's name within its package for a defined type.
	// For other (non-defined) types it returns the empty string.
	Name() string

	// PkgPath returns a defined type's package path, that is, the import path
	// that uniquely identifies the package, such as "encoding/base64".
	// If the type was predeclared (string, error) or not defined (*T, struct{},
	// []int), the package path will be the empty string.
	PkgPath() string

	// Size returns the number of bytes needed to store
	// a value of the given type; it is analogous to unsafe.Sizeof.
	Size() uintptr

	// String returns a string representation of the type.
	// The string representation may use shortened package names
	// (e.g., base64 instead of "encoding/base64") and is not
	// guaranteed to be unique among types.
	String() string

	// Kind returns the specific kind of this type.
	Kind() Kind

	// Implements reports whether the type implements the interface type u.
	Implements(u Type) bool

	// AssignableTo reports whether a value of the type is assignable to type u.
	AssignableTo(u Type) bool

	// ConvertibleTo reports whether a value of the type is convertible to type u.
	ConvertibleTo(u Type) bool

	// Comparable reports whether values of this type are comparable.
	Comparable() bool

	// Methods applicable only to some types, depending on Kind.
	// Calling a kind-specific method on a type of the wrong kind panics.

	// Bits returns the size of the type in bits.
	// It panics if the type's Kind is not one of the
	// sized or unsized Int, Uint, Float, or Complex kinds.
	Bits() int

	// ChanDir returns a channel type's direction.
	// It panics if the type's Kind is not Chan.
	ChanDir() ChanDir

	// IsVariadic reports whether a function type's final input parameter
	// is a "..." parameter. It panics if the type's Kind is not Func.
	IsVariadic() bool

	// Elem returns a type's element type.
	// It panics if the type's Kind is not Array, Chan, Map, Pointer, or Slice.
	Elem() Type

	// Field returns a struct type's i'th field.
	// It panics if the type's Kind is not Struct or i is out of range.
	Field(i int) StructField

	// FieldByIndex returns the nested field corresponding
	// to the index sequence. It panics if the type's Kind is not Struct.
	FieldByIndex(index []int) StructField

	// FieldByName returns the struct field with the given name
	// and a boolean indicating if the field was found.
	// It panics if the type's Kind is not Struct.
	FieldByName(name string) (StructField, bool)

	// FieldByNameFunc returns the struct field with a name
	// that satisfies the match function and a boolean indicating if
	// the field was found. A match at a shallower embedding depth
	// shadows deeper matches; multiple matches at the same depth
	// cancel each other and report no match.
	FieldByNameFunc(match func(string) bool) (StructField, bool)

	// In returns the type of a function type's i'th input parameter.
	// It panics if the type's Kind is not Func or i is out of range.
	In(i int) Type

	// Key returns a map type's key type.
	// It panics if the type's Kind is not Map.
	Key() Type

	// Len returns an array type's length.
	// It panics if the type's Kind is not Array.
	Len() int

	// NumField returns a struct type's field count.
	// It panics if the type's Kind is not Struct.
	NumField() int

	// NumIn returns a function type's input parameter count.
	// It panics if the type's Kind is not Func.
	NumIn() int

	// NumOut returns a function type's output parameter count.
	// It panics if the type's Kind is not Func.
	NumOut() int

	// Out returns the type of a function type's i'th output parameter.
	// It panics if the type's Kind is not Func or i is out of range.
	Out(i int) Type

	// OverflowComplex reports whether the complex128 x cannot be
	// represented by this type. It panics if the Kind is not
	// Complex64 or Complex128.
	OverflowComplex(x complex128) bool

	// OverflowFloat reports whether the float64 x cannot be
	// represented by this type. It panics if the Kind is not
	// Float32 or Float64.
	OverflowFloat(x float64) bool

	// OverflowInt reports whether the int64 x cannot be represented
	// by this type. It panics if the Kind is not a signed integer kind.
	OverflowInt(x int64) bool

	// OverflowUint reports whether the uint64 x cannot be represented
	// by this type. It panics if the Kind is not an unsigned integer kind.
	OverflowUint(x uint64) bool

	// CanSeq reports whether values of this type can be iterated over
	// with a one-variable range loop.
	CanSeq() bool

	// CanSeq2 reports whether values of this type can be iterated over
	// with a two-variable range loop.
	CanSeq2() bool

	// common returns the underlying *abi.Type representation (internal).
	common() *abi.Type
	// uncommon returns the optional method/package data, or nil (internal).
	uncommon() *uncommonType
}
253
254
255
256
257
258
259
260
261
262
267
268
269
// A Kind represents the specific kind of type that a Type represents.
// The zero Kind is not a valid kind.
type Kind uint

// The constants below enumerate every kind, in the order used by the
// runtime type representation (Invalid is the zero value).
const (
	Invalid Kind = iota
	Bool
	Int
	Int8
	Int16
	Int32
	Int64
	Uint
	Uint8
	Uint16
	Uint32
	Uint64
	Uintptr
	Float32
	Float64
	Complex64
	Complex128
	Array
	Chan
	Func
	Interface
	Map
	Pointer
	Slice
	String
	Struct
	UnsafePointer
)

// Ptr is the old name for the Pointer kind.
const Ptr = Pointer
304
305
306
307
308
// uncommonType is present only for defined types or types with methods.
type uncommonType = abi.UncommonType

// common is embedded to provide the abi.Type method set by promotion.
type common struct {
	abi.Type
}

// rtype is the common implementation of most values.
// It is embedded in other struct types and wraps the runtime's
// type descriptor. rtype implements the Type interface.
type rtype struct {
	t abi.Type
}
321
// common returns the underlying runtime type descriptor.
func (t *rtype) common() *abi.Type {
	return &t.t
}

// uncommon returns the type's method/package data, or nil if absent.
func (t *rtype) uncommon() *abi.UncommonType {
	return t.t.Uncommon()
}
329
// Offset aliases into the runtime's type metadata sections.
type aNameOff = abi.NameOff
type aTypeOff = abi.TypeOff
type aTextOff = abi.TextOff

// ChanDir represents a channel type's direction.
type ChanDir int

const (
	RecvDir ChanDir = 1 << iota         // <-chan
	SendDir                             // chan<-
	BothDir = RecvDir | SendDir         // chan
)
342
343
// arrayType represents a fixed array type.
type arrayType = abi.ArrayType

// chanType represents a channel type.
type chanType = abi.ChanType

// funcType represents a function type.
//
// The abi.FuncType layout stores parameter and result types in
// trailing memory after the descriptor; use its accessor methods
// (In, Out, InSlice, OutSlice) rather than touching memory directly.
type funcType = abi.FuncType

// interfaceType represents an interface type.
type interfaceType struct {
	abi.InterfaceType
}
366
// nameOff resolves a name offset relative to this interface type's module.
func (t *interfaceType) nameOff(off aNameOff) abi.Name {
	return toRType(&t.Type).nameOff(off)
}

// nameOffFor resolves a name offset relative to an arbitrary *abi.Type.
func nameOffFor(t *abi.Type, off aNameOff) abi.Name {
	return toRType(t).nameOff(off)
}

// typeOffFor resolves a type offset relative to an arbitrary *abi.Type.
func typeOffFor(t *abi.Type, off aTypeOff) *abi.Type {
	return toRType(t).typeOff(off)
}

// typeOff resolves a type offset relative to this interface type's module.
func (t *interfaceType) typeOff(off aTypeOff) *abi.Type {
	return toRType(&t.Type).typeOff(off)
}

// common returns the underlying runtime type descriptor.
func (t *interfaceType) common() *abi.Type {
	return &t.Type
}

// uncommon returns the interface type's method/package data, or nil.
func (t *interfaceType) uncommon() *abi.UncommonType {
	return t.Uncommon()
}
390
391
// mapType represents a map type.
type mapType struct {
	abi.MapType
}

// ptrType represents a pointer type.
type ptrType struct {
	abi.PtrType
}

// sliceType represents a slice type.
type sliceType struct {
	abi.SliceType
}

// structField describes a single field in a struct type.
type structField = abi.StructField

// structType represents a struct type.
type structType struct {
	abi.StructType
}
413
// pkgPath returns the package path encoded in the name n, or "" if the
// name's flag byte does not have the package-path bit (1<<2) set.
func pkgPath(n abi.Name) string {
	if n.Bytes == nil || *n.DataChecked(0, "name flag field")&(1<<2) == 0 {
		return ""
	}
	// Skip the varint-prefixed name data to find the trailing fields.
	i, l := n.ReadVarint(1)
	off := 1 + i + l
	if n.HasTag() {
		// Skip the varint-prefixed tag data as well, if present.
		i2, l2 := n.ReadVarint(off)
		off += i2 + l2
	}
	var nameOff int32
	// Note that this field may not be aligned in memory,
	// so we cannot use a direct int32 assignment here.
	copy((*[4]byte)(unsafe.Pointer(&nameOff))[:], (*[4]byte)(unsafe.Pointer(n.DataChecked(off, "name offset field")))[:])
	pkgPathName := abi.Name{Bytes: (*byte)(resolveTypeOff(unsafe.Pointer(n.Bytes), nameOff))}
	return pkgPathName.Name()
}
431
// newName builds an encoded abi.Name from a name, tag, and flag bits.
func newName(n, tag string, exported, embedded bool) abi.Name {
	return abi.NewName(n, tag, exported, embedded)
}
435
436
440
441
// Method represents a single method.
type Method struct {
	// Name is the method name.
	Name string

	// PkgPath is the package path that qualifies a lower case (unexported)
	// method name. It is empty for upper case (exported) method names.
	// The combination of PkgPath and Name uniquely identifies a method
	// in a method set.
	PkgPath string

	Type  Type  // method type
	Func  Value // func with receiver as first argument
	Index int   // index for Type.Method
}

// IsExported reports whether the method is exported.
func (m Method) IsExported() bool {
	// Only unexported methods carry a package path.
	return m.PkgPath == ""
}
462
463
464 func (k Kind) String() string {
465 if uint(k) < uint(len(kindNames)) {
466 return kindNames[uint(k)]
467 }
468 return "kind" + strconv.Itoa(int(k))
469 }
470
// kindNames maps each Kind to its display name, indexed by Kind value.
// Note that Pointer prints as the historical "ptr".
var kindNames = []string{
	Invalid:       "invalid",
	Bool:          "bool",
	Int:           "int",
	Int8:          "int8",
	Int16:         "int16",
	Int32:         "int32",
	Int64:         "int64",
	Uint:          "uint",
	Uint8:         "uint8",
	Uint16:        "uint16",
	Uint32:        "uint32",
	Uint64:        "uint64",
	Uintptr:       "uintptr",
	Float32:       "float32",
	Float64:       "float64",
	Complex64:     "complex64",
	Complex128:    "complex128",
	Array:         "array",
	Chan:          "chan",
	Func:          "func",
	Interface:     "interface",
	Map:           "map",
	Pointer:       "ptr",
	Slice:         "slice",
	String:        "string",
	Struct:        "struct",
	UnsafePointer: "unsafe.Pointer",
}
500
501
502
503
504
505
// resolveNameOff resolves a name offset from a base pointer.
// The (*rtype).nameOff method is a convenience wrapper for this function.
// Bodyless declaration: provided by the runtime (linkname).
func resolveNameOff(ptrInModule unsafe.Pointer, off int32) unsafe.Pointer

// resolveTypeOff resolves an *rtype offset from a base type.
// The (*rtype).typeOff method is a convenience wrapper for this function.
// Bodyless declaration: provided by the runtime (linkname).
func resolveTypeOff(rtype unsafe.Pointer, off int32) unsafe.Pointer

// resolveTextOff resolves a function pointer offset from a base type.
// The (*rtype).textOff method is a convenience wrapper for this function.
// Bodyless declaration: provided by the runtime (linkname).
func resolveTextOff(rtype unsafe.Pointer, off int32) unsafe.Pointer

// addReflectOff adds a pointer to the reflection offset lookup map in the
// runtime and returns a new ID usable as a typeOff or textOff.
// Bodyless declaration: provided by the runtime (linkname).
func addReflectOff(ptr unsafe.Pointer) int32
528
529
530
// resolveReflectName registers a reflect-created name with the runtime
// and returns an offset that later resolves back to it.
func resolveReflectName(n abi.Name) aNameOff {
	return aNameOff(addReflectOff(unsafe.Pointer(n.Bytes)))
}

// resolveReflectType registers a reflect-created type with the runtime
// and returns an offset that later resolves back to it.
func resolveReflectType(t *abi.Type) aTypeOff {
	return aTypeOff(addReflectOff(unsafe.Pointer(t)))
}

// resolveReflectText registers a reflect-created function pointer with
// the runtime and returns an offset that later resolves back to it.
func resolveReflectText(ptr unsafe.Pointer) aTextOff {
	return aTextOff(addReflectOff(ptr))
}
547
// nameOff resolves a name offset relative to t's module data.
func (t *rtype) nameOff(off aNameOff) abi.Name {
	return abi.Name{Bytes: (*byte)(resolveNameOff(unsafe.Pointer(t), int32(off)))}
}

// typeOff resolves a type offset relative to t's module data.
func (t *rtype) typeOff(off aTypeOff) *abi.Type {
	return (*abi.Type)(resolveTypeOff(unsafe.Pointer(t), int32(off)))
}

// textOff resolves a code (text segment) offset relative to t's module data.
func (t *rtype) textOff(off aTextOff) unsafe.Pointer {
	return resolveTextOff(unsafe.Pointer(t), int32(off))
}

// textOffFor resolves a text offset relative to an arbitrary *abi.Type.
func textOffFor(t *abi.Type, off aTextOff) unsafe.Pointer {
	return toRType(t).textOff(off)
}
563
// String returns the type's string representation, stripping the leading
// '*' that the compiler prepends when TFlagExtraStar is set.
func (t *rtype) String() string {
	s := t.nameOff(t.t.Str).Name()
	if t.t.TFlag&abi.TFlagExtraStar != 0 {
		return s[1:]
	}
	return s
}

// Size returns the byte size of values of this type.
func (t *rtype) Size() uintptr { return t.t.Size() }

// Bits returns the size of the type in bits.
// It panics for nil receivers and non-arithmetic kinds.
func (t *rtype) Bits() int {
	if t == nil {
		panic("reflect: Bits of nil Type")
	}
	k := t.Kind()
	// Only numeric kinds (Int..Complex128 in the Kind ordering) have bits.
	if k < Int || k > Complex128 {
		panic("reflect: Bits of non-arithmetic Type " + t.String())
	}
	return int(t.t.Size_) * 8
}

// Align returns the alignment of values of this type.
func (t *rtype) Align() int { return t.t.Align() }

// FieldAlign returns the alignment of this type when used as a struct field.
func (t *rtype) FieldAlign() int { return t.t.FieldAlign() }

// Kind returns the type's kind.
func (t *rtype) Kind() Kind { return Kind(t.t.Kind()) }

// exportedMethods returns the type's exported methods, or nil if it has none.
func (t *rtype) exportedMethods() []abi.Method {
	ut := t.uncommon()
	if ut == nil {
		return nil
	}
	return ut.ExportedMethods()
}

// NumMethod returns the number of methods in the type's method set.
// For interfaces this counts all methods; otherwise only exported ones.
func (t *rtype) NumMethod() int {
	if t.Kind() == Interface {
		tt := (*interfaceType)(unsafe.Pointer(t))
		return tt.NumMethod()
	}
	return len(t.exportedMethods())
}
606
// Method returns the i'th exported method of t.
// For interface types it delegates to interfaceType.Method; otherwise it
// synthesizes a Method whose Type takes the receiver as first parameter
// and whose Func is a method value bound to the raw code pointer.
func (t *rtype) Method(i int) (m Method) {
	if t.Kind() == Interface {
		tt := (*interfaceType)(unsafe.Pointer(t))
		return tt.Method(i)
	}
	methods := t.exportedMethods()
	if i < 0 || i >= len(methods) {
		panic("reflect: Method index out of range")
	}
	p := methods[i]
	pname := t.nameOff(p.Name)
	m.Name = pname.Name()
	fl := flag(Func)
	mtyp := t.typeOff(p.Mtyp)
	ft := (*funcType)(unsafe.Pointer(mtyp))
	// Build the func type with the receiver prepended to the inputs.
	in := make([]Type, 0, 1+ft.NumIn())
	in = append(in, t)
	for _, arg := range ft.InSlice() {
		in = append(in, toRType(arg))
	}
	out := make([]Type, 0, ft.NumOut())
	for _, ret := range ft.OutSlice() {
		out = append(out, toRType(ret))
	}
	mt := FuncOf(in, out, ft.IsVariadic())
	m.Type = mt
	// The method value stores a pointer to the code pointer itself
	// (a **code layout), matching the func value representation.
	tfn := t.textOff(p.Tfn)
	fn := unsafe.Pointer(&tfn)
	m.Func = Value{&mt.(*rtype).t, fn, fl}

	m.Index = i
	return m
}
640
// MethodByName returns the exported method of t with the given name,
// using binary search over the name-sorted exported method table.
func (t *rtype) MethodByName(name string) (m Method, ok bool) {
	if t.Kind() == Interface {
		tt := (*interfaceType)(unsafe.Pointer(t))
		return tt.MethodByName(name)
	}
	ut := t.uncommon()
	if ut == nil {
		return Method{}, false
	}

	methods := ut.ExportedMethods()

	// We are looking for the first index i where the string becomes >= s.
	// This is a copy of sort.Search, with f(h) replaced by (t.nameOff(methods[h].name).name() >= name).
	i, j := 0, len(methods)
	for i < j {
		h := int(uint(i+j) >> 1) // avoid overflow when computing h
		// i ≤ h < j
		if !(t.nameOff(methods[h].Name).Name() >= name) {
			i = h + 1 // preserves f(i-1) == false
		} else {
			j = h // preserves f(j) == true
		}
	}
	// i == j, f(i-1) == false, and f(j) (= f(i)) == true  =>  answer is i.
	if i < len(methods) && name == t.nameOff(methods[i].Name).Name() {
		return t.Method(i), true
	}

	return Method{}, false
}
672
// PkgPath returns the import path of a defined (named) type,
// or "" for predeclared and unnamed types.
func (t *rtype) PkgPath() string {
	if t.t.TFlag&abi.TFlagNamed == 0 {
		return ""
	}
	ut := t.uncommon()
	if ut == nil {
		return ""
	}
	return t.nameOff(ut.PkgPath).Name()
}

// pkgPathFor is the *abi.Type-level convenience wrapper for PkgPath.
func pkgPathFor(t *abi.Type) string {
	return toRType(t).PkgPath()
}
687
688 func (t *rtype) Name() string {
689 if !t.t.HasName() {
690 return ""
691 }
692 s := t.String()
693 i := len(s) - 1
694 sqBrackets := 0
695 for i >= 0 && (s[i] != '.' || sqBrackets != 0) {
696 switch s[i] {
697 case ']':
698 sqBrackets++
699 case '[':
700 sqBrackets--
701 }
702 i--
703 }
704 return s[i+1:]
705 }
706
// nameFor is the *abi.Type-level convenience wrapper for Name.
func nameFor(t *abi.Type) string {
	return toRType(t).Name()
}
710
// ChanDir returns the channel direction; it panics for non-channel types.
func (t *rtype) ChanDir() ChanDir {
	if t.Kind() != Chan {
		panic("reflect: ChanDir of non-chan type " + t.String())
	}
	tt := (*abi.ChanType)(unsafe.Pointer(t))
	return ChanDir(tt.Dir)
}

// toRType reinterprets an *abi.Type as an *rtype (same memory layout).
func toRType(t *abi.Type) *rtype {
	return (*rtype)(unsafe.Pointer(t))
}
722
723 func elem(t *abi.Type) *abi.Type {
724 et := t.Elem()
725 if et != nil {
726 return et
727 }
728 panic("reflect: Elem of invalid type " + stringFor(t))
729 }
730
// Elem returns the element type of an Array, Chan, Map, Pointer, or Slice.
func (t *rtype) Elem() Type {
	return toType(elem(t.common()))
}

// Field returns the i'th struct field; panics for non-struct types.
func (t *rtype) Field(i int) StructField {
	if t.Kind() != Struct {
		panic("reflect: Field of non-struct type " + t.String())
	}
	tt := (*structType)(unsafe.Pointer(t))
	return tt.Field(i)
}

// FieldByIndex returns the nested field for an index path; panics for non-structs.
func (t *rtype) FieldByIndex(index []int) StructField {
	if t.Kind() != Struct {
		panic("reflect: FieldByIndex of non-struct type " + t.String())
	}
	tt := (*structType)(unsafe.Pointer(t))
	return tt.FieldByIndex(index)
}

// FieldByName returns the field with the given name; panics for non-structs.
func (t *rtype) FieldByName(name string) (StructField, bool) {
	if t.Kind() != Struct {
		panic("reflect: FieldByName of non-struct type " + t.String())
	}
	tt := (*structType)(unsafe.Pointer(t))
	return tt.FieldByName(name)
}

// FieldByNameFunc returns the field whose name satisfies match; panics for non-structs.
func (t *rtype) FieldByNameFunc(match func(string) bool) (StructField, bool) {
	if t.Kind() != Struct {
		panic("reflect: FieldByNameFunc of non-struct type " + t.String())
	}
	tt := (*structType)(unsafe.Pointer(t))
	return tt.FieldByNameFunc(match)
}
766
// Key returns the key type of a map; panics for non-map types.
func (t *rtype) Key() Type {
	if t.Kind() != Map {
		panic("reflect: Key of non-map type " + t.String())
	}
	tt := (*mapType)(unsafe.Pointer(t))
	return toType(tt.Key)
}

// Len returns the length of an array; panics for non-array types.
func (t *rtype) Len() int {
	if t.Kind() != Array {
		panic("reflect: Len of non-array type " + t.String())
	}
	tt := (*arrayType)(unsafe.Pointer(t))
	return int(tt.Len)
}

// NumField returns a struct's field count; panics for non-struct types.
func (t *rtype) NumField() int {
	if t.Kind() != Struct {
		panic("reflect: NumField of non-struct type " + t.String())
	}
	tt := (*structType)(unsafe.Pointer(t))
	return len(tt.Fields)
}

// In returns the i'th input type of a func; panics for non-func types.
func (t *rtype) In(i int) Type {
	if t.Kind() != Func {
		panic("reflect: In of non-func type " + t.String())
	}
	tt := (*abi.FuncType)(unsafe.Pointer(t))
	return toType(tt.InSlice()[i])
}

// NumIn returns a func's input count; panics for non-func types.
func (t *rtype) NumIn() int {
	if t.Kind() != Func {
		panic("reflect: NumIn of non-func type " + t.String())
	}
	tt := (*abi.FuncType)(unsafe.Pointer(t))
	return tt.NumIn()
}

// NumOut returns a func's output count; panics for non-func types.
func (t *rtype) NumOut() int {
	if t.Kind() != Func {
		panic("reflect: NumOut of non-func type " + t.String())
	}
	tt := (*abi.FuncType)(unsafe.Pointer(t))
	return tt.NumOut()
}

// Out returns the i'th output type of a func; panics for non-func types.
func (t *rtype) Out(i int) Type {
	if t.Kind() != Func {
		panic("reflect: Out of non-func type " + t.String())
	}
	tt := (*abi.FuncType)(unsafe.Pointer(t))
	return toType(tt.OutSlice()[i])
}

// IsVariadic reports whether a func's last parameter is "..."; panics for non-funcs.
func (t *rtype) IsVariadic() bool {
	if t.Kind() != Func {
		panic("reflect: IsVariadic of non-func type " + t.String())
	}
	tt := (*abi.FuncType)(unsafe.Pointer(t))
	return tt.IsVariadic()
}
830
// OverflowComplex reports whether x cannot be represented by type t.
// It panics if t's Kind is not Complex64 or Complex128.
func (t *rtype) OverflowComplex(x complex128) bool {
	k := t.Kind()
	switch k {
	case Complex64:
		return overflowFloat32(real(x)) || overflowFloat32(imag(x))
	case Complex128:
		return false
	}
	panic("reflect: OverflowComplex of non-complex type " + t.String())
}

// OverflowFloat reports whether x cannot be represented by type t.
// It panics if t's Kind is not Float32 or Float64.
func (t *rtype) OverflowFloat(x float64) bool {
	k := t.Kind()
	switch k {
	case Float32:
		return overflowFloat32(x)
	case Float64:
		return false
	}
	panic("reflect: OverflowFloat of non-float type " + t.String())
}

// OverflowInt reports whether x cannot be represented by type t.
// It panics if t's Kind is not a signed integer kind.
func (t *rtype) OverflowInt(x int64) bool {
	k := t.Kind()
	switch k {
	case Int, Int8, Int16, Int32, Int64:
		bitSize := t.Size() * 8
		// Sign-extend x from bitSize bits; x overflows iff the round
		// trip loses information.
		trunc := (x << (64 - bitSize)) >> (64 - bitSize)
		return x != trunc
	}
	panic("reflect: OverflowInt of non-int type " + t.String())
}

// OverflowUint reports whether x cannot be represented by type t.
// It panics if t's Kind is not an unsigned integer kind.
func (t *rtype) OverflowUint(x uint64) bool {
	k := t.Kind()
	switch k {
	case Uint, Uintptr, Uint8, Uint16, Uint32, Uint64:
		bitSize := t.Size() * 8
		// Zero-extend x from bitSize bits; x overflows iff the round
		// trip loses information.
		trunc := (x << (64 - bitSize)) >> (64 - bitSize)
		return x != trunc
	}
	panic("reflect: OverflowUint of non-uint type " + t.String())
}
874
// CanSeq reports whether t supports a one-variable range loop:
// integer kinds, containers, strings, channels, pointers to arrays,
// and functions of the iter.Seq shape.
func (t *rtype) CanSeq() bool {
	switch t.Kind() {
	case Int8, Int16, Int32, Int64, Int, Uint8, Uint16, Uint32, Uint64, Uint, Uintptr, Array, Slice, Chan, String, Map:
		return true
	case Func:
		return canRangeFunc(&t.t)
	case Pointer:
		return t.Elem().Kind() == Array
	}
	return false
}

// canRangeFunc reports whether t has the shape func(yield func(T) bool),
// i.e. a single-value range-over-func iterator.
func canRangeFunc(t *abi.Type) bool {
	if t.Kind() != abi.Func {
		return false
	}
	f := t.FuncType()
	if f.InCount != 1 || f.OutCount != 0 {
		return false
	}
	y := f.In(0)
	if y.Kind() != abi.Func {
		return false
	}
	yield := y.FuncType()
	// The yield callback takes one value and returns a bool.
	return yield.InCount == 1 && yield.OutCount == 1 && yield.Out(0).Kind() == abi.Bool
}
902
// CanSeq2 reports whether t supports a two-variable range loop:
// indexed containers, strings, maps, pointers to arrays, and
// functions of the iter.Seq2 shape.
func (t *rtype) CanSeq2() bool {
	switch t.Kind() {
	case Array, Slice, String, Map:
		return true
	case Func:
		return canRangeFunc2(&t.t)
	case Pointer:
		return t.Elem().Kind() == Array
	}
	return false
}

// canRangeFunc2 reports whether t has the shape func(yield func(K, V) bool),
// i.e. a two-value range-over-func iterator.
func canRangeFunc2(t *abi.Type) bool {
	if t.Kind() != abi.Func {
		return false
	}
	f := t.FuncType()
	if f.InCount != 1 || f.OutCount != 0 {
		return false
	}
	y := f.In(0)
	if y.Kind() != abi.Func {
		return false
	}
	yield := y.FuncType()
	// The yield callback takes two values and returns a bool.
	return yield.InCount == 2 && yield.OutCount == 1 && yield.Out(0).Kind() == abi.Bool
}
930
931
932
933
934
935
936
937
// add returns p+x. The whySafe string is ignored, so that the function
// still inlines as efficiently as if the comment was absent, but all
// call sites should document why the addition is safe, which is to say
// why the addition does not cause x to advance past the end of p's
// allocation and therefore point incorrectly at the next block in memory.
func add(p unsafe.Pointer, x uintptr, whySafe string) unsafe.Pointer {
	return unsafe.Pointer(uintptr(p) + x)
}
941
942 func (d ChanDir) String() string {
943 switch d {
944 case SendDir:
945 return "chan<-"
946 case RecvDir:
947 return "<-chan"
948 case BothDir:
949 return "chan"
950 }
951 return "ChanDir" + strconv.Itoa(int(d))
952 }
953
954
// Method returns the i'th method in the interface's method set.
// The returned Method has no Func (there is no receiver-bound code),
// and its PkgPath is filled in only for unexported method names.
func (t *interfaceType) Method(i int) (m Method) {
	if i < 0 || i >= len(t.Methods) {
		return
	}
	p := &t.Methods[i]
	pname := t.nameOff(p.Name)
	m.Name = pname.Name()
	if !pname.IsExported() {
		m.PkgPath = pkgPath(pname)
		if m.PkgPath == "" {
			// Fall back to the interface's own package path when the
			// name itself carries none.
			m.PkgPath = t.PkgPath.Name()
		}
	}
	m.Type = toType(t.typeOff(p.Typ))
	m.Index = i
	return
}
972
973
// NumMethod returns the number of interface methods in the type's method set.
func (t *interfaceType) NumMethod() int { return len(t.Methods) }

// MethodByName returns the interface method with the given name, found
// by a linear scan of the method table.
func (t *interfaceType) MethodByName(name string) (m Method, ok bool) {
	if t == nil {
		return
	}
	var p *abi.Imethod
	for i := range t.Methods {
		p = &t.Methods[i]
		if t.nameOff(p.Name).Name() == name {
			return t.Method(i), true
		}
	}
	return
}
990
991
// A StructField describes a single field in a struct.
type StructField struct {
	// Name is the field name.
	Name string

	// PkgPath is the package path that qualifies a lower case (unexported)
	// field name. It is empty for upper case (exported) field names.
	PkgPath string

	Type      Type      // field type
	Tag       StructTag // field tag string
	Offset    uintptr   // offset within struct, in bytes
	Index     []int     // index sequence for Type.FieldByIndex
	Anonymous bool      // is an embedded field
}

// IsExported reports whether the field is exported.
func (f StructField) IsExported() bool {
	// Only unexported fields carry a package path.
	return f.PkgPath == ""
}
1012
1013
1014
1015
1016
1017
1018
1019
1020
// A StructTag is the tag string in a struct field.
//
// By convention, tag strings are a concatenation of
// optionally space-separated key:"value" pairs.
type StructTag string

// Get returns the value associated with key in the tag string,
// or "" if the key is absent or the tag is malformed.
// Use Lookup to distinguish an absent key from an empty value.
func (tag StructTag) Get(key string) string {
	v, _ := tag.Lookup(key)
	return v
}
1032
1033
1034
1035
1036
1037
1038
// Lookup returns the value associated with key in the tag string and
// reports whether the key was present. Parsing stops silently at the
// first syntax error, so a malformed suffix hides later keys.
func (tag StructTag) Lookup(key string) (value string, ok bool) {
	// When modifying this code, also update the validateStructTag code
	// that vet-style tools use to check tag syntax.
	for tag != "" {
		// Skip leading space.
		i := 0
		for i < len(tag) && tag[i] == ' ' {
			i++
		}
		tag = tag[i:]
		if tag == "" {
			break
		}

		// Scan to colon. A space, a quote or a control character is a
		// syntax error. Strictly speaking, control chars include the
		// range [0x7f, 0x9f], not just [0x00, 0x1f], but in practice
		// we ignore the multi-byte control characters as it is simpler
		// to inspect the tag's bytes than the tag's runes.
		i = 0
		for i < len(tag) && tag[i] > ' ' && tag[i] != ':' && tag[i] != '"' && tag[i] != 0x7f {
			i++
		}
		if i == 0 || i+1 >= len(tag) || tag[i] != ':' || tag[i+1] != '"' {
			break
		}
		name := string(tag[:i])
		tag = tag[i+1:]

		// Scan quoted string to find value; '\\' escapes the next byte.
		i = 1
		for i < len(tag) && tag[i] != '"' {
			if tag[i] == '\\' {
				i++
			}
			i++
		}
		if i >= len(tag) {
			break
		}
		qvalue := string(tag[:i+1])
		tag = tag[i+1:]

		if key == name {
			value, err := strconv.Unquote(qvalue)
			if err != nil {
				break
			}
			return value, true
		}
	}
	return "", false
}
1092
1093
// Field returns the i'th field of the struct as a reflect.StructField.
func (t *structType) Field(i int) (f StructField) {
	if i < 0 || i >= len(t.Fields) {
		panic("reflect: Field index out of bounds")
	}
	p := &t.Fields[i]
	f.Type = toType(p.Typ)
	f.Name = p.Name.Name()
	f.Anonymous = p.Embedded()
	if !p.Name.IsExported() {
		// Unexported fields are qualified by the struct's package path.
		f.PkgPath = t.PkgPath.Name()
	}
	if tag := p.Name.Tag(); tag != "" {
		f.Tag = StructTag(tag)
	}
	f.Offset = p.Offset

	// NOTE(rsc): This is the only allocation in the interface
	// presented by a reflect.Type. It would be nice to avoid,
	// at least in the common cases, but we need to make sure
	// that misbehaving clients of reflect cannot affect other
	// uses of reflect. One possibility is CL 5371098, but we
	// postponed that ugliness until there is a demonstrated
	// need for the performance. This is issue 2320.
	f.Index = []int{i}
	return
}
1120
1121
1122
1123
1124
// FieldByIndex returns the nested field corresponding to index,
// following through pointers to embedded structs along the way.
func (t *structType) FieldByIndex(index []int) (f StructField) {
	f.Type = toType(&t.Type)
	for i, x := range index {
		if i > 0 {
			ft := f.Type
			// Step through a *struct to reach its fields.
			if ft.Kind() == Pointer && ft.Elem().Kind() == Struct {
				ft = ft.Elem()
			}
			f.Type = ft
		}
		f = f.Type.Field(x)
	}
	return
}
1139
1140
// A fieldScan represents an item on the fieldByNameFunc scan work list.
type fieldScan struct {
	typ   *structType // struct type to scan
	index []int       // index path from the root struct to typ
}
1145
1146
1147
// FieldByNameFunc returns the struct field with a name that satisfies the
// match function and a boolean to indicate if the field was found.
func (t *structType) FieldByNameFunc(match func(string) bool) (result StructField, ok bool) {
	// This uses the same condition that the Go language does: there must be a
	// unique instance of the match at a given depth level. If there are
	// multiple instances of a match at the same depth, they annihilate each
	// other and inhibit any possible match at a lower level.
	// The algorithm is breadth first search, one depth level at a time.

	// The current and next slices are work queues:
	// current lists the fields to visit on this depth level,
	// and next lists the embedded struct fields found at the current depth,
	// to be visited on the next depth level.
	current := []fieldScan{}
	next := []fieldScan{{typ: t}}

	// nextCount records the number of times an embedded type has been
	// encountered and considered for queueing in the 'next' slice.
	// We only queue the first one, but we increment the count on each.
	// If a struct type T can be reached more than once at a given depth
	// level, then it annihilates itself and need not be considered at all
	// when we process that next depth level.
	var nextCount map[*structType]int

	// visited records the structs that have been considered already.
	// Embedded pointer fields can create cycles in the graph of
	// reachable embedded types; visited avoids following those cycles.
	// It also avoids duplicated effort: if we didn't find the field in an
	// embedded type T at level 2, we won't find it in one at level 4 either.
	visited := map[*structType]bool{}

	for len(next) > 0 {
		current, next = next, current[:0]
		count := nextCount
		nextCount = nil

		// Process all the fields at this depth, now listed in 'current'.
		// The loop queues embedded fields found in 'next', for processing during the next
		// iteration. The multiplicity of the 'current' field counts is recorded
		// in 'count'; the multiplicity of the 'next' field counts is recorded in 'nextCount'.
		for _, scan := range current {
			t := scan.typ
			if visited[t] {
				// We've looked through this type before, at a higher level.
				// That higher level would shadow the lower level we're now at,
				// so this one can't be useful to us. Ignore it.
				continue
			}
			visited[t] = true
			for i := range t.Fields {
				f := &t.Fields[i]
				// Find name and (for embedded field) type for field f.
				fname := f.Name.Name()
				var ntyp *abi.Type
				if f.Embedded() {
					// Embedded field of type T or *T.
					ntyp = f.Typ
					if ntyp.Kind() == abi.Pointer {
						ntyp = ntyp.Elem()
					}
				}

				// Does it match?
				if match(fname) {
					// Potential match
					if count[t] > 1 || ok {
						// Name appeared multiple times at this level: annihilate.
						return StructField{}, false
					}
					result = t.Field(i)
					result.Index = nil
					result.Index = append(result.Index, scan.index...)
					result.Index = append(result.Index, i)
					ok = true
					continue
				}

				// Queue embedded struct fields for processing with next level,
				// but only if we haven't seen a match yet at this level and only
				// if the embedded types haven't already been queued.
				if ok || ntyp == nil || ntyp.Kind() != abi.Struct {
					continue
				}
				styp := (*structType)(unsafe.Pointer(ntyp))
				if nextCount[styp] > 0 {
					nextCount[styp] = 2 // exact multiple doesn't matter
					continue
				}
				if nextCount == nil {
					nextCount = map[*structType]int{}
				}
				nextCount[styp] = 1
				if count[t] > 1 {
					nextCount[styp] = 2 // exact multiple doesn't matter
				}
				var index []int
				index = append(index, scan.index...)
				index = append(index, i)
				next = append(next, fieldScan{styp, index})
			}
		}
		if ok {
			break
		}
	}
	return
}
1251
1252
1253
1254 func (t *structType) FieldByName(name string) (f StructField, present bool) {
1255
1256 hasEmbeds := false
1257 if name != "" {
1258 for i := range t.Fields {
1259 tf := &t.Fields[i]
1260 if tf.Name.Name() == name {
1261 return t.Field(i), true
1262 }
1263 if tf.Embedded() {
1264 hasEmbeds = true
1265 }
1266 }
1267 }
1268 if !hasEmbeds {
1269 return
1270 }
1271 return t.FieldByNameFunc(func(s string) bool { return s == name })
1272 }
1273
1274
1275
// TypeOf returns the reflection Type that represents the dynamic type of i.
// If i is a nil interface value, TypeOf returns nil.
func TypeOf(i any) Type {
	return toType(abi.TypeOf(i))
}

// rtypeOf directly extracts the *abi.Type of the provided value.
func rtypeOf(i any) *abi.Type {
	return abi.TypeOf(i)
}
1284
1285
// ptrMap is the cache for PointerTo: map[*rtype]*ptrType.
var ptrMap sync.Map

// PtrTo returns the pointer type with element t.
// For example, if t represents type Foo, PtrTo(t) represents *Foo.
//
// PtrTo is the old spelling of PointerTo.
// The two functions behave identically.
//
// Deprecated: Superseded by PointerTo.
func PtrTo(t Type) Type { return PointerTo(t) }

// PointerTo returns the pointer type with element t.
// For example, if t represents type Foo, PointerTo(t) represents *Foo.
func PointerTo(t Type) Type {
	return toRType(t.(*rtype).ptrTo())
}
1302
// ptrTo returns the *abi.Type for *t, building and caching one if the
// binary does not already contain it.
func (t *rtype) ptrTo() *abi.Type {
	at := &t.t
	// Fast path: the compiler recorded the pointer type directly.
	if at.PtrToThis != 0 {
		return t.typeOff(at.PtrToThis)
	}

	// Check the cache.
	if pi, ok := ptrMap.Load(t); ok {
		return &pi.(*ptrType).Type
	}

	// Look in known types.
	s := "*" + t.String()
	for _, tt := range typesByString(s) {
		p := (*ptrType)(unsafe.Pointer(tt))
		if p.Elem != &t.t {
			// Same string, different element type: not our pointer.
			continue
		}
		pi, _ := ptrMap.LoadOrStore(t, p)
		return &pi.(*ptrType).Type
	}

	// Create a new ptrType starting with the description
	// of an *unsafe.Pointer.
	var iptr any = (*unsafe.Pointer)(nil)
	prototype := *(**ptrType)(unsafe.Pointer(&iptr))
	pp := *prototype

	pp.Str = resolveReflectName(newName(s, "", false, false))
	pp.PtrToThis = 0

	// For the type structures linked into the binary, the
	// compiler provides a good hash of the string.
	// Create a good hash for the new string by using
	// the FNV-1 hash's mixing function to combine the
	// old hash and the new "*".
	pp.Hash = fnv1(t.t.Hash, '*')

	pp.Elem = at

	// LoadOrStore keeps the first stored value if two goroutines race.
	pi, _ := ptrMap.LoadOrStore(t, &pp)
	return &pi.(*ptrType).Type
}

// ptrTo is the *abi.Type-level convenience wrapper.
func ptrTo(t *abi.Type) *abi.Type {
	return toRType(t).ptrTo()
}
1350
1351
// fnv1 incorporates the list of bytes into the hash x using the FNV-1 hash function.
func fnv1(x uint32, list ...byte) uint32 {
	h := x
	for i := 0; i < len(list); i++ {
		h = h*16777619 ^ uint32(list[i])
	}
	return h
}
1358
// Implements reports whether t implements the interface type u.
// It panics if u is nil or not an interface type.
func (t *rtype) Implements(u Type) bool {
	if u == nil {
		panic("reflect: nil type passed to Type.Implements")
	}
	if u.Kind() != Interface {
		panic("reflect: non-interface type passed to Type.Implements")
	}
	return implements(u.common(), t.common())
}

// AssignableTo reports whether a value of type t is assignable to type u.
func (t *rtype) AssignableTo(u Type) bool {
	if u == nil {
		panic("reflect: nil type passed to Type.AssignableTo")
	}
	uu := u.common()
	// Assignable either directly or via interface satisfaction.
	return directlyAssignable(uu, t.common()) || implements(uu, t.common())
}

// ConvertibleTo reports whether a value of type t is convertible to type u.
func (t *rtype) ConvertibleTo(u Type) bool {
	if u == nil {
		panic("reflect: nil type passed to Type.ConvertibleTo")
	}
	return convertOp(u.common(), t.common()) != nil
}

// Comparable reports whether values of type t can be compared with ==.
// A type is comparable exactly when the runtime gave it an equality function.
func (t *rtype) Comparable() bool {
	return t.t.Equal != nil
}
1387
1388
// implements reports whether the type V implements the interface type T.
func implements(T, V *abi.Type) bool {
	if T.Kind() != abi.Interface {
		return false
	}
	t := (*interfaceType)(unsafe.Pointer(T))
	// The empty interface is implemented by everything.
	if len(t.Methods) == 0 {
		return true
	}

	// The same algorithm applies in both cases, but the
	// method tables for an interface type and a concrete type
	// are different, so the code is duplicated.
	// In both cases the algorithm is a linear scan over the two
	// lists - T's methods and V's methods - simultaneously.
	// Since method tables are stored in a unique sorted order
	// (alphabetical, with no duplicate method names), the scan
	// through V's methods must hit a match for each of T's
	// methods along the way, or else V does not implement T.
	// This lets us run the scan in overall linear time instead of
	// the quadratic time a naive search would require.
	// See also ../runtime/iface.go.
	if V.Kind() == abi.Interface {
		v := (*interfaceType)(unsafe.Pointer(V))
		i := 0
		for j := 0; j < len(v.Methods); j++ {
			tm := &t.Methods[i]
			tmName := t.nameOff(tm.Name)
			vm := &v.Methods[j]
			vmName := nameOffFor(V, vm.Name)
			if vmName.Name() == tmName.Name() && typeOffFor(V, vm.Typ) == t.typeOff(tm.Typ) {
				if !tmName.IsExported() {
					// Unexported methods only match within the same package.
					tmPkgPath := pkgPath(tmName)
					if tmPkgPath == "" {
						tmPkgPath = t.PkgPath.Name()
					}
					vmPkgPath := pkgPath(vmName)
					if vmPkgPath == "" {
						vmPkgPath = v.PkgPath.Name()
					}
					if tmPkgPath != vmPkgPath {
						continue
					}
				}
				if i++; i >= len(t.Methods) {
					return true
				}
			}
		}
		return false
	}

	v := V.Uncommon()
	if v == nil {
		return false
	}
	i := 0
	vmethods := v.Methods()
	for j := 0; j < int(v.Mcount); j++ {
		tm := &t.Methods[i]
		tmName := t.nameOff(tm.Name)
		vm := vmethods[j]
		vmName := nameOffFor(V, vm.Name)
		if vmName.Name() == tmName.Name() && typeOffFor(V, vm.Mtyp) == t.typeOff(tm.Typ) {
			if !tmName.IsExported() {
				// Unexported methods only match within the same package.
				tmPkgPath := pkgPath(tmName)
				if tmPkgPath == "" {
					tmPkgPath = t.PkgPath.Name()
				}
				vmPkgPath := pkgPath(vmName)
				if vmPkgPath == "" {
					vmPkgPath = nameOffFor(V, v.PkgPath).Name()
				}
				if tmPkgPath != vmPkgPath {
					continue
				}
			}
			if i++; i >= len(t.Methods) {
				return true
			}
		}
	}
	return false
}
1472
1473
1474
1475
1476
// specialChannelAssignability reports whether a value x of channel type V
// can be directly assigned (using memmove) to another channel type T.
// https://golang.org/doc/go_spec.html#Assignability
// T and V must be both of Chan kind.
func specialChannelAssignability(T, V *abi.Type) bool {
	// Special case:
	// x is a bidirectional channel value, T is a channel type,
	// x's type V and T have identical element types,
	// and at least one of V or T is not a defined type.
	return V.ChanDir() == abi.BothDir && (nameFor(T) == "" || nameFor(V) == "") && haveIdenticalType(T.Elem(), V.Elem(), true)
}
1484
1485
1486
1487
1488
1489
// directlyAssignable reports whether a value x of type V can be directly
// assigned (using memmove) to a value of type T.
// https://golang.org/doc/go_spec.html#Assignability
// Ignoring the interface rules (implemented elsewhere)
// and the ideal constant rules (no ideal constants at run time).
func directlyAssignable(T, V *abi.Type) bool {
	// x's type V is identical to T?
	if T == V {
		return true
	}

	// Otherwise at least one of T and V must not be defined
	// and they must have the same kind.
	if T.HasName() && V.HasName() || T.Kind() != V.Kind() {
		return false
	}

	if T.Kind() == abi.Chan && specialChannelAssignability(T, V) {
		return true
	}

	// x's type T and V must have identical underlying types.
	return haveIdenticalUnderlyingType(T, V, true)
}
1509
// haveIdenticalType reports whether T and V are identical types.
// When cmpTags is true, identity is pointer identity (struct tags matter);
// otherwise names, kinds, and package paths are compared structurally
// before recursing into the underlying types.
func haveIdenticalType(T, V *abi.Type, cmpTags bool) bool {
	if cmpTags {
		return T == V
	}

	if nameFor(T) != nameFor(V) || T.Kind() != V.Kind() || pkgPathFor(T) != pkgPathFor(V) {
		return false
	}

	return haveIdenticalUnderlyingType(T, V, false)
}
1521
// haveIdenticalUnderlyingType reports whether T and V have identical
// underlying types, recursing structurally through composite kinds.
func haveIdenticalUnderlyingType(T, V *abi.Type, cmpTags bool) bool {
	if T == V {
		return true
	}

	kind := Kind(T.Kind())
	if kind != Kind(V.Kind()) {
		return false
	}

	// Non-composite types of equal kind have same underlying type
	// (the predefined instance of the type).
	if Bool <= kind && kind <= Complex128 || kind == String || kind == UnsafePointer {
		return true
	}

	// Composite types.
	switch kind {
	case Array:
		return T.Len() == V.Len() && haveIdenticalType(T.Elem(), V.Elem(), cmpTags)

	case Chan:
		return V.ChanDir() == T.ChanDir() && haveIdenticalType(T.Elem(), V.Elem(), cmpTags)

	case Func:
		t := (*funcType)(unsafe.Pointer(T))
		v := (*funcType)(unsafe.Pointer(V))
		if t.OutCount != v.OutCount || t.InCount != v.InCount {
			return false
		}
		for i := 0; i < t.NumIn(); i++ {
			if !haveIdenticalType(t.In(i), v.In(i), cmpTags) {
				return false
			}
		}
		for i := 0; i < t.NumOut(); i++ {
			if !haveIdenticalType(t.Out(i), v.Out(i), cmpTags) {
				return false
			}
		}
		return true

	case Interface:
		t := (*interfaceType)(unsafe.Pointer(T))
		v := (*interfaceType)(unsafe.Pointer(V))
		if len(t.Methods) == 0 && len(v.Methods) == 0 {
			return true
		}
		// Might have the same methods but still
		// need a run time conversion.
		return false

	case Map:
		return haveIdenticalType(T.Key(), V.Key(), cmpTags) && haveIdenticalType(T.Elem(), V.Elem(), cmpTags)

	case Pointer, Slice:
		return haveIdenticalType(T.Elem(), V.Elem(), cmpTags)

	case Struct:
		t := (*structType)(unsafe.Pointer(T))
		v := (*structType)(unsafe.Pointer(V))
		if len(t.Fields) != len(v.Fields) {
			return false
		}
		if t.PkgPath.Name() != v.PkgPath.Name() {
			return false
		}
		for i := range t.Fields {
			tf := &t.Fields[i]
			vf := &v.Fields[i]
			if tf.Name.Name() != vf.Name.Name() {
				return false
			}
			if !haveIdenticalType(tf.Typ, vf.Typ, cmpTags) {
				return false
			}
			if cmpTags && tf.Name.Tag() != vf.Name.Tag() {
				return false
			}
			if tf.Offset != vf.Offset {
				return false
			}
			if tf.Embedded() != vf.Embedded() {
				return false
			}
		}
		return true
	}

	return false
}
1613
1614
1615
1616
1617
1618
1619
1620
1621
1622
1623
1624
1625
1626
1627
1628
1629
1630
1631
1632
// typelinks is implemented in package runtime (bodyless declaration).
// It returns a slice of the sections in each module,
// and a slice of *rtype offsets in each module.
//
// The types in each module are sorted by string. That is, the first
// two linked types of the first module are:
//
//	d0 := sections[0]
//	t1 := (*rtype)(add(d0, offset[0][0]))
//	t2 := (*rtype)(add(d0, offset[0][1]))
//
// and
//
//	t1.String() < t2.String()
//
// Note that strings are not unique identifiers for types:
// there can be more than one with a given string.
// Only types we might want to look up are included:
// pointers, channels, maps, slices, and arrays.
func typelinks() (sections []unsafe.Pointer, offset [][]int32)

// rtypeOff materializes the type descriptor at the given section offset.
func rtypeOff(section unsafe.Pointer, off int32) *abi.Type {
	return (*abi.Type)(add(section, uintptr(off), "sizeof(rtype) > 0"))
}
1638
1639
1640
1641
1642
// typesByString returns the subslice of typelinks() whose elements have
// the given string representation.
// It may be empty (no known types with that string) or may have
// multiple elements (multiple types with that string).
func typesByString(s string) []*abi.Type {
	sections, offset := typelinks()
	var ret []*abi.Type

	for offsI, offs := range offset {
		section := sections[offsI]

		// We are looking for the first index i where the string becomes >= s.
		// This is a copy of sort.Search, with f(h) replaced by (strings are sorted).
		i, j := 0, len(offs)
		for i < j {
			h := int(uint(i+j) >> 1) // avoid overflow when computing h
			// i ≤ h < j
			if !(stringFor(rtypeOff(section, offs[h])) >= s) {
				i = h + 1 // preserves f(i-1) == false
			} else {
				j = h // preserves f(j) == true
			}
		}
		// i == j, f(i-1) == false, and f(j) (= f(i)) == true  =>  answer is i.

		// Having found the first, linear scan forward to find the last.
		// We could do a second binary search, but the caller is going
		// to do a linear scan anyway.
		for j := i; j < len(offs); j++ {
			typ := rtypeOff(section, offs[j])
			if stringFor(typ) != s {
				break
			}
			ret = append(ret, typ)
		}
	}
	return ret
}
1677
1678
// The lookupCache caches ArrayOf, ChanOf, MapOf and SliceOf lookups.
var lookupCache sync.Map // map[cacheKey]*rtype

// A cacheKey is the key for use in the lookupCache.
// Four values describe any of the types we are looking for:
// type kind, one or two subtypes, and an extra integer.
type cacheKey struct {
	kind  Kind
	t1    *abi.Type
	t2    *abi.Type
	extra uintptr
}

// The funcLookupCache caches FuncOf lookups.
// FuncOf does not share the common lookupCache since cacheKey is not
// sufficient to represent functions with multiple inputs and outputs.
var funcLookupCache struct {
	sync.Mutex // Guards stores (but not loads) on m.

	// m is a map[uint32][]*rtype keyed by the hash calculated in FuncOf.
	// Elements of m are append-only and thus safe for concurrent reading.
	m sync.Map
}
1701
1702
1703
1704
1705
1706
// ChanOf returns the channel type with the given direction and element type.
// For example, if t represents int, ChanOf(RecvDir, t) represents <-chan int.
//
// The gc runtime imposes a limit of 64 kB on channel element types.
// If t's size is equal to or exceeds this limit, ChanOf panics.
func ChanOf(dir ChanDir, t Type) Type {
	typ := t.common()

	// Look in cache.
	ckey := cacheKey{Chan, typ, nil, uintptr(dir)}
	if ch, ok := lookupCache.Load(ckey); ok {
		return ch.(*rtype)
	}

	// This restriction is imposed by the gc compiler and the runtime.
	if typ.Size_ >= 1<<16 {
		panic("reflect.ChanOf: element size too large")
	}

	// Look in known types.
	var s string
	switch dir {
	default:
		panic("reflect.ChanOf: invalid dir")
	case SendDir:
		s = "chan<- " + stringFor(typ)
	case RecvDir:
		s = "<-chan " + stringFor(typ)
	case BothDir:
		typeStr := stringFor(typ)
		if typeStr[0] == '<' {
			// typeStr is "<-chan NestedChanType".
			// We need to wrap it in parentheses so it gets
			// parsed as "chan (<-chan NestedChanType)"
			// rather than "chan<- (chan NestedChanType)".
			s = "chan (" + typeStr + ")"
		} else {
			s = "chan " + typeStr
		}
	}
	for _, tt := range typesByString(s) {
		ch := (*chanType)(unsafe.Pointer(tt))
		if ch.Elem == typ && ch.Dir == abi.ChanDir(dir) {
			ti, _ := lookupCache.LoadOrStore(ckey, toRType(tt))
			return ti.(Type)
		}
	}

	// Make a channel type, starting from the prototype descriptor
	// of a chan unsafe.Pointer.
	var ichan any = (chan unsafe.Pointer)(nil)
	prototype := *(**chanType)(unsafe.Pointer(&ichan))
	ch := *prototype
	ch.TFlag = abi.TFlagRegularMemory
	ch.Dir = abi.ChanDir(dir)
	ch.Str = resolveReflectName(newName(s, "", false, false))
	ch.Hash = fnv1(typ.Hash, 'c', byte(dir))
	ch.Elem = typ

	ti, _ := lookupCache.LoadOrStore(ckey, toRType(&ch.Type))
	return ti.(Type)
}
1763
1764
1765
1766
1767
1768
1769
// MapOf returns the map type with the given key and element types.
// For example, if k represents int and e represents string,
// MapOf(k, e) represents map[int]string.
//
// If the key type is not a valid map key type (that is, if it does
// not implement Go's == operator), MapOf panics.
func MapOf(key, elem Type) Type {
	ktyp := key.common()
	etyp := elem.common()

	// A nil Equal function marks a non-comparable (invalid key) type.
	if ktyp.Equal == nil {
		panic("reflect.MapOf: invalid key type " + stringFor(ktyp))
	}

	// Look in cache.
	ckey := cacheKey{Map, ktyp, etyp, 0}
	if mt, ok := lookupCache.Load(ckey); ok {
		return mt.(Type)
	}

	// Look in known types.
	s := "map[" + stringFor(ktyp) + "]" + stringFor(etyp)
	for _, tt := range typesByString(s) {
		mt := (*mapType)(unsafe.Pointer(tt))
		if mt.Key == ktyp && mt.Elem == etyp {
			ti, _ := lookupCache.LoadOrStore(ckey, toRType(tt))
			return ti.(Type)
		}
	}

	// Make a map type.
	// Note: flag values must match those used in the TMAP case
	// in ../cmd/compile/internal/reflectdata/reflect.go:writeType.
	var imap any = (map[unsafe.Pointer]unsafe.Pointer)(nil)
	mt := **(**mapType)(unsafe.Pointer(&imap))
	mt.Str = resolveReflectName(newName(s, "", false, false))
	mt.TFlag = 0
	mt.Hash = fnv1(etyp.Hash, 'm', byte(ktyp.Hash>>24), byte(ktyp.Hash>>16), byte(ktyp.Hash>>8), byte(ktyp.Hash))
	mt.Key = ktyp
	mt.Elem = etyp
	mt.Bucket = bucketOf(ktyp, etyp)
	mt.Hasher = func(p unsafe.Pointer, seed uintptr) uintptr {
		return typehash(ktyp, p, seed)
	}
	mt.Flags = 0
	// Oversized keys/values are stored indirectly (as pointers) in buckets;
	// the low flag bits record which of the two are indirect.
	if ktyp.Size_ > abi.MapMaxKeyBytes {
		mt.KeySize = uint8(goarch.PtrSize)
		mt.Flags |= 1 // indirect key
	} else {
		mt.KeySize = uint8(ktyp.Size_)
	}
	if etyp.Size_ > abi.MapMaxElemBytes {
		mt.ValueSize = uint8(goarch.PtrSize)
		mt.Flags |= 2 // indirect value
	} else {
		mt.MapType.ValueSize = uint8(etyp.Size_)
	}
	mt.MapType.BucketSize = uint16(mt.Bucket.Size_)
	if isReflexive(ktyp) {
		mt.Flags |= 4 // reflexive key (k == k always)
	}
	if needKeyUpdate(ktyp) {
		mt.Flags |= 8 // need key update on overwrite
	}
	if hashMightPanic(ktyp) {
		mt.Flags |= 16 // hashing the key may panic
	}
	mt.PtrToThis = 0

	ti, _ := lookupCache.LoadOrStore(ckey, toRType(&mt.Type))
	return ti.(Type)
}
1836
// funcTypes caches, per argument count n, the struct type used by FuncOf
// to allocate a funcType header plus n *rtype slots (see initFuncTypes).
// Guarded by funcTypesMutex.
var funcTypes []Type
var funcTypesMutex sync.Mutex
1839
1840 func initFuncTypes(n int) Type {
1841 funcTypesMutex.Lock()
1842 defer funcTypesMutex.Unlock()
1843 if n >= len(funcTypes) {
1844 newFuncTypes := make([]Type, n+1)
1845 copy(newFuncTypes, funcTypes)
1846 funcTypes = newFuncTypes
1847 }
1848 if funcTypes[n] != nil {
1849 return funcTypes[n]
1850 }
1851
1852 funcTypes[n] = StructOf([]StructField{
1853 {
1854 Name: "FuncType",
1855 Type: TypeOf(funcType{}),
1856 },
1857 {
1858 Name: "Args",
1859 Type: ArrayOf(n, TypeOf(&rtype{})),
1860 },
1861 })
1862 return funcTypes[n]
1863 }
1864
1865
1866
1867
1868
1869
1870
1871
// FuncOf returns the function type with the given argument and result types.
// For example if k represents int and e represents string,
// FuncOf([]Type{k}, []Type{e}, false) represents func(int) string.
//
// The variadic argument controls whether the function is variadic. FuncOf
// panics if the in[len(in)-1] does not represent a slice and variadic is
// true. It also panics if the total argument count exceeds 128.
func FuncOf(in, out []Type, variadic bool) Type {
	if variadic && (len(in) == 0 || in[len(in)-1].Kind() != Slice) {
		panic("reflect.FuncOf: last arg of variadic func must be slice")
	}

	// Make a func type from the func() prototype.
	var ifunc any = (func())(nil)
	prototype := *(**funcType)(unsafe.Pointer(&ifunc))
	n := len(in) + len(out)

	if n > 128 {
		panic("reflect.FuncOf: too many arguments")
	}

	// Allocate a funcType followed by n *rtype slots, using the struct
	// type built by initFuncTypes so the args array directly follows the
	// header in memory. o keeps the allocation alive.
	o := New(initFuncTypes(n)).Elem()
	ft := (*funcType)(unsafe.Pointer(o.Field(0).Addr().Pointer()))
	args := unsafe.Slice((**rtype)(unsafe.Pointer(o.Field(1).Addr().Pointer())), n)[0:0:n]
	*ft = *prototype

	// Build a hash over the in/out type hashes, with 'v' marking variadic
	// and '.' separating inputs from outputs, while filling args.
	var hash uint32
	for _, in := range in {
		t := in.(*rtype)
		args = append(args, t)
		hash = fnv1(hash, byte(t.t.Hash>>24), byte(t.t.Hash>>16), byte(t.t.Hash>>8), byte(t.t.Hash))
	}
	if variadic {
		hash = fnv1(hash, 'v')
	}
	hash = fnv1(hash, '.')
	for _, out := range out {
		t := out.(*rtype)
		args = append(args, t)
		hash = fnv1(hash, byte(t.t.Hash>>24), byte(t.t.Hash>>16), byte(t.t.Hash>>8), byte(t.t.Hash))
	}

	ft.TFlag = 0
	ft.Hash = hash
	ft.InCount = uint16(len(in))
	ft.OutCount = uint16(len(out))
	if variadic {
		// The top bit of OutCount records variadicity.
		ft.OutCount |= 1 << 15
	}

	// Look in cache (lock-free fast path).
	if ts, ok := funcLookupCache.m.Load(hash); ok {
		for _, t := range ts.([]*abi.Type) {
			if haveIdenticalUnderlyingType(&ft.Type, t, true) {
				return toRType(t)
			}
		}
	}

	// Not in cache: lock and re-check before inserting.
	funcLookupCache.Lock()
	defer funcLookupCache.Unlock()
	if ts, ok := funcLookupCache.m.Load(hash); ok {
		for _, t := range ts.([]*abi.Type) {
			if haveIdenticalUnderlyingType(&ft.Type, t, true) {
				return toRType(t)
			}
		}
	}

	// addToCache appends tt to the hash bucket (append-only; readers may
	// load concurrently without the lock).
	addToCache := func(tt *abi.Type) Type {
		var rts []*abi.Type
		if rti, ok := funcLookupCache.m.Load(hash); ok {
			rts = rti.([]*abi.Type)
		}
		funcLookupCache.m.Store(hash, append(rts, tt))
		return toType(tt)
	}

	// Prefer an identical compiled-in type if one exists.
	str := funcStr(ft)
	for _, tt := range typesByString(str) {
		if haveIdenticalUnderlyingType(&ft.Type, tt, true) {
			return addToCache(tt)
		}
	}

	// Populate the remaining fields of ft and store in cache.
	ft.Str = resolveReflectName(newName(str, "", false, false))
	ft.PtrToThis = 0
	return addToCache(&ft.Type)
}
1958 func stringFor(t *abi.Type) string {
1959 return toRType(t).String()
1960 }
1961
1962
// funcStr builds the string representation of a funcType, for example
// "func(int, ...string) (bool, error)".
func funcStr(ft *funcType) string {
	repr := make([]byte, 0, 64)
	repr = append(repr, "func("...)
	for i, t := range ft.InSlice() {
		if i > 0 {
			repr = append(repr, ", "...)
		}
		if ft.IsVariadic() && i == int(ft.InCount)-1 {
			// The final parameter of a variadic func is a slice; render
			// it as "...elem" rather than "[]elem".
			repr = append(repr, "..."...)
			repr = append(repr, stringFor((*sliceType)(unsafe.Pointer(t)).Elem)...)
		} else {
			repr = append(repr, stringFor(t)...)
		}
	}
	repr = append(repr, ')')
	out := ft.OutSlice()
	if len(out) == 1 {
		// Single result: just a space separator, no parentheses.
		repr = append(repr, ' ')
	} else if len(out) > 1 {
		// Multiple results are parenthesized.
		repr = append(repr, " ("...)
	}
	for i, t := range out {
		if i > 0 {
			repr = append(repr, ", "...)
		}
		repr = append(repr, stringFor(t)...)
	}
	if len(out) > 1 {
		repr = append(repr, ')')
	}
	return string(repr)
}
1995
1996
1997
1998 func isReflexive(t *abi.Type) bool {
1999 switch Kind(t.Kind()) {
2000 case Bool, Int, Int8, Int16, Int32, Int64, Uint, Uint8, Uint16, Uint32, Uint64, Uintptr, Chan, Pointer, String, UnsafePointer:
2001 return true
2002 case Float32, Float64, Complex64, Complex128, Interface:
2003 return false
2004 case Array:
2005 tt := (*arrayType)(unsafe.Pointer(t))
2006 return isReflexive(tt.Elem)
2007 case Struct:
2008 tt := (*structType)(unsafe.Pointer(t))
2009 for _, f := range tt.Fields {
2010 if !isReflexive(f.Typ) {
2011 return false
2012 }
2013 }
2014 return true
2015 default:
2016
2017 panic("isReflexive called on non-key type " + stringFor(t))
2018 }
2019 }
2020
2021
2022 func needKeyUpdate(t *abi.Type) bool {
2023 switch Kind(t.Kind()) {
2024 case Bool, Int, Int8, Int16, Int32, Int64, Uint, Uint8, Uint16, Uint32, Uint64, Uintptr, Chan, Pointer, UnsafePointer:
2025 return false
2026 case Float32, Float64, Complex64, Complex128, Interface, String:
2027
2028
2029
2030 return true
2031 case Array:
2032 tt := (*arrayType)(unsafe.Pointer(t))
2033 return needKeyUpdate(tt.Elem)
2034 case Struct:
2035 tt := (*structType)(unsafe.Pointer(t))
2036 for _, f := range tt.Fields {
2037 if needKeyUpdate(f.Typ) {
2038 return true
2039 }
2040 }
2041 return false
2042 default:
2043
2044 panic("needKeyUpdate called on non-key type " + stringFor(t))
2045 }
2046 }
2047
2048
2049 func hashMightPanic(t *abi.Type) bool {
2050 switch Kind(t.Kind()) {
2051 case Interface:
2052 return true
2053 case Array:
2054 tt := (*arrayType)(unsafe.Pointer(t))
2055 return hashMightPanic(tt.Elem)
2056 case Struct:
2057 tt := (*structType)(unsafe.Pointer(t))
2058 for _, f := range tt.Fields {
2059 if hashMightPanic(f.Typ) {
2060 return true
2061 }
2062 }
2063 return false
2064 default:
2065 return false
2066 }
2067 }
2068
// bucketOf constructs the runtime bucket type for a map[ktyp]etyp:
// abi.MapBucketCount tophash bytes, then that many keys, then that many
// elements, then one trailing overflow pointer. Oversized keys/elements
// are replaced by pointers here, matching the indirect-key/indirect-elem
// flags set in MapOf.
func bucketOf(ktyp, etyp *abi.Type) *abi.Type {
	if ktyp.Size_ > abi.MapMaxKeyBytes {
		ktyp = ptrTo(ktyp)
	}
	if etyp.Size_ > abi.MapMaxElemBytes {
		etyp = ptrTo(etyp)
	}

	// GC metadata for the bucket. Key and elem sizes are bounded above,
	// so a plain pointer mask suffices (no GC program is built here).
	var gcdata *byte
	var ptrdata uintptr

	size := abi.MapBucketCount*(1+ktyp.Size_+etyp.Size_) + goarch.PtrSize
	// The key and elem arrays inside the bucket must stay aligned.
	if size&uintptr(ktyp.Align_-1) != 0 || size&uintptr(etyp.Align_-1) != 0 {
		panic("reflect: bad size computation in MapOf")
	}

	if ktyp.Pointers() || etyp.Pointers() {
		nptr := (abi.MapBucketCount*(1+ktyp.Size_+etyp.Size_) + goarch.PtrSize) / goarch.PtrSize
		n := (nptr + 7) / 8

		// Runtime needs pointer masks to be a multiple of uintptr in size.
		n = (n + goarch.PtrSize - 1) &^ (goarch.PtrSize - 1)
		mask := make([]byte, n)
		// Skip the tophash bytes at the start of the bucket.
		base := uintptr(abi.MapBucketCount / goarch.PtrSize)

		if ktyp.Pointers() {
			emitGCMask(mask, base, ktyp, abi.MapBucketCount)
		}
		base += abi.MapBucketCount * ktyp.Size_ / goarch.PtrSize

		if etyp.Pointers() {
			emitGCMask(mask, base, etyp, abi.MapBucketCount)
		}
		base += abi.MapBucketCount * etyp.Size_ / goarch.PtrSize

		// The trailing overflow pointer is always a pointer word.
		word := base
		mask[word/8] |= 1 << (word % 8)
		gcdata = &mask[0]
		ptrdata = (word + 1) * goarch.PtrSize

		// The overflow pointer must be the last word of the bucket.
		if ptrdata != size {
			panic("reflect: bad layout computation in MapOf")
		}
	}

	b := &abi.Type{
		Align_:   goarch.PtrSize,
		Size_:    size,
		Kind_:    abi.Struct,
		PtrBytes: ptrdata,
		GCData:   gcdata,
	}
	s := "bucket(" + stringFor(ktyp) + "," + stringFor(etyp) + ")"
	b.Str = resolveReflectName(newName(s, "", false, false))
	return b
}
2131
// gcSlice returns a byte-slice view [begin, end) of t's GC data, with the
// capacity capped at end so callers cannot append past the window.
func (t *rtype) gcSlice(begin, end uintptr) []byte {
	return (*[1 << 30]byte)(unsafe.Pointer(t.t.GCData))[begin:end:end]
}
2135
2136
2137
// emitGCMask writes the GC mask for [n]typ into out, starting at bit
// offset base (measured in pointer-sized words). typ must have a pointer
// mask, not a GC program.
func emitGCMask(out []byte, base uintptr, typ *abi.Type, n uintptr) {
	if typ.Kind_&abi.KindGCProg != 0 {
		panic("reflect: unexpected GC program")
	}
	ptrs := typ.PtrBytes / goarch.PtrSize
	words := typ.Size_ / goarch.PtrSize
	mask := typ.GcSlice(0, (ptrs+7)/8)
	for j := uintptr(0); j < ptrs; j++ {
		if (mask[j/8]>>(j%8))&1 != 0 {
			// Replicate this pointer bit into each of the n elements.
			for i := uintptr(0); i < n; i++ {
				k := base + i*words + j
				out[k/8] |= 1 << (k % 8)
			}
		}
	}
}
2154
2155
2156
// appendGCProg appends the GC program describing the pointer-containing
// prefix of typ to dst and returns the extended slice. The appended
// program has no trailing stop byte.
func appendGCProg(dst []byte, typ *abi.Type) []byte {
	if typ.Kind_&abi.KindGCProg != 0 {
		// Element already has a GC program; copy it, dropping the
		// trailing stop byte (hence 4+n-1). The first 4 bytes of GCData
		// hold the program length.
		n := uintptr(*(*uint32)(unsafe.Pointer(typ.GCData)))
		prog := typ.GcSlice(4, 4+n-1)
		return append(dst, prog...)
	}

	// Element is small with a pointer mask; emit the mask as literal bits.
	ptrs := typ.PtrBytes / goarch.PtrSize
	mask := typ.GcSlice(0, (ptrs+7)/8)

	// Emit 120-bit chunks of full bytes (bounded literal-run opcode).
	for ; ptrs > 120; ptrs -= 120 {
		dst = append(dst, 120)
		dst = append(dst, mask[:15]...)
		mask = mask[15:]
	}

	dst = append(dst, byte(ptrs))
	dst = append(dst, mask...)
	return dst
}
2180
2181
2182
// SliceOf returns the slice type with element type t.
// For example, if t represents int, SliceOf(t) represents []int.
func SliceOf(t Type) Type {
	typ := t.common()

	// Look in cache first.
	ckey := cacheKey{Slice, typ, nil, 0}
	if slice, ok := lookupCache.Load(ckey); ok {
		return slice.(Type)
	}

	// Look among compiled-in types with the same string.
	s := "[]" + stringFor(typ)
	for _, tt := range typesByString(s) {
		slice := (*sliceType)(unsafe.Pointer(tt))
		if slice.Elem == typ {
			ti, _ := lookupCache.LoadOrStore(ckey, toRType(tt))
			return ti.(Type)
		}
	}

	// Make a slice type by mutating a copy of the []unsafe.Pointer
	// prototype.
	var islice any = ([]unsafe.Pointer)(nil)
	prototype := *(**sliceType)(unsafe.Pointer(&islice))
	slice := *prototype
	slice.TFlag = 0
	slice.Str = resolveReflectName(newName(s, "", false, false))
	slice.Hash = fnv1(typ.Hash, '[')
	slice.Elem = typ
	slice.PtrToThis = 0

	ti, _ := lookupCache.LoadOrStore(ckey, toRType(&slice.Type))
	return ti.(Type)
}
2215
2216
2217
2218
// structLookupCache caches StructOf lookups, keyed by the hash computed
// in StructOf.
var structLookupCache struct {
	sync.Mutex // Guards stores (but not loads) on m.

	// m is a map[uint32][]Type keyed by the hash calculated in StructOf.
	// Elements in m are append-only and thus safe for concurrent reading.
	m sync.Map
}
2226
// structTypeUncommon is the allocation unit used by StructOf for struct
// types without methods: a structType immediately followed in memory by
// its uncommonType.
type structTypeUncommon struct {
	structType
	u uncommonType
}
2231
2232
// isLetter reports whether ch counts as a letter for Go identifier
// purposes: an ASCII letter, an underscore, or a non-ASCII Unicode letter.
func isLetter(ch rune) bool {
	switch {
	case 'a' <= ch && ch <= 'z', 'A' <= ch && ch <= 'Z':
		return true
	case ch == '_':
		return true
	case ch >= utf8.RuneSelf:
		return unicode.IsLetter(ch)
	default:
		return false
	}
}
2236
2237
2238
2239
2240
2241
2242
2243 func isValidFieldName(fieldName string) bool {
2244 for i, c := range fieldName {
2245 if i == 0 && !isLetter(c) {
2246 return false
2247 }
2248
2249 if !(isLetter(c) || unicode.IsDigit(c)) {
2250 return false
2251 }
2252 }
2253
2254 return len(fieldName) > 0
2255 }
2256
2257
2258 func isRegularMemory(t Type) bool {
2259 switch t.Kind() {
2260 case Array:
2261 elem := t.Elem()
2262 if isRegularMemory(elem) {
2263 return true
2264 }
2265 return elem.Comparable() && t.Len() == 0
2266 case Int8, Int16, Int32, Int64, Int, Uint8, Uint16, Uint32, Uint64, Uint, Uintptr, Chan, Pointer, Bool, UnsafePointer:
2267 return true
2268 case Struct:
2269 num := t.NumField()
2270 switch num {
2271 case 0:
2272 return true
2273 case 1:
2274 field := t.Field(0)
2275 if field.Name == "_" {
2276 return false
2277 }
2278 return isRegularMemory(field.Type)
2279 default:
2280 for i := range num {
2281 field := t.Field(i)
2282 if field.Name == "_" || !isRegularMemory(field.Type) || isPaddedField(t, i) {
2283 return false
2284 }
2285 }
2286 return true
2287 }
2288 }
2289 return false
2290 }
2291
2292
2293
2294 func isPaddedField(t Type, i int) bool {
2295 field := t.Field(i)
2296 if i+1 < t.NumField() {
2297 return field.Offset+field.Type.Size() != t.Field(i+1).Offset
2298 }
2299 return field.Offset+field.Type.Size() != t.Size()
2300 }
2301
2302
2303
2304
2305
2306
2307
// StructOf returns the struct type containing fields.
// The Offset and Index fields are ignored and computed as they would be
// by the compiler.
//
// StructOf currently does not support promoted methods of embedded fields
// and panics if passed unexported StructFields.
func StructOf(fields []StructField) Type {
	var (
		hash       = fnv1(0, []byte("struct {")...)
		size       uintptr
		typalign   uint8
		comparable = true
		methods    []abi.Method

		fs   = make([]structField, len(fields))
		repr = make([]byte, 0, 64)
		fset = map[string]struct{}{} // fields' names

		hasGCProg = false // records whether a struct-field type has a GCProg
	)

	lastzero := uintptr(0)
	repr = append(repr, "struct {"...)
	pkgpath := ""
	for i, field := range fields {
		// Validate each field before converting it.
		if field.Name == "" {
			panic("reflect.StructOf: field " + strconv.Itoa(i) + " has no name")
		}
		if !isValidFieldName(field.Name) {
			panic("reflect.StructOf: field " + strconv.Itoa(i) + " has invalid name")
		}
		if field.Type == nil {
			panic("reflect.StructOf: field " + strconv.Itoa(i) + " has no type")
		}
		f, fpkgpath := runtimeStructField(field)
		ft := f.Typ
		if ft.Kind_&abi.KindGCProg != 0 {
			hasGCProg = true
		}
		if fpkgpath != "" {
			// All unexported fields must share a single package path.
			if pkgpath == "" {
				pkgpath = fpkgpath
			} else if pkgpath != fpkgpath {
				panic("reflect.Struct: fields with different PkgPath " + pkgpath + " and " + fpkgpath)
			}
		}

		// Update the string representation and the running hash.
		name := f.Name.Name()
		hash = fnv1(hash, []byte(name)...)
		if !f.Embedded() {
			repr = append(repr, (" " + name)...)
		} else {
			// Embedded field: validate and collect promoted methods.
			if f.Typ.Kind() == abi.Pointer {
				// Embedded ** and *interface{} are illegal.
				elem := ft.Elem()
				if k := elem.Kind(); k == abi.Pointer || k == abi.Interface {
					panic("reflect.StructOf: illegal embedded field type " + stringFor(ft))
				}
			}

			switch Kind(f.Typ.Kind()) {
			case Interface:
				// Promote the interface's methods via a panicking stub
				// (real promotion is not implemented; see
				// embeddedIfaceMethStub).
				ift := (*interfaceType)(unsafe.Pointer(ft))
				for _, m := range ift.Methods {
					if pkgPath(ift.nameOff(m.Name)) != "" {
						// Unexported interface methods: not implemented.
						panic("reflect: embedded interface with unexported method(s) not implemented")
					}

					fnStub := resolveReflectText(unsafe.Pointer(abi.FuncPCABIInternal(embeddedIfaceMethStub)))
					methods = append(methods, abi.Method{
						Name: resolveReflectName(ift.nameOff(m.Name)),
						Mtyp: resolveReflectType(ift.typeOff(m.Typ)),
						Ifn:  fnStub,
						Tfn:  fnStub,
					})
				}
			case Pointer:
				ptr := (*ptrType)(unsafe.Pointer(ft))
				if unt := ptr.Uncommon(); unt != nil {
					// Methods on the pointer type itself.
					if i > 0 && unt.Mcount > 0 {
						panic("reflect: embedded type with methods not implemented if type is not first field")
					}
					if len(fields) > 1 {
						panic("reflect: embedded type with methods not implemented if there is more than one field")
					}
					for _, m := range unt.Methods() {
						mname := nameOffFor(ft, m.Name)
						if pkgPath(mname) != "" {
							panic("reflect: embedded interface with unexported method(s) not implemented")
						}
						methods = append(methods, abi.Method{
							Name: resolveReflectName(mname),
							Mtyp: resolveReflectType(typeOffFor(ft, m.Mtyp)),
							Ifn:  resolveReflectText(textOffFor(ft, m.Ifn)),
							Tfn:  resolveReflectText(textOffFor(ft, m.Tfn)),
						})
					}
				}
				if unt := ptr.Elem.Uncommon(); unt != nil {
					// Methods on the pointed-to type, promoted through
					// the pointer.
					for _, m := range unt.Methods() {
						mname := nameOffFor(ft, m.Name)
						if pkgPath(mname) != "" {
							panic("reflect: embedded interface with unexported method(s) not implemented")
						}
						methods = append(methods, abi.Method{
							Name: resolveReflectName(mname),
							Mtyp: resolveReflectType(typeOffFor(ptr.Elem, m.Mtyp)),
							Ifn:  resolveReflectText(textOffFor(ptr.Elem, m.Ifn)),
							Tfn:  resolveReflectText(textOffFor(ptr.Elem, m.Tfn)),
						})
					}
				}
			default:
				// Non-pointer, non-interface embedded type.
				if unt := ft.Uncommon(); unt != nil {
					if i > 0 && unt.Mcount > 0 {
						panic("reflect: embedded type with methods not implemented if type is not first field")
					}
					if len(fields) > 1 && ft.Kind_&abi.KindDirectIface != 0 {
						panic("reflect: embedded type with methods not implemented for non-pointer type")
					}
					for _, m := range unt.Methods() {
						mname := nameOffFor(ft, m.Name)
						if pkgPath(mname) != "" {
							panic("reflect: embedded interface with unexported method(s) not implemented")
						}
						methods = append(methods, abi.Method{
							Name: resolveReflectName(mname),
							Mtyp: resolveReflectType(typeOffFor(ft, m.Mtyp)),
							Ifn:  resolveReflectText(textOffFor(ft, m.Ifn)),
							Tfn:  resolveReflectText(textOffFor(ft, m.Tfn)),
						})

					}
				}
			}
		}
		// Reject duplicate field names ("_" may repeat).
		if _, dup := fset[name]; dup && name != "_" {
			panic("reflect.StructOf: duplicate field " + name)
		}
		fset[name] = struct{}{}

		hash = fnv1(hash, byte(ft.Hash>>24), byte(ft.Hash>>16), byte(ft.Hash>>8), byte(ft.Hash))

		repr = append(repr, (" " + stringFor(ft))...)
		if f.Name.HasTag() {
			hash = fnv1(hash, []byte(f.Name.Tag())...)
			repr = append(repr, (" " + strconv.Quote(f.Name.Tag()))...)
		}
		if i < len(fields)-1 {
			repr = append(repr, ';')
		}

		comparable = comparable && (ft.Equal != nil)

		// Lay the field out, checking for address-space overflow.
		offset := align(size, uintptr(ft.Align_))
		if offset < size {
			panic("reflect.StructOf: struct size would exceed virtual address space")
		}
		if ft.Align_ > typalign {
			typalign = ft.Align_
		}
		size = offset + ft.Size_
		if size < offset {
			panic("reflect.StructOf: struct size would exceed virtual address space")
		}
		f.Offset = offset

		if ft.Size_ == 0 {
			lastzero = size
		}

		fs[i] = f
	}

	if size > 0 && lastzero == size {
		// This is a non-zero-sized struct that ends in a zero-sized
		// field. Add an extra byte of padding so that taking the address
		// of that final field cannot manufacture a pointer to the next
		// object in the heap.
		size++
		if size == 0 {
			panic("reflect.StructOf: struct size would exceed virtual address space")
		}
	}

	var typ *structType
	var ut *uncommonType

	if len(methods) == 0 {
		t := new(structTypeUncommon)
		typ = &t.structType
		ut = &t.u
	} else {
		// A struct type's method objects must directly follow the
		// uncommonType in memory, matching the layout the compiler
		// produces; build a matching allocation with a nested StructOf.
		tt := New(StructOf([]StructField{
			{Name: "S", Type: TypeOf(structType{})},
			{Name: "U", Type: TypeOf(uncommonType{})},
			{Name: "M", Type: ArrayOf(len(methods), TypeOf(methods[0]))},
		}))

		typ = (*structType)(tt.Elem().Field(0).Addr().UnsafePointer())
		ut = (*uncommonType)(tt.Elem().Field(1).Addr().UnsafePointer())

		copy(tt.Elem().Field(2).Slice(0, len(methods)).Interface().([]abi.Method), methods)
	}
	// All collected methods are exported (unexported ones panicked above),
	// so Xcount equals Mcount.
	ut.Mcount = uint16(len(methods))
	ut.Xcount = ut.Mcount
	ut.Moff = uint32(unsafe.Sizeof(uncommonType{}))

	if len(fs) > 0 {
		repr = append(repr, ' ')
	}
	repr = append(repr, '}')
	hash = fnv1(hash, '}')
	str := string(repr)

	// Round the size up to a multiple of the alignment.
	s := align(size, uintptr(typalign))
	if s < size {
		panic("reflect.StructOf: struct size would exceed virtual address space")
	}
	size = s

	// Make the struct type from the struct{} prototype.
	var istruct any = struct{}{}
	prototype := *(**structType)(unsafe.Pointer(&istruct))
	*typ = *prototype
	typ.Fields = fs
	if pkgpath != "" {
		typ.PkgPath = newName(pkgpath, "", false, false)
	}

	// Look in cache (lock-free fast path).
	if ts, ok := structLookupCache.m.Load(hash); ok {
		for _, st := range ts.([]Type) {
			t := st.common()
			if haveIdenticalUnderlyingType(&typ.Type, t, true) {
				return toType(t)
			}
		}
	}

	// Not in cache: lock and re-check before inserting.
	structLookupCache.Lock()
	defer structLookupCache.Unlock()
	if ts, ok := structLookupCache.m.Load(hash); ok {
		for _, st := range ts.([]Type) {
			t := st.common()
			if haveIdenticalUnderlyingType(&typ.Type, t, true) {
				return toType(t)
			}
		}
	}

	// addToCache appends t to the hash bucket (append-only; readers may
	// load concurrently without the lock).
	addToCache := func(t Type) Type {
		var ts []Type
		if ti, ok := structLookupCache.m.Load(hash); ok {
			ts = ti.([]Type)
		}
		structLookupCache.m.Store(hash, append(ts, t))
		return t
	}

	// Prefer an identical compiled-in type if one exists.
	for _, t := range typesByString(str) {
		if haveIdenticalUnderlyingType(&typ.Type, t, true) {
			// Even if t isn't laid out with trailing methods, the
			// uncommon data is only consulted when TFlagUncommon is set.
			return addToCache(toType(t))
		}
	}

	typ.Str = resolveReflectName(newName(str, "", false, false))
	if isRegularMemory(toType(&typ.Type)) {
		typ.TFlag = abi.TFlagRegularMemory
	} else {
		typ.TFlag = 0
	}
	typ.Hash = hash
	typ.Size_ = size
	typ.PtrBytes = typeptrdata(&typ.Type)
	typ.Align_ = typalign
	typ.FieldAlign_ = typalign
	typ.PtrToThis = 0
	if len(methods) > 0 {
		typ.TFlag |= abi.TFlagUncommon
	}

	if hasGCProg {
		// At least one field carries a GC program, so the struct needs
		// one too: concatenate field programs, padding pointer-free gaps.
		lastPtrField := 0
		for i, ft := range fs {
			if ft.Typ.Pointers() {
				lastPtrField = i
			}
		}
		prog := []byte{0, 0, 0, 0} // first 4 bytes: program length, patched below
		var off uintptr
		for i, ft := range fs {
			if i > lastPtrField {
				// The program should not include anything for fields
				// after the last field that contains pointer data.
				break
			}
			if !ft.Typ.Pointers() {
				// Ignore pointerless fields.
				continue
			}
			// Pad to the start of this field with zero bits.
			if ft.Offset > off {
				n := (ft.Offset - off) / goarch.PtrSize
				prog = append(prog, 0x01, 0x00) // emit a 0 bit
				if n > 1 {
					prog = append(prog, 0x81)      // repeat last bit
					prog = appendVarint(prog, n-1) // n-1 times
				}
				off = ft.Offset
			}

			prog = appendGCProg(prog, ft.Typ)
			off += ft.Typ.PtrBytes
		}
		prog = append(prog, 0) // stop byte
		*(*uint32)(unsafe.Pointer(&prog[0])) = uint32(len(prog) - 4)
		typ.Kind_ |= abi.KindGCProg
		typ.GCData = &prog[0]
	} else {
		// No GC programs involved: build a plain pointer bitmap.
		typ.Kind_ &^= abi.KindGCProg
		bv := new(bitVector)
		addTypeBits(bv, 0, &typ.Type)
		if len(bv.data) > 0 {
			typ.GCData = &bv.data[0]
		}
	}
	typ.Equal = nil
	if comparable {
		// Field-by-field equality; only installed when every field type
		// is itself comparable.
		typ.Equal = func(p, q unsafe.Pointer) bool {
			for _, ft := range typ.Fields {
				pi := add(p, ft.Offset, "&x.field safe")
				qi := add(q, ft.Offset, "&x.field safe")
				if !ft.Typ.Equal(pi, qi) {
					return false
				}
			}
			return true
		}
	}

	switch {
	case len(fs) == 1 && !fs[0].Typ.IfaceIndir():
		// Structs of one direct-interface field can be stored directly.
		typ.Kind_ |= abi.KindDirectIface
	default:
		typ.Kind_ &^= abi.KindDirectIface
	}

	return addToCache(toType(&typ.Type))
}
2680
// embeddedIfaceMethStub is installed by StructOf as the implementation of
// every method promoted from an embedded interface; calling such a method
// always panics.
func embeddedIfaceMethStub() {
	panic("reflect: StructOf does not support methods of embedded interfaces")
}
2684
2685
2686
2687
// runtimeStructField takes a StructField value passed to StructOf and
// returns both the corresponding internal representation, of type
// structField, and the pkgpath value to use for this field. Panics on
// obviously misused fields (anonymous with PkgPath, or a lowercase name
// presented as exported).
func runtimeStructField(field StructField) (structField, string) {
	if field.Anonymous && field.PkgPath != "" {
		panic("reflect.StructOf: field \"" + field.Name + "\" is anonymous but has PkgPath set")
	}

	if field.IsExported() {
		// Best-effort misuse check: an ASCII-lowercase or '_' first
		// character means the name cannot actually be exported, so the
		// empty PkgPath is a mistake. (Non-ASCII lowercase may slip
		// through — presumably tolerated as low-harm; confirm upstream.)
		c := field.Name[0]
		if 'a' <= c && c <= 'z' || c == '_' {
			panic("reflect.StructOf: field \"" + field.Name + "\" is unexported but missing PkgPath")
		}
	}

	resolveReflectType(field.Type.common()) // install in runtime
	f := structField{
		Name:   newName(field.Name, string(field.Tag), field.IsExported(), field.Anonymous),
		Typ:    field.Type.common(),
		Offset: 0,
	}
	return f, field.PkgPath
}
2710
2711
2712
2713
// typeptrdata returns the length in bytes of the prefix of t containing
// pointer data. Anything after this offset is scalar data. Only struct
// types are expected here (the one caller in this file is StructOf).
func typeptrdata(t *abi.Type) uintptr {
	switch t.Kind() {
	case abi.Struct:
		st := (*structType)(unsafe.Pointer(t))
		// Find the last field that has pointers.
		field := -1
		for i := range st.Fields {
			ft := st.Fields[i].Typ
			if ft.Pointers() {
				field = i
			}
		}
		if field == -1 {
			return 0
		}
		f := st.Fields[field]
		return f.Offset + f.Typ.PtrBytes

	default:
		panic("reflect.typeptrdata: unexpected type, " + stringFor(t))
	}
}
2736
2737
2738
2739
2740
2741
// ArrayOf returns the array type with the given length and element type.
// For example, if t represents int, ArrayOf(5, t) represents [5]int.
//
// If the resulting type would be larger than the available address space,
// ArrayOf panics. A negative length also panics.
func ArrayOf(length int, elem Type) Type {
	if length < 0 {
		panic("reflect: negative length passed to ArrayOf")
	}

	typ := elem.common()

	// Look in cache first.
	ckey := cacheKey{Array, typ, nil, uintptr(length)}
	if array, ok := lookupCache.Load(ckey); ok {
		return array.(Type)
	}

	// Look among compiled-in types.
	s := "[" + strconv.Itoa(length) + "]" + stringFor(typ)
	for _, tt := range typesByString(s) {
		array := (*arrayType)(unsafe.Pointer(tt))
		if array.Elem == typ {
			ti, _ := lookupCache.LoadOrStore(ckey, toRType(tt))
			return ti.(Type)
		}
	}

	// Make an array type by mutating a copy of the [1]unsafe.Pointer
	// prototype.
	var iarray any = [1]unsafe.Pointer{}
	prototype := *(**arrayType)(unsafe.Pointer(&iarray))
	array := *prototype
	array.TFlag = typ.TFlag & abi.TFlagRegularMemory
	array.Str = resolveReflectName(newName(s, "", false, false))
	array.Hash = fnv1(typ.Hash, '[')
	// Mix the length into the hash a byte at a time.
	for n := uint32(length); n > 0; n >>= 8 {
		array.Hash = fnv1(array.Hash, byte(n))
	}
	array.Hash = fnv1(array.Hash, ']')
	array.Elem = typ
	array.PtrToThis = 0
	if typ.Size_ > 0 {
		// Guard against length*Size_ overflowing uintptr.
		max := ^uintptr(0) / typ.Size_
		if uintptr(length) > max {
			panic("reflect.ArrayOf: array size would exceed virtual address space")
		}
	}
	array.Size_ = typ.Size_ * uintptr(length)
	if length > 0 && typ.Pointers() {
		// Pointer data ends within the last element.
		array.PtrBytes = typ.Size_*uintptr(length-1) + typ.PtrBytes
	}
	array.Align_ = typ.Align_
	array.FieldAlign_ = typ.FieldAlign_
	array.Len = uintptr(length)
	array.Slice = &(SliceOf(elem).(*rtype).t)

	// Choose the GC metadata representation.
	switch {
	case !typ.Pointers() || array.Size_ == 0:
		// No pointers at all.
		array.GCData = nil
		array.PtrBytes = 0

	case length == 1:
		// In memory, a 1-element array looks just like its element.
		array.Kind_ |= typ.Kind_ & abi.KindGCProg
		array.GCData = typ.GCData
		array.PtrBytes = typ.PtrBytes

	case typ.Kind_&abi.KindGCProg == 0 && array.Size_ <= abi.MaxPtrmaskBytes*8*goarch.PtrSize:
		// Element has a pointer mask and the whole array is still small:
		// replicate the element mask length times into a direct mask.
		n := (array.PtrBytes/goarch.PtrSize + 7) / 8
		// Runtime needs pointer masks to be a multiple of uintptr in size.
		n = (n + goarch.PtrSize - 1) &^ (goarch.PtrSize - 1)
		mask := make([]byte, n)
		emitGCMask(mask, 0, typ, array.Len)
		array.GCData = &mask[0]

	default:
		// Create a GC program that emits one element and then repeats it
		// to cover the array.
		prog := []byte{0, 0, 0, 0} // first 4 bytes: program length, patched below
		prog = appendGCProg(prog, typ)
		// Pad from the element's ptrdata out to its full size with zero
		// bits, so the repeat covers whole elements.
		elemPtrs := typ.PtrBytes / goarch.PtrSize
		elemWords := typ.Size_ / goarch.PtrSize
		if elemPtrs < elemWords {
			prog = append(prog, 0x01, 0x00) // emit a 0 bit
			if elemPtrs+1 < elemWords {
				prog = append(prog, 0x81) // repeat last bit
				prog = appendVarint(prog, elemWords-elemPtrs-1)
			}
		}
		// Repeat the elemWords-wide pattern length-1 more times.
		if elemWords < 0x80 {
			prog = append(prog, byte(elemWords|0x80))
		} else {
			prog = append(prog, 0x80)
			prog = appendVarint(prog, elemWords)
		}
		prog = appendVarint(prog, uintptr(length)-1)
		prog = append(prog, 0) // stop byte
		*(*uint32)(unsafe.Pointer(&prog[0])) = uint32(len(prog) - 4)
		array.Kind_ |= abi.KindGCProg
		array.GCData = &prog[0]
		array.PtrBytes = array.Size_
	}

	etyp := typ
	esize := etyp.Size()

	array.Equal = nil
	if eequal := etyp.Equal; eequal != nil {
		// Element-by-element equality; only when the element type is
		// itself comparable.
		array.Equal = func(p, q unsafe.Pointer) bool {
			for i := 0; i < length; i++ {
				pi := arrayAt(p, i, esize, "i < length")
				qi := arrayAt(q, i, esize, "i < length")
				if !eequal(pi, qi) {
					return false
				}
			}
			return true
		}
	}

	switch {
	case length == 1 && !typ.IfaceIndir():
		// An array of one direct-interface element can be stored directly.
		array.Kind_ |= abi.KindDirectIface
	default:
		array.Kind_ &^= abi.KindDirectIface
	}

	ti, _ := lookupCache.LoadOrStore(ckey, toRType(&array.Type))
	return ti.(Type)
}
2876
// appendVarint appends v to x in the GC program's base-128 varint
// encoding — seven bits per byte, low-order group first, high bit set on
// every byte except the last — and returns the extended slice.
func appendVarint(x []byte, v uintptr) []byte {
	for v >= 0x80 {
		x = append(x, byte(v)|0x80)
		v >>= 7
	}
	return append(x, byte(v))
}
2884
2885
2886
2887
2888
2889
2890 func toType(t *abi.Type) Type {
2891 if t == nil {
2892 return nil
2893 }
2894 return toRType(t)
2895 }
2896
// layoutKey identifies a (function type, receiver type) pair for
// layoutCache.
type layoutKey struct {
	ftyp *funcType // function signature
	rcvr *abi.Type // receiver type; nil for plain functions
}

// layoutType is the cached result of funcLayout: the synthesized frame
// type, a pool of frames of that type, and the computed ABI description.
type layoutType struct {
	t         *abi.Type
	framePool *sync.Pool
	abid      abiDesc
}

// layoutCache caches funcLayout results; map[layoutKey]layoutType.
var layoutCache sync.Map
2909
2910
2911
2912
2913
2914
2915
2916
// funcLayout computes a struct type representing the layout of the
// stack-assigned arguments and return values for function type t. If
// rcvr != nil, rcvr specifies the type of the receiver. The returned
// type is populated only with GC-relevant information (size, pointer
// data) plus a name for debugging; results are cached in layoutCache.
func funcLayout(t *funcType, rcvr *abi.Type) (frametype *abi.Type, framePool *sync.Pool, abid abiDesc) {
	if t.Kind() != abi.Func {
		panic("reflect: funcLayout of non-func type " + stringFor(&t.Type))
	}
	if rcvr != nil && rcvr.Kind() == abi.Interface {
		panic("reflect: funcLayout with interface receiver " + stringFor(rcvr))
	}
	k := layoutKey{t, rcvr}
	if lti, ok := layoutCache.Load(k); ok {
		lt := lti.(layoutType)
		return lt.t, lt.framePool, lt.abid
	}

	// Compute the ABI layout.
	abid = newAbiDesc(t, rcvr)

	// Build a dummy type carrying the frame's size and pointer bitmap.
	x := &abi.Type{
		Align_: goarch.PtrSize,
		// Size covers arguments plus stack-assigned results, rounded to
		// a pointer-word boundary.
		Size_:    align(abid.retOffset+abid.ret.stackBytes, goarch.PtrSize),
		PtrBytes: uintptr(abid.stackPtrs.n) * goarch.PtrSize,
	}
	if abid.stackPtrs.n > 0 {
		x.GCData = &abid.stackPtrs.data[0]
	}

	var s string
	if rcvr != nil {
		s = "methodargs(" + stringFor(rcvr) + ")(" + stringFor(&t.Type) + ")"
	} else {
		s = "funcargs(" + stringFor(&t.Type) + ")"
	}
	x.Str = resolveReflectName(newName(s, "", false, false))

	// Cache the result; LoadOrStore means concurrent callers converge on
	// one frame type and pool.
	framePool = &sync.Pool{New: func() any {
		return unsafe_New(x)
	}}
	lti, _ := layoutCache.LoadOrStore(k, layoutType{
		t:         x,
		framePool: framePool,
		abid:      abid,
	})
	lt := lti.(layoutType)
	return lt.t, lt.framePool, lt.abid
}
2967
2968
// A bitVector is an append-only bitmap used to build GC pointer masks
// one word at a time.
type bitVector struct {
	n    uint32 // number of bits
	data []byte
}
2973
2974
// append a bit to the bitVector.
func (bv *bitVector) append(bit uint8) {
	if bv.n%(8*goarch.PtrSize) == 0 {
		// Runtime needs pointer masks to be a multiple of uintptr in
		// size, and bv.data is handed to the runtime directly, so grow
		// by a full uintptr's worth of zero bytes at a time.
		for i := 0; i < goarch.PtrSize; i++ {
			bv.data = append(bv.data, 0)
		}
	}
	bv.data[bv.n/8] |= bit << (bv.n % 8)
	bv.n++
}
2987
// addTypeBits appends t's pointer bitmap to bv, positioned at byte offset
// within the enclosing object. Pointer-free types contribute nothing.
func addTypeBits(bv *bitVector, offset uintptr, t *abi.Type) {
	if !t.Pointers() {
		return
	}

	switch Kind(t.Kind_ & abi.KindMask) {
	case Chan, Func, Map, Pointer, Slice, String, UnsafePointer:
		// One pointer word at the start of the representation; pad with
		// zero bits up to offset first.
		for bv.n < uint32(offset/goarch.PtrSize) {
			bv.append(0)
		}
		bv.append(1)

	case Interface:
		// Two pointer words.
		for bv.n < uint32(offset/goarch.PtrSize) {
			bv.append(0)
		}
		bv.append(1)
		bv.append(1)

	case Array:
		// Repeat the element's bits for each element.
		tt := (*arrayType)(unsafe.Pointer(t))
		for i := 0; i < int(tt.Len); i++ {
			addTypeBits(bv, offset+uintptr(i)*tt.Elem.Size_, tt.Elem)
		}

	case Struct:
		// Apply each field at its offset.
		tt := (*structType)(unsafe.Pointer(t))
		for i := range tt.Fields {
			f := &tt.Fields[i]
			addTypeBits(bv, offset+f.Offset, f.Typ)
		}
	}
}
3025
3026
3027 func TypeFor[T any]() Type {
3028 var v T
3029 if t := TypeOf(v); t != nil {
3030 return t
3031 }
3032 return TypeOf((*T)(nil)).Elem()
3033 }
3034
3035
3036
3037
// ifaceIndir reports whether t is stored indirectly in an interface value
// (i.e. its KindDirectIface bit is clear).
func ifaceIndir(t *abi.Type) bool {
	return t.Kind_&abi.KindDirectIface == 0
}
3041
View as plain text