Source file: src/reflect/type.go

// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Package reflect implements run-time reflection, allowing a program to
// manipulate objects with arbitrary types. The typical use is to take a
// value with static type interface{} and extract its dynamic type
// information by calling TypeOf, which returns a Type.
//
// A call to ValueOf returns a Value representing the run-time data.
// Zero takes a Type and returns a Value representing a zero value
// for that type.
//
// See "The Laws of Reflection" for an introduction to reflection in Go:
// https://go.dev/blog/laws-of-reflection
package reflect

import (
	"internal/abi"
	"internal/goarch"
	"strconv"
	"sync"
	"unicode"
	"unicode/utf8"
	"unsafe"
)

// Type is the representation of a Go type.
//
// Not all methods apply to all kinds of types. Restrictions,
// if any, are noted in the documentation for each method.
// Use the Kind method to find out the kind of type before
// calling kind-specific methods. Calling a method
// inappropriate to the kind of type causes a run-time panic.
//
// Type values are comparable, such as with the == operator,
// so they can be used as map keys.
// Two Type values are equal if they represent identical types.
type Type interface {
	// Methods applicable to all types.

	// Align returns the alignment in bytes of a value of
	// this type when allocated in memory.
	Align() int

	// FieldAlign returns the alignment in bytes of a value of
	// this type when used as a field in a struct.
	FieldAlign() int

	// Method returns the i'th method in the type's method set.
	// It panics if i is not in the range [0, NumMethod()).
	//
	// For a non-interface type T or *T, the returned Method's Type and Func
	// fields describe a function whose first argument is the receiver,
	// and only exported methods are accessible.
	//
	// For an interface type, the returned Method's Type field gives the
	// method signature, without a receiver, and the Func field is nil.
	//
	// Methods are sorted in lexicographic order.
	Method(int) Method

	// MethodByName returns the method with that name in the type's
	// method set and a boolean indicating if the method was found.
	//
	// For a non-interface type T or *T, the returned Method's Type and Func
	// fields describe a function whose first argument is the receiver.
	//
	// For an interface type, the returned Method's Type field gives the
	// method signature, without a receiver, and the Func field is nil.
	MethodByName(string) (Method, bool)

	// NumMethod returns the number of methods accessible using Method.
	//
	// For a non-interface type, it returns the number of exported methods.
	//
	// For an interface type, it returns the number of exported and
	// unexported methods.
	NumMethod() int

	// Name returns the type's name within its package for a defined type.
	// For other (non-defined) types it returns the empty string.
	Name() string

	// PkgPath returns a defined type's package path, that is, the import path
	// that uniquely identifies the package, such as "encoding/base64".
	// If the type was predeclared (string, error) or not defined (*T, struct{},
	// []int, or A where A is an alias for a non-defined type), the package
	// path will be the empty string.
	PkgPath() string

	// Size returns the number of bytes needed to store
	// a value of the given type; it is analogous to unsafe.Sizeof.
	Size() uintptr

	// String returns a string representation of the type.
	// The string representation may use shortened package names
	// (e.g., base64 instead of "encoding/base64") and is not
	// guaranteed to be unique among types. To test for type identity,
	// compare the Types directly.
	String() string

	// Kind returns the specific kind of this type.
	Kind() Kind

	// Implements reports whether the type implements the interface type u.
	Implements(u Type) bool

	// AssignableTo reports whether a value of the type is assignable to type u.
	AssignableTo(u Type) bool

	// ConvertibleTo reports whether a value of the type is convertible to
	// type u. Even if ConvertibleTo returns true, the conversion may still
	// panic.
	ConvertibleTo(u Type) bool

	// Comparable reports whether values of this type are comparable.
	// Even if Comparable returns true, the comparison may still panic.
	Comparable() bool

	// Methods applicable only to some types, depending on Kind.
	// The methods allowed for each kind are:
	//
	//	Int*, Uint*, Float*, Complex*: Bits
	//	Array: Elem, Len
	//	Chan: ChanDir, Elem
	//	Func: In, NumIn, Out, NumOut, IsVariadic
	//	Map: Key, Elem
	//	Pointer: Elem
	//	Slice: Elem
	//	Struct: Field, FieldByIndex, FieldByName, FieldByNameFunc, NumField

	// Bits returns the size of the type in bits.
	// It panics if the type's Kind is not one of the
	// sized or unsized Int, Uint, Float, or Complex kinds.
	Bits() int

	// ChanDir returns a channel type's direction.
	// It panics if the type's Kind is not Chan.
	ChanDir() ChanDir

	// IsVariadic reports whether a function type's final input parameter
	// is a "..." parameter. If so, t.In(t.NumIn() - 1) returns the
	// parameter's implicit actual type []T.
	// IsVariadic panics if the type's Kind is not Func.
	IsVariadic() bool

	// Elem returns a type's element type.
	// It panics if the type's Kind is not Array, Chan, Map, Pointer, or Slice.
	Elem() Type

	// Field returns a struct type's i'th field.
	// It panics if the type's Kind is not Struct.
	// It panics if i is not in the range [0, NumField()).
	Field(i int) StructField

	// FieldByIndex returns the nested field corresponding
	// to the index sequence. It is equivalent to calling Field
	// successively for each index i.
	// It panics if the type's Kind is not Struct.
	FieldByIndex(index []int) StructField

	// FieldByName returns the struct field with the given name
	// and a boolean indicating if the field was found.
	// If the returned field is promoted from an embedded struct,
	// then Offset in the returned StructField is the offset in
	// the embedded struct.
	FieldByName(name string) (StructField, bool)

	// FieldByNameFunc returns the struct field with a name
	// that satisfies the match function and a boolean indicating if
	// the field was found.
	//
	// FieldByNameFunc considers the fields in the struct itself
	// and then the fields in any embedded structs, in breadth first order,
	// stopping at the shallowest nesting depth containing one or more
	// fields satisfying the match function. If multiple fields at that
	// depth satisfy the match function, they cancel each other
	// and FieldByNameFunc returns no match.
	// This behavior mirrors Go's handling of name lookup in
	// structs containing embedded fields.
	FieldByNameFunc(match func(string) bool) (StructField, bool)

	// In returns the type of a function type's i'th input parameter.
	// It panics if the type's Kind is not Func.
	// It panics if i is not in the range [0, NumIn()).
	In(i int) Type

	// Key returns a map type's key type.
	// It panics if the type's Kind is not Map.
	Key() Type

	// Len returns an array type's length.
	// It panics if the type's Kind is not Array.
	Len() int

	// NumField returns a struct type's field count.
	// It panics if the type's Kind is not Struct.
	NumField() int

	// NumIn returns a function type's input parameter count.
	// It panics if the type's Kind is not Func.
	NumIn() int

	// NumOut returns a function type's output parameter count.
	// It panics if the type's Kind is not Func.
	NumOut() int

	// Out returns the type of a function type's i'th output parameter.
	// It panics if the type's Kind is not Func.
	// It panics if i is not in the range [0, NumOut()).
	Out(i int) Type

	// OverflowComplex reports whether the complex128 x cannot be
	// represented by the type.
	// It panics if the type's Kind is not Complex64 or Complex128.
	OverflowComplex(x complex128) bool

	// OverflowFloat reports whether the float64 x cannot be
	// represented by the type.
	// It panics if the type's Kind is not Float32 or Float64.
	OverflowFloat(x float64) bool

	// OverflowInt reports whether the int64 x cannot be
	// represented by the type.
	// It panics if the type's Kind is not Int, Int8, Int16, Int32, or Int64.
	OverflowInt(x int64) bool

	// OverflowUint reports whether the uint64 x cannot be
	// represented by the type.
	// It panics if the type's Kind is not Uint, Uintptr, Uint8, Uint16,
	// Uint32, or Uint64.
	OverflowUint(x uint64) bool

	// CanSeq reports whether a Value with this type can be iterated over
	// using Value.Seq.
	CanSeq() bool

	// CanSeq2 reports whether a Value with this type can be iterated over
	// using Value.Seq2.
	CanSeq2() bool

	// Internal, unexported hooks into the runtime type representation.
	common() *abi.Type
	uncommon() *uncommonType
}
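
// Usage sketch (illustrative, not from this file): basic Type inspection from
// a client package that imports "reflect" and "fmt"; Point is a hypothetical
// example type. Kind reports the underlying kind, and Elem dereferences
// pointer, array, slice, channel, and map element types.
//
//	type Point struct{ X, Y int }
//
//	t := reflect.TypeOf(&Point{})
//	fmt.Println(t.Kind())               // ptr
//	fmt.Println(t.Elem().Kind())        // struct
//	fmt.Println(t.Elem().NumField())    // 2
//	fmt.Println(t.Elem().Field(0).Name) // X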

// A Kind represents the specific kind of type that a Type represents.
// The zero Kind is not a valid kind.
type Kind uint

const (
	Invalid Kind = iota
	Bool
	Int
	Int8
	Int16
	Int32
	Int64
	Uint
	Uint8
	Uint16
	Uint32
	Uint64
	Uintptr
	Float32
	Float64
	Complex64
	Complex128
	Array
	Chan
	Func
	Interface
	Map
	Pointer
	Slice
	String
	Struct
	UnsafePointer
)

// Ptr is the old name for the Pointer kind.
const Ptr = Pointer

// uncommonType is present only for defined types or types with methods
// (if T is a defined type, the uncommonTypes for T and *T have methods).
// Using a pointer to this struct reduces the overall size required
// to describe a non-defined type with no methods.
type uncommonType = abi.UncommonType

// Embed this type to get common/uncommon.
type common struct {
	abi.Type
}

// rtype is the common implementation of most values.
// It is embedded in other struct types.
type rtype struct {
	t abi.Type
}

func (t *rtype) common() *abi.Type {
	return &t.t
}

func (t *rtype) uncommon() *abi.UncommonType {
	return t.t.Uncommon()
}

type aNameOff = abi.NameOff // offset to a name
type aTypeOff = abi.TypeOff // offset to an *rtype
type aTextOff = abi.TextOff // offset from top of text section

// ChanDir represents a channel type's direction.
type ChanDir int

const (
	RecvDir ChanDir = 1 << iota // <-chan
	SendDir                     // chan<-
	BothDir = RecvDir | SendDir // chan
)

// arrayType represents a fixed array type.
type arrayType = abi.ArrayType

// chanType represents a channel type.
type chanType = abi.ChanType

// funcType represents a function type.
//
// A *rtype for each in and out parameter is stored in an array that
// directly follows the funcType (and possibly its uncommonType), so
// a function type with one method, one input, and one output looks like:
//
//	struct {
//		funcType
//		uncommonType
//		[2]*rtype // [0] is in, [1] is out
//	}
type funcType = abi.FuncType
361
362
363 type interfaceType struct {
364 abi.InterfaceType
365 }
366
367 func (t *interfaceType) nameOff(off aNameOff) abi.Name {
368 return toRType(&t.Type).nameOff(off)
369 }
370
371 func nameOffFor(t *abi.Type, off aNameOff) abi.Name {
372 return toRType(t).nameOff(off)
373 }
374
375 func typeOffFor(t *abi.Type, off aTypeOff) *abi.Type {
376 return toRType(t).typeOff(off)
377 }
378
379 func (t *interfaceType) typeOff(off aTypeOff) *abi.Type {
380 return toRType(&t.Type).typeOff(off)
381 }
382
383 func (t *interfaceType) common() *abi.Type {
384 return &t.Type
385 }
386
387 func (t *interfaceType) uncommon() *abi.UncommonType {
388 return t.Uncommon()
389 }
390
391
392 type mapType struct {
393 abi.MapType
394 }
395
396
397 type ptrType struct {
398 abi.PtrType
399 }
400
401
402 type sliceType struct {
403 abi.SliceType
404 }
405
406
407 type structField = abi.StructField
408
409
410 type structType struct {
411 abi.StructType
412 }
413
414 func pkgPath(n abi.Name) string {
415 if n.Bytes == nil || *n.DataChecked(0, "name flag field")&(1<<2) == 0 {
416 return ""
417 }
418 i, l := n.ReadVarint(1)
419 off := 1 + i + l
420 if n.HasTag() {
421 i2, l2 := n.ReadVarint(off)
422 off += i2 + l2
423 }
424 var nameOff int32
425
426
427 copy((*[4]byte)(unsafe.Pointer(&nameOff))[:], (*[4]byte)(unsafe.Pointer(n.DataChecked(off, "name offset field")))[:])
428 pkgPathName := abi.Name{Bytes: (*byte)(resolveTypeOff(unsafe.Pointer(n.Bytes), nameOff))}
429 return pkgPathName.Name()
430 }
431
432 func newName(n, tag string, exported, embedded bool) abi.Name {
433 return abi.NewName(n, tag, exported, embedded)
434 }
435

// Method represents a single method.
type Method struct {
	// Name is the method name.
	Name string

	// PkgPath is the package path that qualifies a lower case (unexported)
	// method name. It is empty for upper case (exported) method names.
	// The combination of PkgPath and Name uniquely identifies a method
	// in a method set.
	PkgPath string

	Type  Type  // method type
	Func  Value // func with receiver as first argument
	Index int   // index for Type.Method
}

// IsExported reports whether the method is exported.
func (m Method) IsExported() bool {
	return m.PkgPath == ""
}
462
463
464 func (k Kind) String() string {
465 if uint(k) < uint(len(kindNames)) {
466 return kindNames[uint(k)]
467 }
468 return "kind" + strconv.Itoa(int(k))
469 }
470
471 var kindNames = []string{
472 Invalid: "invalid",
473 Bool: "bool",
474 Int: "int",
475 Int8: "int8",
476 Int16: "int16",
477 Int32: "int32",
478 Int64: "int64",
479 Uint: "uint",
480 Uint8: "uint8",
481 Uint16: "uint16",
482 Uint32: "uint32",
483 Uint64: "uint64",
484 Uintptr: "uintptr",
485 Float32: "float32",
486 Float64: "float64",
487 Complex64: "complex64",
488 Complex128: "complex128",
489 Array: "array",
490 Chan: "chan",
491 Func: "func",
492 Interface: "interface",
493 Map: "map",
494 Pointer: "ptr",
495 Slice: "slice",
496 String: "string",
497 Struct: "struct",
498 UnsafePointer: "unsafe.Pointer",
499 }
500
501
502
503
504
505
506 func resolveNameOff(ptrInModule unsafe.Pointer, off int32) unsafe.Pointer
507
508
509
510
511
512
513 func resolveTypeOff(rtype unsafe.Pointer, off int32) unsafe.Pointer
514
515
516
517
518
519
520 func resolveTextOff(rtype unsafe.Pointer, off int32) unsafe.Pointer
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536 func addReflectOff(ptr unsafe.Pointer) int32
537
538
539
540 func resolveReflectName(n abi.Name) aNameOff {
541 return aNameOff(addReflectOff(unsafe.Pointer(n.Bytes)))
542 }
543
544
545
546 func resolveReflectType(t *abi.Type) aTypeOff {
547 return aTypeOff(addReflectOff(unsafe.Pointer(t)))
548 }
549
550
551
552
553 func resolveReflectText(ptr unsafe.Pointer) aTextOff {
554 return aTextOff(addReflectOff(ptr))
555 }
556
557 func (t *rtype) nameOff(off aNameOff) abi.Name {
558 return abi.Name{Bytes: (*byte)(resolveNameOff(unsafe.Pointer(t), int32(off)))}
559 }
560
561 func (t *rtype) typeOff(off aTypeOff) *abi.Type {
562 return (*abi.Type)(resolveTypeOff(unsafe.Pointer(t), int32(off)))
563 }
564
565 func (t *rtype) textOff(off aTextOff) unsafe.Pointer {
566 return resolveTextOff(unsafe.Pointer(t), int32(off))
567 }
568
569 func textOffFor(t *abi.Type, off aTextOff) unsafe.Pointer {
570 return toRType(t).textOff(off)
571 }
572
573 func (t *rtype) String() string {
574 s := t.nameOff(t.t.Str).Name()
575 if t.t.TFlag&abi.TFlagExtraStar != 0 {
576 return s[1:]
577 }
578 return s
579 }
580
581 func (t *rtype) Size() uintptr { return t.t.Size() }
582
583 func (t *rtype) Bits() int {
584 if t == nil {
585 panic("reflect: Bits of nil Type")
586 }
587 k := t.Kind()
588 if k < Int || k > Complex128 {
589 panic("reflect: Bits of non-arithmetic Type " + t.String())
590 }
591 return int(t.t.Size_) * 8
592 }
593
594 func (t *rtype) Align() int { return t.t.Align() }
595
596 func (t *rtype) FieldAlign() int { return t.t.FieldAlign() }
597
598 func (t *rtype) Kind() Kind { return Kind(t.t.Kind()) }
599
600 func (t *rtype) exportedMethods() []abi.Method {
601 ut := t.uncommon()
602 if ut == nil {
603 return nil
604 }
605 return ut.ExportedMethods()
606 }
607
608 func (t *rtype) NumMethod() int {
609 if t.Kind() == Interface {
610 tt := (*interfaceType)(unsafe.Pointer(t))
611 return tt.NumMethod()
612 }
613 return len(t.exportedMethods())
614 }
615
616 func (t *rtype) Method(i int) (m Method) {
617 if t.Kind() == Interface {
618 tt := (*interfaceType)(unsafe.Pointer(t))
619 return tt.Method(i)
620 }
621 methods := t.exportedMethods()
622 if i < 0 || i >= len(methods) {
623 panic("reflect: Method index out of range")
624 }
625 p := methods[i]
626 pname := t.nameOff(p.Name)
627 m.Name = pname.Name()
628 fl := flag(Func)
629 mtyp := t.typeOff(p.Mtyp)
630 ft := (*funcType)(unsafe.Pointer(mtyp))
631 in := make([]Type, 0, 1+ft.NumIn())
632 in = append(in, t)
633 for _, arg := range ft.InSlice() {
634 in = append(in, toRType(arg))
635 }
636 out := make([]Type, 0, ft.NumOut())
637 for _, ret := range ft.OutSlice() {
638 out = append(out, toRType(ret))
639 }
640 mt := FuncOf(in, out, ft.IsVariadic())
641 m.Type = mt
642 tfn := t.textOff(p.Tfn)
643 fn := unsafe.Pointer(&tfn)
644 m.Func = Value{&mt.(*rtype).t, fn, fl}
645
646 m.Index = i
647 return m
648 }
649
650 func (t *rtype) MethodByName(name string) (m Method, ok bool) {
651 if t.Kind() == Interface {
652 tt := (*interfaceType)(unsafe.Pointer(t))
653 return tt.MethodByName(name)
654 }
655 ut := t.uncommon()
656 if ut == nil {
657 return Method{}, false
658 }
659
660 methods := ut.ExportedMethods()
661
662
663
664 i, j := 0, len(methods)
665 for i < j {
666 h := int(uint(i+j) >> 1)
667
668 if !(t.nameOff(methods[h].Name).Name() >= name) {
669 i = h + 1
670 } else {
671 j = h
672 }
673 }
674
675 if i < len(methods) && name == t.nameOff(methods[i].Name).Name() {
676 return t.Method(i), true
677 }
678
679 return Method{}, false
680 }
681
682 func (t *rtype) PkgPath() string {
683 if t.t.TFlag&abi.TFlagNamed == 0 {
684 return ""
685 }
686 ut := t.uncommon()
687 if ut == nil {
688 return ""
689 }
690 return t.nameOff(ut.PkgPath).Name()
691 }
692
693 func pkgPathFor(t *abi.Type) string {
694 return toRType(t).PkgPath()
695 }
696
697 func (t *rtype) Name() string {
698 if !t.t.HasName() {
699 return ""
700 }
701 s := t.String()
702 i := len(s) - 1
703 sqBrackets := 0
704 for i >= 0 && (s[i] != '.' || sqBrackets != 0) {
705 switch s[i] {
706 case ']':
707 sqBrackets++
708 case '[':
709 sqBrackets--
710 }
711 i--
712 }
713 return s[i+1:]
714 }
715
716 func nameFor(t *abi.Type) string {
717 return toRType(t).Name()
718 }
719
720 func (t *rtype) ChanDir() ChanDir {
721 if t.Kind() != Chan {
722 panic("reflect: ChanDir of non-chan type " + t.String())
723 }
724 tt := (*abi.ChanType)(unsafe.Pointer(t))
725 return ChanDir(tt.Dir)
726 }
727
728 func toRType(t *abi.Type) *rtype {
729 return (*rtype)(unsafe.Pointer(t))
730 }
731
732 func elem(t *abi.Type) *abi.Type {
733 et := t.Elem()
734 if et != nil {
735 return et
736 }
737 panic("reflect: Elem of invalid type " + stringFor(t))
738 }
739
740 func (t *rtype) Elem() Type {
741 return toType(elem(t.common()))
742 }
743
744 func (t *rtype) Field(i int) StructField {
745 if t.Kind() != Struct {
746 panic("reflect: Field of non-struct type " + t.String())
747 }
748 tt := (*structType)(unsafe.Pointer(t))
749 return tt.Field(i)
750 }
751
752 func (t *rtype) FieldByIndex(index []int) StructField {
753 if t.Kind() != Struct {
754 panic("reflect: FieldByIndex of non-struct type " + t.String())
755 }
756 tt := (*structType)(unsafe.Pointer(t))
757 return tt.FieldByIndex(index)
758 }
759
760 func (t *rtype) FieldByName(name string) (StructField, bool) {
761 if t.Kind() != Struct {
762 panic("reflect: FieldByName of non-struct type " + t.String())
763 }
764 tt := (*structType)(unsafe.Pointer(t))
765 return tt.FieldByName(name)
766 }
767
768 func (t *rtype) FieldByNameFunc(match func(string) bool) (StructField, bool) {
769 if t.Kind() != Struct {
770 panic("reflect: FieldByNameFunc of non-struct type " + t.String())
771 }
772 tt := (*structType)(unsafe.Pointer(t))
773 return tt.FieldByNameFunc(match)
774 }
775
776 func (t *rtype) Key() Type {
777 if t.Kind() != Map {
778 panic("reflect: Key of non-map type " + t.String())
779 }
780 tt := (*mapType)(unsafe.Pointer(t))
781 return toType(tt.Key)
782 }
783
784 func (t *rtype) Len() int {
785 if t.Kind() != Array {
786 panic("reflect: Len of non-array type " + t.String())
787 }
788 tt := (*arrayType)(unsafe.Pointer(t))
789 return int(tt.Len)
790 }
791
792 func (t *rtype) NumField() int {
793 if t.Kind() != Struct {
794 panic("reflect: NumField of non-struct type " + t.String())
795 }
796 tt := (*structType)(unsafe.Pointer(t))
797 return len(tt.Fields)
798 }
799
800 func (t *rtype) In(i int) Type {
801 if t.Kind() != Func {
802 panic("reflect: In of non-func type " + t.String())
803 }
804 tt := (*abi.FuncType)(unsafe.Pointer(t))
805 return toType(tt.InSlice()[i])
806 }
807
808 func (t *rtype) NumIn() int {
809 if t.Kind() != Func {
810 panic("reflect: NumIn of non-func type " + t.String())
811 }
812 tt := (*abi.FuncType)(unsafe.Pointer(t))
813 return tt.NumIn()
814 }
815
816 func (t *rtype) NumOut() int {
817 if t.Kind() != Func {
818 panic("reflect: NumOut of non-func type " + t.String())
819 }
820 tt := (*abi.FuncType)(unsafe.Pointer(t))
821 return tt.NumOut()
822 }
823
824 func (t *rtype) Out(i int) Type {
825 if t.Kind() != Func {
826 panic("reflect: Out of non-func type " + t.String())
827 }
828 tt := (*abi.FuncType)(unsafe.Pointer(t))
829 return toType(tt.OutSlice()[i])
830 }
831
832 func (t *rtype) IsVariadic() bool {
833 if t.Kind() != Func {
834 panic("reflect: IsVariadic of non-func type " + t.String())
835 }
836 tt := (*abi.FuncType)(unsafe.Pointer(t))
837 return tt.IsVariadic()
838 }
839
840 func (t *rtype) OverflowComplex(x complex128) bool {
841 k := t.Kind()
842 switch k {
843 case Complex64:
844 return overflowFloat32(real(x)) || overflowFloat32(imag(x))
845 case Complex128:
846 return false
847 }
848 panic("reflect: OverflowComplex of non-complex type " + t.String())
849 }
850
851 func (t *rtype) OverflowFloat(x float64) bool {
852 k := t.Kind()
853 switch k {
854 case Float32:
855 return overflowFloat32(x)
856 case Float64:
857 return false
858 }
859 panic("reflect: OverflowFloat of non-float type " + t.String())
860 }
861
862 func (t *rtype) OverflowInt(x int64) bool {
863 k := t.Kind()
864 switch k {
865 case Int, Int8, Int16, Int32, Int64:
866 bitSize := t.Size() * 8
867 trunc := (x << (64 - bitSize)) >> (64 - bitSize)
868 return x != trunc
869 }
870 panic("reflect: OverflowInt of non-int type " + t.String())
871 }
872
873 func (t *rtype) OverflowUint(x uint64) bool {
874 k := t.Kind()
875 switch k {
876 case Uint, Uintptr, Uint8, Uint16, Uint32, Uint64:
877 bitSize := t.Size() * 8
878 trunc := (x << (64 - bitSize)) >> (64 - bitSize)
879 return x != trunc
880 }
881 panic("reflect: OverflowUint of non-uint type " + t.String())
882 }
883
884 func (t *rtype) CanSeq() bool {
885 switch t.Kind() {
886 case Int8, Int16, Int32, Int64, Int, Uint8, Uint16, Uint32, Uint64, Uint, Uintptr, Array, Slice, Chan, String, Map:
887 return true
888 case Func:
889 return canRangeFunc(&t.t)
890 case Pointer:
891 return t.Elem().Kind() == Array
892 }
893 return false
894 }
895
896 func canRangeFunc(t *abi.Type) bool {
897 if t.Kind() != abi.Func {
898 return false
899 }
900 f := t.FuncType()
901 if f.InCount != 1 || f.OutCount != 0 {
902 return false
903 }
904 y := f.In(0)
905 if y.Kind() != abi.Func {
906 return false
907 }
908 yield := y.FuncType()
909 return yield.InCount == 1 && yield.OutCount == 1 && yield.Out(0).Kind() == abi.Bool
910 }
911
912 func (t *rtype) CanSeq2() bool {
913 switch t.Kind() {
914 case Array, Slice, String, Map:
915 return true
916 case Func:
917 return canRangeFunc2(&t.t)
918 case Pointer:
919 return t.Elem().Kind() == Array
920 }
921 return false
922 }
923
924 func canRangeFunc2(t *abi.Type) bool {
925 if t.Kind() != abi.Func {
926 return false
927 }
928 f := t.FuncType()
929 if f.InCount != 1 || f.OutCount != 0 {
930 return false
931 }
932 y := f.In(0)
933 if y.Kind() != abi.Func {
934 return false
935 }
936 yield := y.FuncType()
937 return yield.InCount == 2 && yield.OutCount == 1 && yield.Out(0).Kind() == abi.Bool
938 }
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958 func add(p unsafe.Pointer, x uintptr, whySafe string) unsafe.Pointer {
959 return unsafe.Pointer(uintptr(p) + x)
960 }
961
962 func (d ChanDir) String() string {
963 switch d {
964 case SendDir:
965 return "chan<-"
966 case RecvDir:
967 return "<-chan"
968 case BothDir:
969 return "chan"
970 }
971 return "ChanDir" + strconv.Itoa(int(d))
972 }
973
974
975 func (t *interfaceType) Method(i int) (m Method) {
976 if i < 0 || i >= len(t.Methods) {
977 return
978 }
979 p := &t.Methods[i]
980 pname := t.nameOff(p.Name)
981 m.Name = pname.Name()
982 if !pname.IsExported() {
983 m.PkgPath = pkgPath(pname)
984 if m.PkgPath == "" {
985 m.PkgPath = t.PkgPath.Name()
986 }
987 }
988 m.Type = toType(t.typeOff(p.Typ))
989 m.Index = i
990 return
991 }
992
993
994 func (t *interfaceType) NumMethod() int { return len(t.Methods) }
995
996
997 func (t *interfaceType) MethodByName(name string) (m Method, ok bool) {
998 if t == nil {
999 return
1000 }
1001 var p *abi.Imethod
1002 for i := range t.Methods {
1003 p = &t.Methods[i]
1004 if t.nameOff(p.Name).Name() == name {
1005 return t.Method(i), true
1006 }
1007 }
1008 return
1009 }
1010

// A StructField describes a single field in a struct.
type StructField struct {
	// Name is the field name.
	Name string

	// PkgPath is the package path that qualifies a lower case (unexported)
	// field name. It is empty for upper case (exported) field names.
	PkgPath string

	Type      Type      // field type
	Tag       StructTag // field tag string
	Offset    uintptr   // offset within struct, in bytes
	Index     []int     // index sequence for Type.FieldByIndex
	Anonymous bool      // is an embedded field
}

// IsExported reports whether the field is exported.
func (f StructField) IsExported() bool {
	return f.PkgPath == ""
}

// A StructTag is the tag string in a struct field.
//
// By convention, tag strings are a concatenation of
// optionally space-separated key:"value" pairs.
// Each key is a non-empty string consisting of non-control
// characters other than space (U+0020 ' '), quote (U+0022 '"'),
// and colon (U+003A ':'). Each value is quoted using U+0022 '"'
// characters and Go string literal syntax.
type StructTag string

// Get returns the value associated with key in the tag string.
// If there is no such key in the tag, Get returns the empty string.
// If the tag does not have the conventional format, the value
// returned by Get is unspecified. To determine whether a tag is
// explicitly set to the empty string, use Lookup.
func (tag StructTag) Get(key string) string {
	v, _ := tag.Lookup(key)
	return v
}

// Lookup returns the value associated with key in the tag string.
// If the key is present in the tag the value (which may be empty)
// is returned. Otherwise the returned value will be the empty string.
// The ok return value reports whether the value was explicitly set in
// the tag string. If the tag does not have the conventional format,
// the value returned by Lookup is unspecified.
func (tag StructTag) Lookup(key string) (value string, ok bool) {
	// Walk the tag, parsing optionally space-separated key:"value" pairs.
1063 for tag != "" {
1064
1065 i := 0
1066 for i < len(tag) && tag[i] == ' ' {
1067 i++
1068 }
1069 tag = tag[i:]
1070 if tag == "" {
1071 break
1072 }
1073
1074
1075
1076
1077
1078 i = 0
1079 for i < len(tag) && tag[i] > ' ' && tag[i] != ':' && tag[i] != '"' && tag[i] != 0x7f {
1080 i++
1081 }
1082 if i == 0 || i+1 >= len(tag) || tag[i] != ':' || tag[i+1] != '"' {
1083 break
1084 }
1085 name := string(tag[:i])
1086 tag = tag[i+1:]
1087
1088
1089 i = 1
1090 for i < len(tag) && tag[i] != '"' {
1091 if tag[i] == '\\' {
1092 i++
1093 }
1094 i++
1095 }
1096 if i >= len(tag) {
1097 break
1098 }
1099 qvalue := string(tag[:i+1])
1100 tag = tag[i+1:]
1101
1102 if key == name {
1103 value, err := strconv.Unquote(qvalue)
1104 if err != nil {
1105 break
1106 }
1107 return value, true
1108 }
1109 }
1110 return "", false
1111 }
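
// Usage sketch (illustrative, not from this file): reading struct tags with
// Get and Lookup from a client package that imports "reflect" and "fmt";
// User is a hypothetical example type.
//
//	type User struct {
//		Name string `json:"name,omitempty" db:"user_name"`
//	}
//
//	f, _ := reflect.TypeOf(User{}).FieldByName("Name")
//	fmt.Println(f.Tag.Get("json")) // name,omitempty
//	fmt.Println(f.Tag.Get("db"))   // user_name
//	if _, ok := f.Tag.Lookup("xml"); !ok {
//		fmt.Println("no xml tag")
//	}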
1112
1113
1114 func (t *structType) Field(i int) (f StructField) {
1115 if i < 0 || i >= len(t.Fields) {
1116 panic("reflect: Field index out of bounds")
1117 }
1118 p := &t.Fields[i]
1119 f.Type = toType(p.Typ)
1120 f.Name = p.Name.Name()
1121 f.Anonymous = p.Embedded()
1122 if !p.Name.IsExported() {
1123 f.PkgPath = t.PkgPath.Name()
1124 }
1125 if tag := p.Name.Tag(); tag != "" {
1126 f.Tag = StructTag(tag)
1127 }
1128 f.Offset = p.Offset
1129
1130
1131
1132
1133
1134
1135
1136
1137 f.Index = []int{i}
1138 return
1139 }
1140
1141
1142
1143
1144
1145 func (t *structType) FieldByIndex(index []int) (f StructField) {
1146 f.Type = toType(&t.Type)
1147 for i, x := range index {
1148 if i > 0 {
1149 ft := f.Type
1150 if ft.Kind() == Pointer && ft.Elem().Kind() == Struct {
1151 ft = ft.Elem()
1152 }
1153 f.Type = ft
1154 }
1155 f = f.Type.Field(x)
1156 }
1157 return
1158 }
1159
1160
1161 type fieldScan struct {
1162 typ *structType
1163 index []int
1164 }
1165
1166
1167
1168 func (t *structType) FieldByNameFunc(match func(string) bool) (result StructField, ok bool) {
1169
1170
1171
1172
1173
1174
1175
1176
1177 current := []fieldScan{}
1178 next := []fieldScan{{typ: t}}
1179
1180
1181
1182
1183
1184
1185
1186 var nextCount map[*structType]int
1187
1188
1189
1190
1191
1192
1193 visited := map[*structType]bool{}
1194
1195 for len(next) > 0 {
1196 current, next = next, current[:0]
1197 count := nextCount
1198 nextCount = nil
1199
1200
1201
1202
1203
1204 for _, scan := range current {
1205 t := scan.typ
1206 if visited[t] {
1207
1208
1209
1210 continue
1211 }
1212 visited[t] = true
1213 for i := range t.Fields {
1214 f := &t.Fields[i]
1215
1216 fname := f.Name.Name()
1217 var ntyp *abi.Type
1218 if f.Embedded() {
1219
1220 ntyp = f.Typ
1221 if ntyp.Kind() == abi.Pointer {
1222 ntyp = ntyp.Elem()
1223 }
1224 }
1225
1226
1227 if match(fname) {
1228
1229 if count[t] > 1 || ok {
1230
1231 return StructField{}, false
1232 }
1233 result = t.Field(i)
1234 result.Index = nil
1235 result.Index = append(result.Index, scan.index...)
1236 result.Index = append(result.Index, i)
1237 ok = true
1238 continue
1239 }
1240
1241
1242
1243
1244 if ok || ntyp == nil || ntyp.Kind() != abi.Struct {
1245 continue
1246 }
1247 styp := (*structType)(unsafe.Pointer(ntyp))
1248 if nextCount[styp] > 0 {
1249 nextCount[styp] = 2
1250 continue
1251 }
1252 if nextCount == nil {
1253 nextCount = map[*structType]int{}
1254 }
1255 nextCount[styp] = 1
1256 if count[t] > 1 {
1257 nextCount[styp] = 2
1258 }
1259 var index []int
1260 index = append(index, scan.index...)
1261 index = append(index, i)
1262 next = append(next, fieldScan{styp, index})
1263 }
1264 }
1265 if ok {
1266 break
1267 }
1268 }
1269 return
1270 }
1271
1272
1273
1274 func (t *structType) FieldByName(name string) (f StructField, present bool) {
1275
1276 hasEmbeds := false
1277 if name != "" {
1278 for i := range t.Fields {
1279 tf := &t.Fields[i]
1280 if tf.Name.Name() == name {
1281 return t.Field(i), true
1282 }
1283 if tf.Embedded() {
1284 hasEmbeds = true
1285 }
1286 }
1287 }
1288 if !hasEmbeds {
1289 return
1290 }
1291 return t.FieldByNameFunc(func(s string) bool { return s == name })
1292 }
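
// Usage sketch (illustrative, not from this file): promoted fields of embedded
// structs are found by FieldByName with a multi-element Index, which can be
// fed back into FieldByIndex. Base and Derived are hypothetical example types;
// the sketch assumes a client package importing "reflect" and "fmt".
//
//	type Base struct{ ID int }
//	type Derived struct {
//		Base
//		Name string
//	}
//
//	t := reflect.TypeOf(Derived{})
//	f, ok := t.FieldByName("ID")
//	fmt.Println(ok, f.Index)                  // true [0 0]
//	fmt.Println(t.FieldByIndex(f.Index).Type) // int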
1293
1294
1295
1296 func TypeOf(i any) Type {
1297 return toType(abi.TypeOf(i))
1298 }
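
// Usage sketch (illustrative, not from this file): TypeOf reports the dynamic
// type of its argument, so to obtain the Type of an interface type itself,
// pass a nil pointer to that interface and call Elem. Assumes a client
// package importing "reflect", "fmt", and "io".
//
//	fmt.Println(reflect.TypeOf(3.14)) // float64
//
//	readerType := reflect.TypeOf((*io.Reader)(nil)).Elem()
//	fmt.Println(readerType.Kind(), readerType.NumMethod()) // interface 1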
1299
1300
1301 func rtypeOf(i any) *abi.Type {
1302 return abi.TypeOf(i)
1303 }
1304
1305
1306 var ptrMap sync.Map
1307
1308
1309
1310
1311
1312
1313
1314
1315 func PtrTo(t Type) Type { return PointerTo(t) }
1316
1317
1318
1319 func PointerTo(t Type) Type {
1320 return toRType(t.(*rtype).ptrTo())
1321 }
1322
1323 func (t *rtype) ptrTo() *abi.Type {
1324 at := &t.t
1325 if at.PtrToThis != 0 {
1326 return t.typeOff(at.PtrToThis)
1327 }
1328
1329
1330 if pi, ok := ptrMap.Load(t); ok {
1331 return &pi.(*ptrType).Type
1332 }
1333
1334
1335 s := "*" + t.String()
1336 for _, tt := range typesByString(s) {
1337 p := (*ptrType)(unsafe.Pointer(tt))
1338 if p.Elem != &t.t {
1339 continue
1340 }
1341 pi, _ := ptrMap.LoadOrStore(t, p)
1342 return &pi.(*ptrType).Type
1343 }
1344
1345
1346
1347 var iptr any = (*unsafe.Pointer)(nil)
1348 prototype := *(**ptrType)(unsafe.Pointer(&iptr))
1349 pp := *prototype
1350
1351 pp.Str = resolveReflectName(newName(s, "", false, false))
1352 pp.PtrToThis = 0
1353
1354
1355
1356
1357
1358
1359 pp.Hash = fnv1(t.t.Hash, '*')
1360
1361 pp.Elem = at
1362
1363 pi, _ := ptrMap.LoadOrStore(t, &pp)
1364 return &pi.(*ptrType).Type
1365 }
1366
1367 func ptrTo(t *abi.Type) *abi.Type {
1368 return toRType(t).ptrTo()
1369 }
1370
1371
1372 func fnv1(x uint32, list ...byte) uint32 {
1373 for _, b := range list {
1374 x = x*16777619 ^ uint32(b)
1375 }
1376 return x
1377 }
1378
1379 func (t *rtype) Implements(u Type) bool {
1380 if u == nil {
1381 panic("reflect: nil type passed to Type.Implements")
1382 }
1383 if u.Kind() != Interface {
1384 panic("reflect: non-interface type passed to Type.Implements")
1385 }
1386 return implements(u.common(), t.common())
1387 }
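
// Usage sketch (illustrative, not from this file): checking Implements against
// the error interface using the nil-pointer idiom. myError is a hypothetical
// example type declared at package level in a client package importing
// "reflect" and "fmt".
//
//	type myError struct{}
//
//	func (myError) Error() string { return "boom" }
//
//	var errType = reflect.TypeOf((*error)(nil)).Elem()
//
//	func check() {
//		fmt.Println(reflect.TypeOf(myError{}).Implements(errType)) // true
//		fmt.Println(reflect.TypeOf("hi").Implements(errType))      // false
//	}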
1388
1389 func (t *rtype) AssignableTo(u Type) bool {
1390 if u == nil {
1391 panic("reflect: nil type passed to Type.AssignableTo")
1392 }
1393 uu := u.common()
1394 return directlyAssignable(uu, t.common()) || implements(uu, t.common())
1395 }
1396
1397 func (t *rtype) ConvertibleTo(u Type) bool {
1398 if u == nil {
1399 panic("reflect: nil type passed to Type.ConvertibleTo")
1400 }
1401 return convertOp(u.common(), t.common()) != nil
1402 }
1403
1404 func (t *rtype) Comparable() bool {
1405 return t.t.Equal != nil
1406 }
1407
1408
1409 func implements(T, V *abi.Type) bool {
1410 if T.Kind() != abi.Interface {
1411 return false
1412 }
1413 t := (*interfaceType)(unsafe.Pointer(T))
1414 if len(t.Methods) == 0 {
1415 return true
1416 }
1417
1418
1419
1420
1421
1422
1423
1424
1425
1426
1427
1428
1429
1430 if V.Kind() == abi.Interface {
1431 v := (*interfaceType)(unsafe.Pointer(V))
1432 i := 0
1433 for j := 0; j < len(v.Methods); j++ {
1434 tm := &t.Methods[i]
1435 tmName := t.nameOff(tm.Name)
1436 vm := &v.Methods[j]
1437 vmName := nameOffFor(V, vm.Name)
1438 if vmName.Name() == tmName.Name() && typeOffFor(V, vm.Typ) == t.typeOff(tm.Typ) {
1439 if !tmName.IsExported() {
1440 tmPkgPath := pkgPath(tmName)
1441 if tmPkgPath == "" {
1442 tmPkgPath = t.PkgPath.Name()
1443 }
1444 vmPkgPath := pkgPath(vmName)
1445 if vmPkgPath == "" {
1446 vmPkgPath = v.PkgPath.Name()
1447 }
1448 if tmPkgPath != vmPkgPath {
1449 continue
1450 }
1451 }
1452 if i++; i >= len(t.Methods) {
1453 return true
1454 }
1455 }
1456 }
1457 return false
1458 }
1459
1460 v := V.Uncommon()
1461 if v == nil {
1462 return false
1463 }
1464 i := 0
1465 vmethods := v.Methods()
1466 for j := 0; j < int(v.Mcount); j++ {
1467 tm := &t.Methods[i]
1468 tmName := t.nameOff(tm.Name)
1469 vm := vmethods[j]
1470 vmName := nameOffFor(V, vm.Name)
1471 if vmName.Name() == tmName.Name() && typeOffFor(V, vm.Mtyp) == t.typeOff(tm.Typ) {
1472 if !tmName.IsExported() {
1473 tmPkgPath := pkgPath(tmName)
1474 if tmPkgPath == "" {
1475 tmPkgPath = t.PkgPath.Name()
1476 }
1477 vmPkgPath := pkgPath(vmName)
1478 if vmPkgPath == "" {
1479 vmPkgPath = nameOffFor(V, v.PkgPath).Name()
1480 }
1481 if tmPkgPath != vmPkgPath {
1482 continue
1483 }
1484 }
1485 if i++; i >= len(t.Methods) {
1486 return true
1487 }
1488 }
1489 }
1490 return false
1491 }
1492
1493
1494
1495
1496
1497 func specialChannelAssignability(T, V *abi.Type) bool {
1498
1499
1500
1501
1502 return V.ChanDir() == abi.BothDir && (nameFor(T) == "" || nameFor(V) == "") && haveIdenticalType(T.Elem(), V.Elem(), true)
1503 }
1504
1505
1506
1507
1508
1509
1510 func directlyAssignable(T, V *abi.Type) bool {
1511
1512 if T == V {
1513 return true
1514 }
1515
1516
1517
1518 if T.HasName() && V.HasName() || T.Kind() != V.Kind() {
1519 return false
1520 }
1521
1522 if T.Kind() == abi.Chan && specialChannelAssignability(T, V) {
1523 return true
1524 }
1525
1526
1527 return haveIdenticalUnderlyingType(T, V, true)
1528 }
1529
1530 func haveIdenticalType(T, V *abi.Type, cmpTags bool) bool {
1531 if cmpTags {
1532 return T == V
1533 }
1534
1535 if nameFor(T) != nameFor(V) || T.Kind() != V.Kind() || pkgPathFor(T) != pkgPathFor(V) {
1536 return false
1537 }
1538
1539 return haveIdenticalUnderlyingType(T, V, false)
1540 }
1541
1542 func haveIdenticalUnderlyingType(T, V *abi.Type, cmpTags bool) bool {
1543 if T == V {
1544 return true
1545 }
1546
1547 kind := Kind(T.Kind())
1548 if kind != Kind(V.Kind()) {
1549 return false
1550 }
1551
1552
1553
1554 if Bool <= kind && kind <= Complex128 || kind == String || kind == UnsafePointer {
1555 return true
1556 }
1557
1558
1559 switch kind {
1560 case Array:
1561 return T.Len() == V.Len() && haveIdenticalType(T.Elem(), V.Elem(), cmpTags)
1562
1563 case Chan:
1564 return V.ChanDir() == T.ChanDir() && haveIdenticalType(T.Elem(), V.Elem(), cmpTags)
1565
1566 case Func:
1567 t := (*funcType)(unsafe.Pointer(T))
1568 v := (*funcType)(unsafe.Pointer(V))
1569 if t.OutCount != v.OutCount || t.InCount != v.InCount {
1570 return false
1571 }
1572 for i := 0; i < t.NumIn(); i++ {
1573 if !haveIdenticalType(t.In(i), v.In(i), cmpTags) {
1574 return false
1575 }
1576 }
1577 for i := 0; i < t.NumOut(); i++ {
1578 if !haveIdenticalType(t.Out(i), v.Out(i), cmpTags) {
1579 return false
1580 }
1581 }
1582 return true
1583
1584 case Interface:
1585 t := (*interfaceType)(unsafe.Pointer(T))
1586 v := (*interfaceType)(unsafe.Pointer(V))
1587 if len(t.Methods) == 0 && len(v.Methods) == 0 {
1588 return true
1589 }
1590
1591
1592 return false
1593
1594 case Map:
1595 return haveIdenticalType(T.Key(), V.Key(), cmpTags) && haveIdenticalType(T.Elem(), V.Elem(), cmpTags)
1596
1597 case Pointer, Slice:
1598 return haveIdenticalType(T.Elem(), V.Elem(), cmpTags)
1599
1600 case Struct:
1601 t := (*structType)(unsafe.Pointer(T))
1602 v := (*structType)(unsafe.Pointer(V))
1603 if len(t.Fields) != len(v.Fields) {
1604 return false
1605 }
1606 if t.PkgPath.Name() != v.PkgPath.Name() {
1607 return false
1608 }
1609 for i := range t.Fields {
1610 tf := &t.Fields[i]
1611 vf := &v.Fields[i]
1612 if tf.Name.Name() != vf.Name.Name() {
1613 return false
1614 }
1615 if !haveIdenticalType(tf.Typ, vf.Typ, cmpTags) {
1616 return false
1617 }
1618 if cmpTags && tf.Name.Tag() != vf.Name.Tag() {
1619 return false
1620 }
1621 if tf.Offset != vf.Offset {
1622 return false
1623 }
1624 if tf.Embedded() != vf.Embedded() {
1625 return false
1626 }
1627 }
1628 return true
1629 }
1630
1631 return false
1632 }
1633
1634
1635
1636
1637
1638
1639
1640
1641
1642
1643
1644
1645
1646
1647
1648
1649
1650
1651
1652
1653 func typelinks() (sections []unsafe.Pointer, offset [][]int32)
1654
1655
1656
1657
1658
1659
1660
1661
1662
1663
1664 func rtypeOff(section unsafe.Pointer, off int32) *abi.Type {
1665 return (*abi.Type)(add(section, uintptr(off), "sizeof(rtype) > 0"))
1666 }
1667
1668
1669
1670
1671
1672
1673
1674
1675
1676
1677
1678
1679
1680
1681
1682
1683 func typesByString(s string) []*abi.Type {
1684 sections, offset := typelinks()
1685 var ret []*abi.Type
1686
1687 for offsI, offs := range offset {
1688 section := sections[offsI]
1689
1690
1691
1692 i, j := 0, len(offs)
1693 for i < j {
1694 h := int(uint(i+j) >> 1)
1695
1696 if !(stringFor(rtypeOff(section, offs[h])) >= s) {
1697 i = h + 1
1698 } else {
1699 j = h
1700 }
1701 }
1702
1703
1704
1705
1706
1707 for j := i; j < len(offs); j++ {
1708 typ := rtypeOff(section, offs[j])
1709 if stringFor(typ) != s {
1710 break
1711 }
1712 ret = append(ret, typ)
1713 }
1714 }
1715 return ret
1716 }
1717
1718
1719 var lookupCache sync.Map
1720
1721
1722
1723
1724 type cacheKey struct {
1725 kind Kind
1726 t1 *abi.Type
1727 t2 *abi.Type
1728 extra uintptr
1729 }
1730
1731
1732
1733
1734 var funcLookupCache struct {
1735 sync.Mutex
1736
1737
1738
1739 m sync.Map
1740 }
1741
1742
1743
1744
1745
1746
1747 func ChanOf(dir ChanDir, t Type) Type {
1748 typ := t.common()
1749
1750
1751 ckey := cacheKey{Chan, typ, nil, uintptr(dir)}
1752 if ch, ok := lookupCache.Load(ckey); ok {
1753 return ch.(*rtype)
1754 }
1755
1756
1757 if typ.Size_ >= 1<<16 {
1758 panic("reflect.ChanOf: element size too large")
1759 }
1760
1761
1762 var s string
1763 switch dir {
1764 default:
1765 panic("reflect.ChanOf: invalid dir")
1766 case SendDir:
1767 s = "chan<- " + stringFor(typ)
1768 case RecvDir:
1769 s = "<-chan " + stringFor(typ)
1770 case BothDir:
1771 typeStr := stringFor(typ)
1772 if typeStr[0] == '<' {
1773
1774
1775
1776
1777 s = "chan (" + typeStr + ")"
1778 } else {
1779 s = "chan " + typeStr
1780 }
1781 }
1782 for _, tt := range typesByString(s) {
1783 ch := (*chanType)(unsafe.Pointer(tt))
1784 if ch.Elem == typ && ch.Dir == abi.ChanDir(dir) {
1785 ti, _ := lookupCache.LoadOrStore(ckey, toRType(tt))
1786 return ti.(Type)
1787 }
1788 }
1789
1790
1791 var ichan any = (chan unsafe.Pointer)(nil)
1792 prototype := *(**chanType)(unsafe.Pointer(&ichan))
1793 ch := *prototype
1794 ch.TFlag = abi.TFlagRegularMemory
1795 ch.Dir = abi.ChanDir(dir)
1796 ch.Str = resolveReflectName(newName(s, "", false, false))
1797 ch.Hash = fnv1(typ.Hash, 'c', byte(dir))
1798 ch.Elem = typ
1799
1800 ti, _ := lookupCache.LoadOrStore(ckey, toRType(&ch.Type))
1801 return ti.(Type)
1802 }
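
// Usage sketch (illustrative, not from this file): constructing a channel type
// at run time and making a usable channel value with MakeChan, from a client
// package importing "reflect" and "fmt".
//
//	ct := reflect.ChanOf(reflect.BothDir, reflect.TypeOf(0))
//	fmt.Println(ct) // chan int
//
//	ch := reflect.MakeChan(ct, 1)
//	ch.Send(reflect.ValueOf(42))
//	v, _ := ch.Recv()
//	fmt.Println(v.Int()) // 42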
1803
1804
1805
1806
1807
1808
1809
1810 func MapOf(key, elem Type) Type {
1811 ktyp := key.common()
1812 etyp := elem.common()
1813
1814 if ktyp.Equal == nil {
1815 panic("reflect.MapOf: invalid key type " + stringFor(ktyp))
1816 }
1817
1818
1819 ckey := cacheKey{Map, ktyp, etyp, 0}
1820 if mt, ok := lookupCache.Load(ckey); ok {
1821 return mt.(Type)
1822 }
1823
1824
1825 s := "map[" + stringFor(ktyp) + "]" + stringFor(etyp)
1826 for _, tt := range typesByString(s) {
1827 mt := (*mapType)(unsafe.Pointer(tt))
1828 if mt.Key == ktyp && mt.Elem == etyp {
1829 ti, _ := lookupCache.LoadOrStore(ckey, toRType(tt))
1830 return ti.(Type)
1831 }
1832 }
1833
1834
1835
1836
1837 var imap any = (map[unsafe.Pointer]unsafe.Pointer)(nil)
1838 mt := **(**mapType)(unsafe.Pointer(&imap))
1839 mt.Str = resolveReflectName(newName(s, "", false, false))
1840 mt.TFlag = 0
1841 mt.Hash = fnv1(etyp.Hash, 'm', byte(ktyp.Hash>>24), byte(ktyp.Hash>>16), byte(ktyp.Hash>>8), byte(ktyp.Hash))
1842 mt.Key = ktyp
1843 mt.Elem = etyp
1844 mt.Bucket = bucketOf(ktyp, etyp)
1845 mt.Hasher = func(p unsafe.Pointer, seed uintptr) uintptr {
1846 return typehash(ktyp, p, seed)
1847 }
1848 mt.Flags = 0
1849 if ktyp.Size_ > abi.MapMaxKeyBytes {
1850 mt.KeySize = uint8(goarch.PtrSize)
1851 mt.Flags |= 1
1852 } else {
1853 mt.KeySize = uint8(ktyp.Size_)
1854 }
1855 if etyp.Size_ > abi.MapMaxElemBytes {
1856 mt.ValueSize = uint8(goarch.PtrSize)
1857 mt.Flags |= 2
1858 } else {
1859 mt.MapType.ValueSize = uint8(etyp.Size_)
1860 }
1861 mt.MapType.BucketSize = uint16(mt.Bucket.Size_)
1862 if isReflexive(ktyp) {
1863 mt.Flags |= 4
1864 }
1865 if needKeyUpdate(ktyp) {
1866 mt.Flags |= 8
1867 }
1868 if hashMightPanic(ktyp) {
1869 mt.Flags |= 16
1870 }
1871 mt.PtrToThis = 0
1872
1873 ti, _ := lookupCache.LoadOrStore(ckey, toRType(&mt.Type))
1874 return ti.(Type)
1875 }
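
// Usage sketch (illustrative, not from this file): building map[string]int
// dynamically; MapOf panics if the key type is not comparable (Equal == nil).
// Assumes a client package importing "reflect" and "fmt".
//
//	mt := reflect.MapOf(reflect.TypeOf(""), reflect.TypeOf(0))
//	m := reflect.MakeMap(mt)
//	m.SetMapIndex(reflect.ValueOf("answer"), reflect.ValueOf(42))
//	fmt.Println(mt)                                      // map[string]int
//	fmt.Println(m.MapIndex(reflect.ValueOf("answer")))   // 42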
1876
1877 var funcTypes []Type
1878 var funcTypesMutex sync.Mutex
1879
1880 func initFuncTypes(n int) Type {
1881 funcTypesMutex.Lock()
1882 defer funcTypesMutex.Unlock()
1883 if n >= len(funcTypes) {
1884 newFuncTypes := make([]Type, n+1)
1885 copy(newFuncTypes, funcTypes)
1886 funcTypes = newFuncTypes
1887 }
1888 if funcTypes[n] != nil {
1889 return funcTypes[n]
1890 }
1891
1892 funcTypes[n] = StructOf([]StructField{
1893 {
1894 Name: "FuncType",
1895 Type: TypeOf(funcType{}),
1896 },
1897 {
1898 Name: "Args",
1899 Type: ArrayOf(n, TypeOf(&rtype{})),
1900 },
1901 })
1902 return funcTypes[n]
1903 }
1904
1905
1906
1907
1908
1909
1910
1911
1912 func FuncOf(in, out []Type, variadic bool) Type {
1913 if variadic && (len(in) == 0 || in[len(in)-1].Kind() != Slice) {
1914 panic("reflect.FuncOf: last arg of variadic func must be slice")
1915 }
1916
1917
1918 var ifunc any = (func())(nil)
1919 prototype := *(**funcType)(unsafe.Pointer(&ifunc))
1920 n := len(in) + len(out)
1921
1922 if n > 128 {
1923 panic("reflect.FuncOf: too many arguments")
1924 }
1925
1926 o := New(initFuncTypes(n)).Elem()
1927 ft := (*funcType)(unsafe.Pointer(o.Field(0).Addr().Pointer()))
1928 args := unsafe.Slice((**rtype)(unsafe.Pointer(o.Field(1).Addr().Pointer())), n)[0:0:n]
1929 *ft = *prototype
1930
1931
1932 var hash uint32
1933 for _, in := range in {
1934 t := in.(*rtype)
1935 args = append(args, t)
1936 hash = fnv1(hash, byte(t.t.Hash>>24), byte(t.t.Hash>>16), byte(t.t.Hash>>8), byte(t.t.Hash))
1937 }
1938 if variadic {
1939 hash = fnv1(hash, 'v')
1940 }
1941 hash = fnv1(hash, '.')
1942 for _, out := range out {
1943 t := out.(*rtype)
1944 args = append(args, t)
1945 hash = fnv1(hash, byte(t.t.Hash>>24), byte(t.t.Hash>>16), byte(t.t.Hash>>8), byte(t.t.Hash))
1946 }
1947
1948 ft.TFlag = 0
1949 ft.Hash = hash
1950 ft.InCount = uint16(len(in))
1951 ft.OutCount = uint16(len(out))
1952 if variadic {
1953 ft.OutCount |= 1 << 15
1954 }
1955
1956
1957 if ts, ok := funcLookupCache.m.Load(hash); ok {
1958 for _, t := range ts.([]*abi.Type) {
1959 if haveIdenticalUnderlyingType(&ft.Type, t, true) {
1960 return toRType(t)
1961 }
1962 }
1963 }
1964
1965
1966 funcLookupCache.Lock()
1967 defer funcLookupCache.Unlock()
1968 if ts, ok := funcLookupCache.m.Load(hash); ok {
1969 for _, t := range ts.([]*abi.Type) {
1970 if haveIdenticalUnderlyingType(&ft.Type, t, true) {
1971 return toRType(t)
1972 }
1973 }
1974 }
1975
1976 addToCache := func(tt *abi.Type) Type {
1977 var rts []*abi.Type
1978 if rti, ok := funcLookupCache.m.Load(hash); ok {
1979 rts = rti.([]*abi.Type)
1980 }
1981 funcLookupCache.m.Store(hash, append(rts, tt))
1982 return toType(tt)
1983 }
1984
1985
1986 str := funcStr(ft)
1987 for _, tt := range typesByString(str) {
1988 if haveIdenticalUnderlyingType(&ft.Type, tt, true) {
1989 return addToCache(tt)
1990 }
1991 }
1992
1993
1994 ft.Str = resolveReflectName(newName(str, "", false, false))
1995 ft.PtrToThis = 0
1996 return addToCache(&ft.Type)
1997 }
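
// Usage sketch (illustrative, not from this file): FuncOf paired with MakeFunc
// to build a callable func(int) int at run time, from a client package
// importing "reflect" and "fmt".
//
//	intT := reflect.TypeOf(0)
//	ft := reflect.FuncOf([]reflect.Type{intT}, []reflect.Type{intT}, false)
//	double := reflect.MakeFunc(ft, func(args []reflect.Value) []reflect.Value {
//		return []reflect.Value{reflect.ValueOf(int(args[0].Int()) * 2)}
//	})
//	out := double.Call([]reflect.Value{reflect.ValueOf(21)})
//	fmt.Println(ft, out[0].Int()) // func(int) int 42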
1998 func stringFor(t *abi.Type) string {
1999 return toRType(t).String()
2000 }
2001
2002
2003 func funcStr(ft *funcType) string {
2004 repr := make([]byte, 0, 64)
2005 repr = append(repr, "func("...)
2006 for i, t := range ft.InSlice() {
2007 if i > 0 {
2008 repr = append(repr, ", "...)
2009 }
2010 if ft.IsVariadic() && i == int(ft.InCount)-1 {
2011 repr = append(repr, "..."...)
2012 repr = append(repr, stringFor((*sliceType)(unsafe.Pointer(t)).Elem)...)
2013 } else {
2014 repr = append(repr, stringFor(t)...)
2015 }
2016 }
2017 repr = append(repr, ')')
2018 out := ft.OutSlice()
2019 if len(out) == 1 {
2020 repr = append(repr, ' ')
2021 } else if len(out) > 1 {
2022 repr = append(repr, " ("...)
2023 }
2024 for i, t := range out {
2025 if i > 0 {
2026 repr = append(repr, ", "...)
2027 }
2028 repr = append(repr, stringFor(t)...)
2029 }
2030 if len(out) > 1 {
2031 repr = append(repr, ')')
2032 }
2033 return string(repr)
2034 }
2035
2036
2037
2038 func isReflexive(t *abi.Type) bool {
2039 switch Kind(t.Kind()) {
2040 case Bool, Int, Int8, Int16, Int32, Int64, Uint, Uint8, Uint16, Uint32, Uint64, Uintptr, Chan, Pointer, String, UnsafePointer:
2041 return true
2042 case Float32, Float64, Complex64, Complex128, Interface:
2043 return false
2044 case Array:
2045 tt := (*arrayType)(unsafe.Pointer(t))
2046 return isReflexive(tt.Elem)
2047 case Struct:
2048 tt := (*structType)(unsafe.Pointer(t))
2049 for _, f := range tt.Fields {
2050 if !isReflexive(f.Typ) {
2051 return false
2052 }
2053 }
2054 return true
2055 default:
2056
2057 panic("isReflexive called on non-key type " + stringFor(t))
2058 }
2059 }
2060
2061
2062 func needKeyUpdate(t *abi.Type) bool {
2063 switch Kind(t.Kind()) {
2064 case Bool, Int, Int8, Int16, Int32, Int64, Uint, Uint8, Uint16, Uint32, Uint64, Uintptr, Chan, Pointer, UnsafePointer:
2065 return false
2066 case Float32, Float64, Complex64, Complex128, Interface, String:
2067
2068
2069
2070 return true
2071 case Array:
2072 tt := (*arrayType)(unsafe.Pointer(t))
2073 return needKeyUpdate(tt.Elem)
2074 case Struct:
2075 tt := (*structType)(unsafe.Pointer(t))
2076 for _, f := range tt.Fields {
2077 if needKeyUpdate(f.Typ) {
2078 return true
2079 }
2080 }
2081 return false
2082 default:
2083
2084 panic("needKeyUpdate called on non-key type " + stringFor(t))
2085 }
2086 }
2087
2088
2089 func hashMightPanic(t *abi.Type) bool {
2090 switch Kind(t.Kind()) {
2091 case Interface:
2092 return true
2093 case Array:
2094 tt := (*arrayType)(unsafe.Pointer(t))
2095 return hashMightPanic(tt.Elem)
2096 case Struct:
2097 tt := (*structType)(unsafe.Pointer(t))
2098 for _, f := range tt.Fields {
2099 if hashMightPanic(f.Typ) {
2100 return true
2101 }
2102 }
2103 return false
2104 default:
2105 return false
2106 }
2107 }
2108
2109 func bucketOf(ktyp, etyp *abi.Type) *abi.Type {
2110 if ktyp.Size_ > abi.MapMaxKeyBytes {
2111 ktyp = ptrTo(ktyp)
2112 }
2113 if etyp.Size_ > abi.MapMaxElemBytes {
2114 etyp = ptrTo(etyp)
2115 }
2116
2117
2118
2119
2120
2121
2122 var gcdata *byte
2123 var ptrdata uintptr
2124
2125 size := abi.MapBucketCount*(1+ktyp.Size_+etyp.Size_) + goarch.PtrSize
2126 if size&uintptr(ktyp.Align_-1) != 0 || size&uintptr(etyp.Align_-1) != 0 {
2127 panic("reflect: bad size computation in MapOf")
2128 }
2129
2130 if ktyp.Pointers() || etyp.Pointers() {
2131 nptr := (abi.MapBucketCount*(1+ktyp.Size_+etyp.Size_) + goarch.PtrSize) / goarch.PtrSize
2132 n := (nptr + 7) / 8
2133
2134
2135 n = (n + goarch.PtrSize - 1) &^ (goarch.PtrSize - 1)
2136 mask := make([]byte, n)
2137 base := uintptr(abi.MapBucketCount / goarch.PtrSize)
2138
2139 if ktyp.Pointers() {
2140 emitGCMask(mask, base, ktyp, abi.MapBucketCount)
2141 }
2142 base += abi.MapBucketCount * ktyp.Size_ / goarch.PtrSize
2143
2144 if etyp.Pointers() {
2145 emitGCMask(mask, base, etyp, abi.MapBucketCount)
2146 }
2147 base += abi.MapBucketCount * etyp.Size_ / goarch.PtrSize
2148
2149 word := base
2150 mask[word/8] |= 1 << (word % 8)
2151 gcdata = &mask[0]
2152 ptrdata = (word + 1) * goarch.PtrSize
2153
2154
2155 if ptrdata != size {
2156 panic("reflect: bad layout computation in MapOf")
2157 }
2158 }
2159
2160 b := &abi.Type{
2161 Align_: goarch.PtrSize,
2162 Size_: size,
2163 Kind_: abi.Struct,
2164 PtrBytes: ptrdata,
2165 GCData: gcdata,
2166 }
2167 s := "bucket(" + stringFor(ktyp) + "," + stringFor(etyp) + ")"
2168 b.Str = resolveReflectName(newName(s, "", false, false))
2169 return b
2170 }
2171
2172 func (t *rtype) gcSlice(begin, end uintptr) []byte {
2173 return (*[1 << 30]byte)(unsafe.Pointer(t.t.GCData))[begin:end:end]
2174 }
2175
2176
2177
2178 func emitGCMask(out []byte, base uintptr, typ *abi.Type, n uintptr) {
2179 if typ.Kind_&abi.KindGCProg != 0 {
2180 panic("reflect: unexpected GC program")
2181 }
2182 ptrs := typ.PtrBytes / goarch.PtrSize
2183 words := typ.Size_ / goarch.PtrSize
2184 mask := typ.GcSlice(0, (ptrs+7)/8)
2185 for j := uintptr(0); j < ptrs; j++ {
2186 if (mask[j/8]>>(j%8))&1 != 0 {
2187 for i := uintptr(0); i < n; i++ {
2188 k := base + i*words + j
2189 out[k/8] |= 1 << (k % 8)
2190 }
2191 }
2192 }
2193 }
2194
2195
2196
2197 func appendGCProg(dst []byte, typ *abi.Type) []byte {
2198 if typ.Kind_&abi.KindGCProg != 0 {
2199
2200 n := uintptr(*(*uint32)(unsafe.Pointer(typ.GCData)))
2201 prog := typ.GcSlice(4, 4+n-1)
2202 return append(dst, prog...)
2203 }
2204
2205
2206 ptrs := typ.PtrBytes / goarch.PtrSize
2207 mask := typ.GcSlice(0, (ptrs+7)/8)
2208
2209
2210 for ; ptrs > 120; ptrs -= 120 {
2211 dst = append(dst, 120)
2212 dst = append(dst, mask[:15]...)
2213 mask = mask[15:]
2214 }
2215
2216 dst = append(dst, byte(ptrs))
2217 dst = append(dst, mask...)
2218 return dst
2219 }
2220
2221
2222
2223 func SliceOf(t Type) Type {
2224 typ := t.common()
2225
2226
2227 ckey := cacheKey{Slice, typ, nil, 0}
2228 if slice, ok := lookupCache.Load(ckey); ok {
2229 return slice.(Type)
2230 }
2231
2232
2233 s := "[]" + stringFor(typ)
2234 for _, tt := range typesByString(s) {
2235 slice := (*sliceType)(unsafe.Pointer(tt))
2236 if slice.Elem == typ {
2237 ti, _ := lookupCache.LoadOrStore(ckey, toRType(tt))
2238 return ti.(Type)
2239 }
2240 }
2241
2242
2243 var islice any = ([]unsafe.Pointer)(nil)
2244 prototype := *(**sliceType)(unsafe.Pointer(&islice))
2245 slice := *prototype
2246 slice.TFlag = 0
2247 slice.Str = resolveReflectName(newName(s, "", false, false))
2248 slice.Hash = fnv1(typ.Hash, '[')
2249 slice.Elem = typ
2250 slice.PtrToThis = 0
2251
2252 ti, _ := lookupCache.LoadOrStore(ckey, toRType(&slice.Type))
2253 return ti.(Type)
2254 }
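
// Usage sketch (illustrative, not from this file): SliceOf combined with
// MakeSlice and Append, from a client package importing "reflect" and "fmt".
//
//	st := reflect.SliceOf(reflect.TypeOf(0.0))
//	s := reflect.MakeSlice(st, 0, 4)
//	s = reflect.Append(s, reflect.ValueOf(1.5), reflect.ValueOf(2.5))
//	fmt.Println(st, s.Len(), s.Index(1).Float()) // []float64 2 2.5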
2255
2256
2257
2258
2259 var structLookupCache struct {
2260 sync.Mutex
2261
2262
2263
2264 m sync.Map
2265 }
2266
2267 type structTypeUncommon struct {
2268 structType
2269 u uncommonType
2270 }
2271
2272
2273 func isLetter(ch rune) bool {
2274 return 'a' <= ch && ch <= 'z' || 'A' <= ch && ch <= 'Z' || ch == '_' || ch >= utf8.RuneSelf && unicode.IsLetter(ch)
2275 }
2276
2277
2278
2279
2280
2281
2282
2283 func isValidFieldName(fieldName string) bool {
2284 for i, c := range fieldName {
2285 if i == 0 && !isLetter(c) {
2286 return false
2287 }
2288
2289 if !(isLetter(c) || unicode.IsDigit(c)) {
2290 return false
2291 }
2292 }
2293
2294 return len(fieldName) > 0
2295 }
2296
2297
2298 func isRegularMemory(t Type) bool {
2299 switch t.Kind() {
2300 case Array:
2301 elem := t.Elem()
2302 if isRegularMemory(elem) {
2303 return true
2304 }
2305 return elem.Comparable() && t.Len() == 0
2306 case Int8, Int16, Int32, Int64, Int, Uint8, Uint16, Uint32, Uint64, Uint, Uintptr, Chan, Pointer, Bool, UnsafePointer:
2307 return true
2308 case Struct:
2309 num := t.NumField()
2310 switch num {
2311 case 0:
2312 return true
2313 case 1:
2314 field := t.Field(0)
2315 if field.Name == "_" {
2316 return false
2317 }
2318 return isRegularMemory(field.Type)
2319 default:
2320 for i := range num {
2321 field := t.Field(i)
2322 if field.Name == "_" || !isRegularMemory(field.Type) || isPaddedField(t, i) {
2323 return false
2324 }
2325 }
2326 return true
2327 }
2328 }
2329 return false
2330 }
2331
2332
2333
2334 func isPaddedField(t Type, i int) bool {
2335 field := t.Field(i)
2336 if i+1 < t.NumField() {
2337 return field.Offset+field.Type.Size() != t.Field(i+1).Offset
2338 }
2339 return field.Offset+field.Type.Size() != t.Size()
2340 }
2341
2342
2343
2344
2345
2346
2347
2348 func StructOf(fields []StructField) Type {
2349 var (
2350 hash = fnv1(0, []byte("struct {")...)
2351 size uintptr
2352 typalign uint8
2353 comparable = true
2354 methods []abi.Method
2355
2356 fs = make([]structField, len(fields))
2357 repr = make([]byte, 0, 64)
2358 fset = map[string]struct{}{}
2359
2360 hasGCProg = false
2361 )
2362
2363 lastzero := uintptr(0)
2364 repr = append(repr, "struct {"...)
2365 pkgpath := ""
2366 for i, field := range fields {
2367 if field.Name == "" {
2368 panic("reflect.StructOf: field " + strconv.Itoa(i) + " has no name")
2369 }
2370 if !isValidFieldName(field.Name) {
2371 panic("reflect.StructOf: field " + strconv.Itoa(i) + " has invalid name")
2372 }
2373 if field.Type == nil {
2374 panic("reflect.StructOf: field " + strconv.Itoa(i) + " has no type")
2375 }
2376 f, fpkgpath := runtimeStructField(field)
2377 ft := f.Typ
2378 if ft.Kind_&abi.KindGCProg != 0 {
2379 hasGCProg = true
2380 }
2381 if fpkgpath != "" {
2382 if pkgpath == "" {
2383 pkgpath = fpkgpath
2384 } else if pkgpath != fpkgpath {
2385 panic("reflect.Struct: fields with different PkgPath " + pkgpath + " and " + fpkgpath)
2386 }
2387 }
2388
2389
2390 name := f.Name.Name()
2391 hash = fnv1(hash, []byte(name)...)
2392 if !f.Embedded() {
2393 repr = append(repr, (" " + name)...)
2394 } else {
2395
2396 if f.Typ.Kind() == abi.Pointer {
2397
2398 elem := ft.Elem()
2399 if k := elem.Kind(); k == abi.Pointer || k == abi.Interface {
2400 panic("reflect.StructOf: illegal embedded field type " + stringFor(ft))
2401 }
2402 }
2403
2404 switch Kind(f.Typ.Kind()) {
2405 case Interface:
2406 ift := (*interfaceType)(unsafe.Pointer(ft))
2407 for _, m := range ift.Methods {
2408 if pkgPath(ift.nameOff(m.Name)) != "" {
2409
2410 panic("reflect: embedded interface with unexported method(s) not implemented")
2411 }
2412
2413 fnStub := resolveReflectText(unsafe.Pointer(abi.FuncPCABIInternal(embeddedIfaceMethStub)))
2414 methods = append(methods, abi.Method{
2415 Name: resolveReflectName(ift.nameOff(m.Name)),
2416 Mtyp: resolveReflectType(ift.typeOff(m.Typ)),
2417 Ifn: fnStub,
2418 Tfn: fnStub,
2419 })
2420 }
2421 case Pointer:
2422 ptr := (*ptrType)(unsafe.Pointer(ft))
2423 if unt := ptr.Uncommon(); unt != nil {
2424 if i > 0 && unt.Mcount > 0 {
2425
2426 panic("reflect: embedded type with methods not implemented if type is not first field")
2427 }
2428 if len(fields) > 1 {
2429 panic("reflect: embedded type with methods not implemented if there is more than one field")
2430 }
2431 for _, m := range unt.Methods() {
2432 mname := nameOffFor(ft, m.Name)
2433 if pkgPath(mname) != "" {
2434
2435
2436 panic("reflect: embedded interface with unexported method(s) not implemented")
2437 }
2438 methods = append(methods, abi.Method{
2439 Name: resolveReflectName(mname),
2440 Mtyp: resolveReflectType(typeOffFor(ft, m.Mtyp)),
2441 Ifn: resolveReflectText(textOffFor(ft, m.Ifn)),
2442 Tfn: resolveReflectText(textOffFor(ft, m.Tfn)),
2443 })
2444 }
2445 }
2446 if unt := ptr.Elem.Uncommon(); unt != nil {
2447 for _, m := range unt.Methods() {
2448 mname := nameOffFor(ft, m.Name)
2449 if pkgPath(mname) != "" {
2450
2451
2452 panic("reflect: embedded interface with unexported method(s) not implemented")
2453 }
2454 methods = append(methods, abi.Method{
2455 Name: resolveReflectName(mname),
2456 Mtyp: resolveReflectType(typeOffFor(ptr.Elem, m.Mtyp)),
2457 Ifn: resolveReflectText(textOffFor(ptr.Elem, m.Ifn)),
2458 Tfn: resolveReflectText(textOffFor(ptr.Elem, m.Tfn)),
2459 })
2460 }
2461 }
2462 default:
2463 if unt := ft.Uncommon(); unt != nil {
2464 if i > 0 && unt.Mcount > 0 {
2465
2466 panic("reflect: embedded type with methods not implemented if type is not first field")
2467 }
2468 if len(fields) > 1 && ft.Kind_&abi.KindDirectIface != 0 {
2469 panic("reflect: embedded type with methods not implemented for non-pointer type")
2470 }
2471 for _, m := range unt.Methods() {
2472 mname := nameOffFor(ft, m.Name)
2473 if pkgPath(mname) != "" {
2474
2475
2476 panic("reflect: embedded interface with unexported method(s) not implemented")
2477 }
2478 methods = append(methods, abi.Method{
2479 Name: resolveReflectName(mname),
2480 Mtyp: resolveReflectType(typeOffFor(ft, m.Mtyp)),
2481 Ifn: resolveReflectText(textOffFor(ft, m.Ifn)),
2482 Tfn: resolveReflectText(textOffFor(ft, m.Tfn)),
2483 })
2484
2485 }
2486 }
2487 }
2488 }
2489 if _, dup := fset[name]; dup && name != "_" {
2490 panic("reflect.StructOf: duplicate field " + name)
2491 }
2492 fset[name] = struct{}{}
2493
2494 hash = fnv1(hash, byte(ft.Hash>>24), byte(ft.Hash>>16), byte(ft.Hash>>8), byte(ft.Hash))
2495
2496 repr = append(repr, (" " + stringFor(ft))...)
2497 if f.Name.HasTag() {
2498 hash = fnv1(hash, []byte(f.Name.Tag())...)
2499 repr = append(repr, (" " + strconv.Quote(f.Name.Tag()))...)
2500 }
2501 if i < len(fields)-1 {
2502 repr = append(repr, ';')
2503 }
2504
2505 comparable = comparable && (ft.Equal != nil)
2506
2507 offset := align(size, uintptr(ft.Align_))
2508 if offset < size {
2509 panic("reflect.StructOf: struct size would exceed virtual address space")
2510 }
2511 if ft.Align_ > typalign {
2512 typalign = ft.Align_
2513 }
2514 size = offset + ft.Size_
2515 if size < offset {
2516 panic("reflect.StructOf: struct size would exceed virtual address space")
2517 }
2518 f.Offset = offset
2519
2520 if ft.Size_ == 0 {
2521 lastzero = size
2522 }
2523
2524 fs[i] = f
2525 }
2526
2527 if size > 0 && lastzero == size {
2528
2529
2530
2531
2532
2533 size++
2534 if size == 0 {
2535 panic("reflect.StructOf: struct size would exceed virtual address space")
2536 }
2537 }
2538
2539 var typ *structType
2540 var ut *uncommonType
2541
2542 if len(methods) == 0 {
2543 t := new(structTypeUncommon)
2544 typ = &t.structType
2545 ut = &t.u
2546 } else {
2547
2548
2549
2550
2551
2552 tt := New(StructOf([]StructField{
2553 {Name: "S", Type: TypeOf(structType{})},
2554 {Name: "U", Type: TypeOf(uncommonType{})},
2555 {Name: "M", Type: ArrayOf(len(methods), TypeOf(methods[0]))},
2556 }))
2557
2558 typ = (*structType)(tt.Elem().Field(0).Addr().UnsafePointer())
2559 ut = (*uncommonType)(tt.Elem().Field(1).Addr().UnsafePointer())
2560
2561 copy(tt.Elem().Field(2).Slice(0, len(methods)).Interface().([]abi.Method), methods)
2562 }
2563
2564
2565
2566
2567 ut.Mcount = uint16(len(methods))
2568 ut.Xcount = ut.Mcount
2569 ut.Moff = uint32(unsafe.Sizeof(uncommonType{}))
2570
2571 if len(fs) > 0 {
2572 repr = append(repr, ' ')
2573 }
2574 repr = append(repr, '}')
2575 hash = fnv1(hash, '}')
2576 str := string(repr)
2577
2578
2579 s := align(size, uintptr(typalign))
2580 if s < size {
2581 panic("reflect.StructOf: struct size would exceed virtual address space")
2582 }
2583 size = s
2584
2585
2586 var istruct any = struct{}{}
2587 prototype := *(**structType)(unsafe.Pointer(&istruct))
2588 *typ = *prototype
2589 typ.Fields = fs
2590 if pkgpath != "" {
2591 typ.PkgPath = newName(pkgpath, "", false, false)
2592 }
2593
2594
2595 if ts, ok := structLookupCache.m.Load(hash); ok {
2596 for _, st := range ts.([]Type) {
2597 t := st.common()
2598 if haveIdenticalUnderlyingType(&typ.Type, t, true) {
2599 return toType(t)
2600 }
2601 }
2602 }
2603
2604
2605 structLookupCache.Lock()
2606 defer structLookupCache.Unlock()
2607 if ts, ok := structLookupCache.m.Load(hash); ok {
2608 for _, st := range ts.([]Type) {
2609 t := st.common()
2610 if haveIdenticalUnderlyingType(&typ.Type, t, true) {
2611 return toType(t)
2612 }
2613 }
2614 }
2615
2616 addToCache := func(t Type) Type {
2617 var ts []Type
2618 if ti, ok := structLookupCache.m.Load(hash); ok {
2619 ts = ti.([]Type)
2620 }
2621 structLookupCache.m.Store(hash, append(ts, t))
2622 return t
2623 }
2624
2625
2626 for _, t := range typesByString(str) {
2627 if haveIdenticalUnderlyingType(&typ.Type, t, true) {
2628
2629
2630
2631 return addToCache(toType(t))
2632 }
2633 }
2634
2635 typ.Str = resolveReflectName(newName(str, "", false, false))
2636 if isRegularMemory(toType(&typ.Type)) {
2637 typ.TFlag = abi.TFlagRegularMemory
2638 } else {
2639 typ.TFlag = 0
2640 }
2641 typ.Hash = hash
2642 typ.Size_ = size
2643 typ.PtrBytes = typeptrdata(&typ.Type)
2644 typ.Align_ = typalign
2645 typ.FieldAlign_ = typalign
2646 typ.PtrToThis = 0
2647 if len(methods) > 0 {
2648 typ.TFlag |= abi.TFlagUncommon
2649 }
2650
2651 if hasGCProg {
2652 lastPtrField := 0
2653 for i, ft := range fs {
2654 if ft.Typ.Pointers() {
2655 lastPtrField = i
2656 }
2657 }
2658 prog := []byte{0, 0, 0, 0}
2659 var off uintptr
2660 for i, ft := range fs {
2661 if i > lastPtrField {
2662
2663
2664 break
2665 }
2666 if !ft.Typ.Pointers() {
2667
2668 continue
2669 }
2670
2671 if ft.Offset > off {
2672 n := (ft.Offset - off) / goarch.PtrSize
2673 prog = append(prog, 0x01, 0x00)
2674 if n > 1 {
2675 prog = append(prog, 0x81)
2676 prog = appendVarint(prog, n-1)
2677 }
2678 off = ft.Offset
2679 }
2680
2681 prog = appendGCProg(prog, ft.Typ)
2682 off += ft.Typ.PtrBytes
2683 }
2684 prog = append(prog, 0)
2685 *(*uint32)(unsafe.Pointer(&prog[0])) = uint32(len(prog) - 4)
2686 typ.Kind_ |= abi.KindGCProg
2687 typ.GCData = &prog[0]
2688 } else {
2689 typ.Kind_ &^= abi.KindGCProg
2690 bv := new(bitVector)
2691 addTypeBits(bv, 0, &typ.Type)
2692 if len(bv.data) > 0 {
2693 typ.GCData = &bv.data[0]
2694 }
2695 }
2696 typ.Equal = nil
2697 if comparable {
2698 typ.Equal = func(p, q unsafe.Pointer) bool {
2699 for _, ft := range typ.Fields {
2700 pi := add(p, ft.Offset, "&x.field safe")
2701 qi := add(q, ft.Offset, "&x.field safe")
2702 if !ft.Typ.Equal(pi, qi) {
2703 return false
2704 }
2705 }
2706 return true
2707 }
2708 }
2709
2710 switch {
2711 case len(fs) == 1 && !fs[0].Typ.IfaceIndir():
2712
2713 typ.Kind_ |= abi.KindDirectIface
2714 default:
2715 typ.Kind_ &^= abi.KindDirectIface
2716 }
2717
2718 return addToCache(toType(&typ.Type))
2719 }
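
// Usage sketch (illustrative, not from this file): building a struct type at
// run time. StructOf panics on invalid or unexported field names without
// PkgPath and on embedded fields with methods, per the checks above. Assumes
// a client package importing "reflect" and "fmt".
//
//	t := reflect.StructOf([]reflect.StructField{
//		{Name: "Name", Type: reflect.TypeOf(""), Tag: `json:"name"`},
//		{Name: "Age", Type: reflect.TypeOf(0), Tag: `json:"age"`},
//	})
//	v := reflect.New(t).Elem()
//	v.Field(0).SetString("Gopher")
//	v.Field(1).SetInt(15)
//	fmt.Println(v.Interface()) // {Gopher 15}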
2720
2721 func embeddedIfaceMethStub() {
2722 panic("reflect: StructOf does not support methods of embedded interfaces")
2723 }
2724
2725
2726
2727
2728 func runtimeStructField(field StructField) (structField, string) {
2729 if field.Anonymous && field.PkgPath != "" {
2730 panic("reflect.StructOf: field \"" + field.Name + "\" is anonymous but has PkgPath set")
2731 }
2732
2733 if field.IsExported() {
2734
2735
2736 c := field.Name[0]
2737 if 'a' <= c && c <= 'z' || c == '_' {
2738 panic("reflect.StructOf: field \"" + field.Name + "\" is unexported but missing PkgPath")
2739 }
2740 }
2741
2742 resolveReflectType(field.Type.common())
2743 f := structField{
2744 Name: newName(field.Name, string(field.Tag), field.IsExported(), field.Anonymous),
2745 Typ: field.Type.common(),
2746 Offset: 0,
2747 }
2748 return f, field.PkgPath
2749 }
2750
2751
2752
2753
2754 func typeptrdata(t *abi.Type) uintptr {
2755 switch t.Kind() {
2756 case abi.Struct:
2757 st := (*structType)(unsafe.Pointer(t))
2758
2759 field := -1
2760 for i := range st.Fields {
2761 ft := st.Fields[i].Typ
2762 if ft.Pointers() {
2763 field = i
2764 }
2765 }
2766 if field == -1 {
2767 return 0
2768 }
2769 f := st.Fields[field]
2770 return f.Offset + f.Typ.PtrBytes
2771
2772 default:
2773 panic("reflect.typeptrdata: unexpected type, " + stringFor(t))
2774 }
2775 }
2776
2777
2778
2779
2780
2781
2782 func ArrayOf(length int, elem Type) Type {
2783 if length < 0 {
2784 panic("reflect: negative length passed to ArrayOf")
2785 }
2786
2787 typ := elem.common()
2788
2789
2790 ckey := cacheKey{Array, typ, nil, uintptr(length)}
2791 if array, ok := lookupCache.Load(ckey); ok {
2792 return array.(Type)
2793 }
2794
2795
2796 s := "[" + strconv.Itoa(length) + "]" + stringFor(typ)
2797 for _, tt := range typesByString(s) {
2798 array := (*arrayType)(unsafe.Pointer(tt))
2799 if array.Elem == typ {
2800 ti, _ := lookupCache.LoadOrStore(ckey, toRType(tt))
2801 return ti.(Type)
2802 }
2803 }
2804
2805
2806 var iarray any = [1]unsafe.Pointer{}
2807 prototype := *(**arrayType)(unsafe.Pointer(&iarray))
2808 array := *prototype
2809 array.TFlag = typ.TFlag & abi.TFlagRegularMemory
2810 array.Str = resolveReflectName(newName(s, "", false, false))
2811 array.Hash = fnv1(typ.Hash, '[')
2812 for n := uint32(length); n > 0; n >>= 8 {
2813 array.Hash = fnv1(array.Hash, byte(n))
2814 }
2815 array.Hash = fnv1(array.Hash, ']')
2816 array.Elem = typ
2817 array.PtrToThis = 0
2818 if typ.Size_ > 0 {
2819 max := ^uintptr(0) / typ.Size_
2820 if uintptr(length) > max {
2821 panic("reflect.ArrayOf: array size would exceed virtual address space")
2822 }
2823 }
2824 array.Size_ = typ.Size_ * uintptr(length)
2825 if length > 0 && typ.Pointers() {
2826 array.PtrBytes = typ.Size_*uintptr(length-1) + typ.PtrBytes
2827 }
2828 array.Align_ = typ.Align_
2829 array.FieldAlign_ = typ.FieldAlign_
2830 array.Len = uintptr(length)
2831 array.Slice = &(SliceOf(elem).(*rtype).t)
2832
2833 switch {
2834 case !typ.Pointers() || array.Size_ == 0:
2835
2836 array.GCData = nil
2837 array.PtrBytes = 0
2838
2839 case length == 1:
2840
2841 array.Kind_ |= typ.Kind_ & abi.KindGCProg
2842 array.GCData = typ.GCData
2843 array.PtrBytes = typ.PtrBytes
2844
2845 case typ.Kind_&abi.KindGCProg == 0 && array.Size_ <= abi.MaxPtrmaskBytes*8*goarch.PtrSize:
2846
2847
2848
2849 n := (array.PtrBytes/goarch.PtrSize + 7) / 8
2850
2851 n = (n + goarch.PtrSize - 1) &^ (goarch.PtrSize - 1)
2852 mask := make([]byte, n)
2853 emitGCMask(mask, 0, typ, array.Len)
2854 array.GCData = &mask[0]
2855
2856 default:
2857
2858
2859 prog := []byte{0, 0, 0, 0}
2860 prog = appendGCProg(prog, typ)
2861
2862 elemPtrs := typ.PtrBytes / goarch.PtrSize
2863 elemWords := typ.Size_ / goarch.PtrSize
2864 if elemPtrs < elemWords {
2865
2866 prog = append(prog, 0x01, 0x00)
2867 if elemPtrs+1 < elemWords {
2868 prog = append(prog, 0x81)
2869 prog = appendVarint(prog, elemWords-elemPtrs-1)
2870 }
2871 }
2872
2873 if elemWords < 0x80 {
2874 prog = append(prog, byte(elemWords|0x80))
2875 } else {
2876 prog = append(prog, 0x80)
2877 prog = appendVarint(prog, elemWords)
2878 }
2879 prog = appendVarint(prog, uintptr(length)-1)
2880 prog = append(prog, 0)
2881 *(*uint32)(unsafe.Pointer(&prog[0])) = uint32(len(prog) - 4)
2882 array.Kind_ |= abi.KindGCProg
2883 array.GCData = &prog[0]
2884 array.PtrBytes = array.Size_
2885 }
2886
2887 etyp := typ
2888 esize := etyp.Size()
2889
2890 array.Equal = nil
2891 if eequal := etyp.Equal; eequal != nil {
2892 array.Equal = func(p, q unsafe.Pointer) bool {
2893 for i := 0; i < length; i++ {
2894 pi := arrayAt(p, i, esize, "i < length")
2895 qi := arrayAt(q, i, esize, "i < length")
2896 if !eequal(pi, qi) {
2897 return false
2898 }
2899
2900 }
2901 return true
2902 }
2903 }
2904
2905 switch {
2906 case length == 1 && !typ.IfaceIndir():
2907
2908 array.Kind_ |= abi.KindDirectIface
2909 default:
2910 array.Kind_ &^= abi.KindDirectIface
2911 }
2912
2913 ti, _ := lookupCache.LoadOrStore(ckey, toRType(&array.Type))
2914 return ti.(Type)
2915 }
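
// Usage sketch (illustrative, not from this file): ArrayOf builds a fixed-size
// array type; it panics if length*elemSize would overflow the address space.
// Assumes a client package importing "reflect" and "fmt".
//
//	at := reflect.ArrayOf(3, reflect.TypeOf(byte(0)))
//	a := reflect.New(at).Elem()
//	a.Index(0).SetUint('G')
//	fmt.Println(at, a.Len(), a.Index(0).Uint()) // [3]uint8 3 71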
2916
2917 func appendVarint(x []byte, v uintptr) []byte {
2918 for ; v >= 0x80; v >>= 7 {
2919 x = append(x, byte(v|0x80))
2920 }
2921 x = append(x, byte(v))
2922 return x
2923 }
2924
2925
2926
2927
2928
2929
2930
2931
2932
2933
2934
2935
2936
2937
2938
2939
2940
2941
2942
2943 func toType(t *abi.Type) Type {
2944 if t == nil {
2945 return nil
2946 }
2947 return toRType(t)
2948 }
2949
2950 type layoutKey struct {
2951 ftyp *funcType
2952 rcvr *abi.Type
2953 }
2954
2955 type layoutType struct {
2956 t *abi.Type
2957 framePool *sync.Pool
2958 abid abiDesc
2959 }
2960
2961 var layoutCache sync.Map
2962
2963
2964
2965
2966
2967
2968
2969
2970 func funcLayout(t *funcType, rcvr *abi.Type) (frametype *abi.Type, framePool *sync.Pool, abid abiDesc) {
2971 if t.Kind() != abi.Func {
2972 panic("reflect: funcLayout of non-func type " + stringFor(&t.Type))
2973 }
2974 if rcvr != nil && rcvr.Kind() == abi.Interface {
2975 panic("reflect: funcLayout with interface receiver " + stringFor(rcvr))
2976 }
2977 k := layoutKey{t, rcvr}
2978 if lti, ok := layoutCache.Load(k); ok {
2979 lt := lti.(layoutType)
2980 return lt.t, lt.framePool, lt.abid
2981 }
2982
2983
2984 abid = newAbiDesc(t, rcvr)
2985
2986
2987 x := &abi.Type{
2988 Align_: goarch.PtrSize,
2989
2990
2991
2992
2993 Size_: align(abid.retOffset+abid.ret.stackBytes, goarch.PtrSize),
2994 PtrBytes: uintptr(abid.stackPtrs.n) * goarch.PtrSize,
2995 }
2996 if abid.stackPtrs.n > 0 {
2997 x.GCData = &abid.stackPtrs.data[0]
2998 }
2999
3000 var s string
3001 if rcvr != nil {
3002 s = "methodargs(" + stringFor(rcvr) + ")(" + stringFor(&t.Type) + ")"
3003 } else {
3004 s = "funcargs(" + stringFor(&t.Type) + ")"
3005 }
3006 x.Str = resolveReflectName(newName(s, "", false, false))
3007
3008
3009 framePool = &sync.Pool{New: func() any {
3010 return unsafe_New(x)
3011 }}
3012 lti, _ := layoutCache.LoadOrStore(k, layoutType{
3013 t: x,
3014 framePool: framePool,
3015 abid: abid,
3016 })
3017 lt := lti.(layoutType)
3018 return lt.t, lt.framePool, lt.abid
3019 }
3020
3021
3022 type bitVector struct {
3023 n uint32
3024 data []byte
3025 }
3026
3027
3028 func (bv *bitVector) append(bit uint8) {
3029 if bv.n%(8*goarch.PtrSize) == 0 {
3030
3031
3032
3033 for i := 0; i < goarch.PtrSize; i++ {
3034 bv.data = append(bv.data, 0)
3035 }
3036 }
3037 bv.data[bv.n/8] |= bit << (bv.n % 8)
3038 bv.n++
3039 }
3040
3041 func addTypeBits(bv *bitVector, offset uintptr, t *abi.Type) {
3042 if !t.Pointers() {
3043 return
3044 }
3045
3046 switch Kind(t.Kind_ & abi.KindMask) {
3047 case Chan, Func, Map, Pointer, Slice, String, UnsafePointer:
3048
3049 for bv.n < uint32(offset/goarch.PtrSize) {
3050 bv.append(0)
3051 }
3052 bv.append(1)
3053
3054 case Interface:
3055
3056 for bv.n < uint32(offset/goarch.PtrSize) {
3057 bv.append(0)
3058 }
3059 bv.append(1)
3060 bv.append(1)
3061
3062 case Array:
3063
3064 tt := (*arrayType)(unsafe.Pointer(t))
3065 for i := 0; i < int(tt.Len); i++ {
3066 addTypeBits(bv, offset+uintptr(i)*tt.Elem.Size_, tt.Elem)
3067 }
3068
3069 case Struct:
3070
3071 tt := (*structType)(unsafe.Pointer(t))
3072 for i := range tt.Fields {
3073 f := &tt.Fields[i]
3074 addTypeBits(bv, offset+f.Offset, f.Typ)
3075 }
3076 }
3077 }
3078
3079
3080 func TypeFor[T any]() Type {
3081 var v T
3082 if t := TypeOf(v); t != nil {
3083 return t
3084 }
3085 return TypeOf((*T)(nil)).Elem()
3086 }
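
// Usage sketch (illustrative, not from this file): TypeFor is the generic
// counterpart of the nil-pointer idiom and works for interface types
// directly. Assumes a client package importing "reflect" and "fmt".
//
//	fmt.Println(reflect.TypeFor[int]())          // int
//	fmt.Println(reflect.TypeFor[error]().Kind()) // interface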