Source file
src/reflect/value.go
1
2
3
4
5 package reflect
6
7 import (
8 "errors"
9 "internal/abi"
10 "internal/goarch"
11 "internal/itoa"
12 "internal/unsafeheader"
13 "math"
14 "runtime"
15 "unsafe"
16 )
17
18 // Value is the reflection interface to a Go value.
19 //
20 // Not all methods apply to all kinds of values. Restrictions,
21 // if any, are noted in the documentation for each method.
22 // Use the Kind method to find out the kind of value before
23 // calling kind-specific methods. Calling a method
24 // inappropriate to the kind of type causes a run time panic.
25 //
26 // The zero Value represents no value.
27 // Its IsValid method returns false, its Kind method returns Invalid,
28 // its String method returns "<invalid Value>", and all other methods panic.
29 // Most functions and methods never return an invalid value.
30 // If one does, its documentation states the conditions explicitly.
31 //
32 // A Value can be used concurrently by multiple goroutines provided that
33 // the underlying Go value can be used concurrently for the equivalent
34 // direct operations.
35 //
36 // To compare two Values, compare the results of the Interface method.
37 // Using == on two Values does not compare the underlying values
38 // they represent.
39 type Value struct {
40 // typ_ holds the type of the value represented by a Value.
41 // Use typ() to access typ_ to avoid escape of v.
42 typ_ *abi.Type
43
44 // Pointer-valued data or, if flagIndir is set, pointer to data.
45 // Valid when either flagIndir is set or typ.pointers() is true.
46 ptr unsafe.Pointer
47
48 // flag holds metadata about the value.
49 //
50 // The lowest five bits give the Kind of the value, mirroring typ.Kind().
51 //
52 // The next set of bits are flag bits:
53 //	- flagStickyRO: obtained via unexported not embedded field, so read-only
54 //	- flagEmbedRO: obtained via unexported embedded field, so read-only
55 //	- flagIndir: val holds a pointer to the data
56 //	- flagAddr: v.CanAddr is true (implies flagIndir and ptr is non-nil)
57 //	- flagMethod: v is a method value.
58 // If ifaceIndir(typ), code can assume that flagIndir is set.
59 //
60 // The remaining 22+ bits give a method number for method values.
61 // If flag.kind() != Func, code can assume flagMethod is unset.
62 flag
63
64 // A method value represents a curried method invocation
65 // like r.Read for some receiver r. The typ+val+flag bits describe
66 // the receiver r, but the flag's Kind bits say Func (methods are
67 // functions), and the top bits of the flag give the method number
68 // in r's type's method table.
69 }
70
71 type flag uintptr
72
73 const (
74 flagKindWidth = 5
75 flagKindMask flag = 1<<flagKindWidth - 1
76 flagStickyRO flag = 1 << 5
77 flagEmbedRO flag = 1 << 6
78 flagIndir flag = 1 << 7
79 flagAddr flag = 1 << 8
80 flagMethod flag = 1 << 9
81 flagMethodShift = 10
82 flagRO flag = flagStickyRO | flagEmbedRO
83 )
84
85 func (f flag) kind() Kind {
86 return Kind(f & flagKindMask)
87 }
88
89 func (f flag) ro() flag {
90 if f&flagRO != 0 {
91 return flagStickyRO
92 }
93 return 0
94 }
95
96 func (v Value) typ() *abi.Type {
97
98
99
100
101
102 return (*abi.Type)(abi.NoEscape(unsafe.Pointer(v.typ_)))
103 }
104
105
106
107
108 func (v Value) pointer() unsafe.Pointer {
109 if v.typ().Size() != goarch.PtrSize || !v.typ().Pointers() {
110 panic("can't call pointer on a non-pointer Value")
111 }
112 if v.flag&flagIndir != 0 {
113 return *(*unsafe.Pointer)(v.ptr)
114 }
115 return v.ptr
116 }
117
118
119 func packEface(v Value) any {
120 t := v.typ()
121 var i any
122 e := (*abi.EmptyInterface)(unsafe.Pointer(&i))
123
124 switch {
125 case t.IfaceIndir():
126 if v.flag&flagIndir == 0 {
127 panic("bad indir")
128 }
129
130 ptr := v.ptr
131 if v.flag&flagAddr != 0 {
132 c := unsafe_New(t)
133 typedmemmove(t, c, ptr)
134 ptr = c
135 }
136 e.Data = ptr
137 case v.flag&flagIndir != 0:
138
139
140 e.Data = *(*unsafe.Pointer)(v.ptr)
141 default:
142
143 e.Data = v.ptr
144 }
145
146
147
148
149 e.Type = t
150 return i
151 }
152
153
154 func unpackEface(i any) Value {
155 e := (*abi.EmptyInterface)(unsafe.Pointer(&i))
156
157 t := e.Type
158 if t == nil {
159 return Value{}
160 }
161 f := flag(t.Kind())
162 if t.IfaceIndir() {
163 f |= flagIndir
164 }
165 return Value{t, e.Data, f}
166 }
167
168 // A ValueError occurs when a Value method is invoked on
169 // a Value that does not support it. Such cases are documented
170 // in the description of each method.
171 type ValueError struct {
172 Method string
173 Kind Kind
174 }
175
176 func (e *ValueError) Error() string {
177 if e.Kind == 0 {
178 return "reflect: call of " + e.Method + " on zero Value"
179 }
180 return "reflect: call of " + e.Method + " on " + e.Kind.String() + " Value"
181 }
182
183
184 func valueMethodName() string {
185 var pc [5]uintptr
186 n := runtime.Callers(1, pc[:])
187 frames := runtime.CallersFrames(pc[:n])
188 var frame runtime.Frame
189 for more := true; more; {
190 const prefix = "reflect.Value."
191 frame, more = frames.Next()
192 name := frame.Function
193 if len(name) > len(prefix) && name[:len(prefix)] == prefix {
194 methodName := name[len(prefix):]
195 if len(methodName) > 0 && 'A' <= methodName[0] && methodName[0] <= 'Z' {
196 return name
197 }
198 }
199 }
200 return "unknown method"
201 }
202
203
204 type nonEmptyInterface struct {
205 itab *abi.ITab
206 word unsafe.Pointer
207 }
208
209
210
211
212
213
214
215 func (f flag) mustBe(expected Kind) {
216
217 if Kind(f&flagKindMask) != expected {
218 panic(&ValueError{valueMethodName(), f.kind()})
219 }
220 }
221
222
223
224 func (f flag) mustBeExported() {
225 if f == 0 || f&flagRO != 0 {
226 f.mustBeExportedSlow()
227 }
228 }
229
230 func (f flag) mustBeExportedSlow() {
231 if f == 0 {
232 panic(&ValueError{valueMethodName(), Invalid})
233 }
234 if f&flagRO != 0 {
235 panic("reflect: " + valueMethodName() + " using value obtained using unexported field")
236 }
237 }
238
239
240
241
242 func (f flag) mustBeAssignable() {
243 if f&flagRO != 0 || f&flagAddr == 0 {
244 f.mustBeAssignableSlow()
245 }
246 }
247
248 func (f flag) mustBeAssignableSlow() {
249 if f == 0 {
250 panic(&ValueError{valueMethodName(), Invalid})
251 }
252
253 if f&flagRO != 0 {
254 panic("reflect: " + valueMethodName() + " using value obtained using unexported field")
255 }
256 if f&flagAddr == 0 {
257 panic("reflect: " + valueMethodName() + " using unaddressable value")
258 }
259 }
260
261
262
263
264
265
266 func (v Value) Addr() Value {
267 if v.flag&flagAddr == 0 {
268 panic("reflect.Value.Addr of unaddressable value")
269 }
270
271
272 fl := v.flag & flagRO
273 return Value{ptrTo(v.typ()), v.ptr, fl | flag(Pointer)}
274 }
275
276
277
278 func (v Value) Bool() bool {
279
280 if v.kind() != Bool {
281 v.panicNotBool()
282 }
283 return *(*bool)(v.ptr)
284 }
285
286 func (v Value) panicNotBool() {
287 v.mustBe(Bool)
288 }
289
290 var bytesType = rtypeOf(([]byte)(nil))
291
292
293
294
295 func (v Value) Bytes() []byte {
296
297 if v.typ_ == bytesType {
298 return *(*[]byte)(v.ptr)
299 }
300 return v.bytesSlow()
301 }
302
303 func (v Value) bytesSlow() []byte {
304 switch v.kind() {
305 case Slice:
306 if v.typ().Elem().Kind() != abi.Uint8 {
307 panic("reflect.Value.Bytes of non-byte slice")
308 }
309
310 return *(*[]byte)(v.ptr)
311 case Array:
312 if v.typ().Elem().Kind() != abi.Uint8 {
313 panic("reflect.Value.Bytes of non-byte array")
314 }
315 if !v.CanAddr() {
316 panic("reflect.Value.Bytes of unaddressable byte array")
317 }
318 p := (*byte)(v.ptr)
319 n := int((*arrayType)(unsafe.Pointer(v.typ())).Len)
320 return unsafe.Slice(p, n)
321 }
322 panic(&ValueError{"reflect.Value.Bytes", v.kind()})
323 }
324
325
326
327 func (v Value) runes() []rune {
328 v.mustBe(Slice)
329 if v.typ().Elem().Kind() != abi.Int32 {
330 panic("reflect.Value.Bytes of non-rune slice")
331 }
332
333 return *(*[]rune)(v.ptr)
334 }
335
336
337
338
339
340
341 func (v Value) CanAddr() bool {
342 return v.flag&flagAddr != 0
343 }
344
345
346
347
348
349
350 func (v Value) CanSet() bool {
351 return v.flag&(flagAddr|flagRO) == flagAddr
352 }
353
354 // Call calls the function v with the input arguments in.
355 // As in Go, each input argument must be assignable to the
356 // type of the function's corresponding input parameter.
357 // Call panics if v's Kind is not Func.
358 // As in Go, it panics if v was obtained by accessing
359 // unexported struct fields.
360 // It returns the output results as Values. As in Go, each output
361 // result must be assignable to the type of the function's corresponding output parameter.
362 func (v Value) Call(in []Value) []Value {
363 v.mustBe(Func)
364 v.mustBeExported()
365 return v.call("Call", in)
366 }
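// Illustrative usage of Call (a sketch from a caller's point of view; the
// function value and variable names below are hypothetical):
//
//	sum := reflect.ValueOf(func(a, b int) int { return a + b })
//	out := sum.Call([]reflect.Value{reflect.ValueOf(1), reflect.ValueOf(2)})
//	total := out[0].Int() // total == 3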
367
368
369
370
371
372
373
374
375 func (v Value) CallSlice(in []Value) []Value {
376 v.mustBe(Func)
377 v.mustBeExported()
378 return v.call("CallSlice", in)
379 }
380
381 var callGC bool
382
383 const debugReflectCall = false
384
385 func (v Value) call(op string, in []Value) []Value {
386
387 t := (*funcType)(unsafe.Pointer(v.typ()))
388 var (
389 fn unsafe.Pointer
390 rcvr Value
391 rcvrtype *abi.Type
392 )
393 if v.flag&flagMethod != 0 {
394 rcvr = v
395 rcvrtype, t, fn = methodReceiver(op, v, int(v.flag)>>flagMethodShift)
396 } else if v.flag&flagIndir != 0 {
397 fn = *(*unsafe.Pointer)(v.ptr)
398 } else {
399 fn = v.ptr
400 }
401
402 if fn == nil {
403 panic("reflect.Value.Call: call of nil function")
404 }
405
406 isSlice := op == "CallSlice"
407 n := t.NumIn()
408 isVariadic := t.IsVariadic()
409 if isSlice {
410 if !isVariadic {
411 panic("reflect: CallSlice of non-variadic function")
412 }
413 if len(in) < n {
414 panic("reflect: CallSlice with too few input arguments")
415 }
416 if len(in) > n {
417 panic("reflect: CallSlice with too many input arguments")
418 }
419 } else {
420 if isVariadic {
421 n--
422 }
423 if len(in) < n {
424 panic("reflect: Call with too few input arguments")
425 }
426 if !isVariadic && len(in) > n {
427 panic("reflect: Call with too many input arguments")
428 }
429 }
430 for _, x := range in {
431 if x.Kind() == Invalid {
432 panic("reflect: " + op + " using zero Value argument")
433 }
434 }
435 for i := 0; i < n; i++ {
436 if xt, targ := in[i].Type(), t.In(i); !xt.AssignableTo(toRType(targ)) {
437 panic("reflect: " + op + " using " + xt.String() + " as type " + stringFor(targ))
438 }
439 }
440 if !isSlice && isVariadic {
441
442 m := len(in) - n
443 slice := MakeSlice(toRType(t.In(n)), m, m)
444 elem := toRType(t.In(n)).Elem()
445 for i := 0; i < m; i++ {
446 x := in[n+i]
447 if xt := x.Type(); !xt.AssignableTo(elem) {
448 panic("reflect: cannot use " + xt.String() + " as type " + elem.String() + " in " + op)
449 }
450 slice.Index(i).Set(x)
451 }
452 origIn := in
453 in = make([]Value, n+1)
454 copy(in[:n], origIn)
455 in[n] = slice
456 }
457
458 nin := len(in)
459 if nin != t.NumIn() {
460 panic("reflect.Value.Call: wrong argument count")
461 }
462 nout := t.NumOut()
463
464
465 var regArgs abi.RegArgs
466
467
468 frametype, framePool, abid := funcLayout(t, rcvrtype)
469
470
471 var stackArgs unsafe.Pointer
472 if frametype.Size() != 0 {
473 if nout == 0 {
474 stackArgs = framePool.Get().(unsafe.Pointer)
475 } else {
476
477
478 stackArgs = unsafe_New(frametype)
479 }
480 }
481 frameSize := frametype.Size()
482
483 if debugReflectCall {
484 println("reflect.call", stringFor(&t.Type))
485 abid.dump()
486 }
487
488
489
490
491 inStart := 0
492 if rcvrtype != nil {
493
494
495
496 switch st := abid.call.steps[0]; st.kind {
497 case abiStepStack:
498 storeRcvr(rcvr, stackArgs)
499 case abiStepPointer:
500 storeRcvr(rcvr, unsafe.Pointer(&regArgs.Ptrs[st.ireg]))
501 fallthrough
502 case abiStepIntReg:
503 storeRcvr(rcvr, unsafe.Pointer(&regArgs.Ints[st.ireg]))
504 case abiStepFloatReg:
505 storeRcvr(rcvr, unsafe.Pointer(&regArgs.Floats[st.freg]))
506 default:
507 panic("unknown ABI parameter kind")
508 }
509 inStart = 1
510 }
511
512
513 for i, v := range in {
514 v.mustBeExported()
515 targ := toRType(t.In(i))
516
517
518
519 v = v.assignTo("reflect.Value.Call", &targ.t, nil)
520 stepsLoop:
521 for _, st := range abid.call.stepsForValue(i + inStart) {
522 switch st.kind {
523 case abiStepStack:
524
525 addr := add(stackArgs, st.stkOff, "precomputed stack arg offset")
526 if v.flag&flagIndir != 0 {
527 typedmemmove(&targ.t, addr, v.ptr)
528 } else {
529 *(*unsafe.Pointer)(addr) = v.ptr
530 }
531
532 break stepsLoop
533 case abiStepIntReg, abiStepPointer:
534
535 if v.flag&flagIndir != 0 {
536 offset := add(v.ptr, st.offset, "precomputed value offset")
537 if st.kind == abiStepPointer {
538
539
540
541 regArgs.Ptrs[st.ireg] = *(*unsafe.Pointer)(offset)
542 }
543 intToReg(&regArgs, st.ireg, st.size, offset)
544 } else {
545 if st.kind == abiStepPointer {
546
547 regArgs.Ptrs[st.ireg] = v.ptr
548 }
549 regArgs.Ints[st.ireg] = uintptr(v.ptr)
550 }
551 case abiStepFloatReg:
552
553 if v.flag&flagIndir == 0 {
554 panic("attempted to copy pointer to FP register")
555 }
556 offset := add(v.ptr, st.offset, "precomputed value offset")
557 floatToReg(&regArgs, st.freg, st.size, offset)
558 default:
559 panic("unknown ABI part kind")
560 }
561 }
562 }
563
564
565 frameSize = align(frameSize, goarch.PtrSize)
566 frameSize += abid.spill
567
568
569 regArgs.ReturnIsPtr = abid.outRegPtrs
570
571 if debugReflectCall {
572 regArgs.Dump()
573 }
574
575
576 if callGC {
577 runtime.GC()
578 }
579
580
581 call(frametype, fn, stackArgs, uint32(frametype.Size()), uint32(abid.retOffset), uint32(frameSize), &regArgs)
582
583
584 if callGC {
585 runtime.GC()
586 }
587
588 var ret []Value
589 if nout == 0 {
590 if stackArgs != nil {
591 typedmemclr(frametype, stackArgs)
592 framePool.Put(stackArgs)
593 }
594 } else {
595 if stackArgs != nil {
596
597
598
599 typedmemclrpartial(frametype, stackArgs, 0, abid.retOffset)
600 }
601
602
603 ret = make([]Value, nout)
604 for i := 0; i < nout; i++ {
605 tv := t.Out(i)
606 if tv.Size() == 0 {
607
608
609 ret[i] = Zero(toRType(tv))
610 continue
611 }
612 steps := abid.ret.stepsForValue(i)
613 if st := steps[0]; st.kind == abiStepStack {
614
615
616
617 fl := flagIndir | flag(tv.Kind())
618 ret[i] = Value{tv, add(stackArgs, st.stkOff, "tv.Size() != 0"), fl}
619
620
621
622
623 continue
624 }
625
626
627 if !tv.IfaceIndir() {
628
629
630 if steps[0].kind != abiStepPointer {
631 print("kind=", steps[0].kind, ", type=", stringFor(tv), "\n")
632 panic("mismatch between ABI description and types")
633 }
634 ret[i] = Value{tv, regArgs.Ptrs[steps[0].ireg], flag(tv.Kind())}
635 continue
636 }
637
638
639
640
641
642
643
644
645
646
647 s := unsafe_New(tv)
648 for _, st := range steps {
649 switch st.kind {
650 case abiStepIntReg:
651 offset := add(s, st.offset, "precomputed value offset")
652 intFromReg(&regArgs, st.ireg, st.size, offset)
653 case abiStepPointer:
654 s := add(s, st.offset, "precomputed value offset")
655 *((*unsafe.Pointer)(s)) = regArgs.Ptrs[st.ireg]
656 case abiStepFloatReg:
657 offset := add(s, st.offset, "precomputed value offset")
658 floatFromReg(&regArgs, st.freg, st.size, offset)
659 case abiStepStack:
660 panic("register-based return value has stack component")
661 default:
662 panic("unknown ABI part kind")
663 }
664 }
665 ret[i] = Value{tv, s, flagIndir | flag(tv.Kind())}
666 }
667 }
668
669 return ret
670 }
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692 func callReflect(ctxt *makeFuncImpl, frame unsafe.Pointer, retValid *bool, regs *abi.RegArgs) {
693 if callGC {
694
695
696
697
698
699 runtime.GC()
700 }
701 ftyp := ctxt.ftyp
702 f := ctxt.fn
703
704 _, _, abid := funcLayout(ftyp, nil)
705
706
707 ptr := frame
708 in := make([]Value, 0, int(ftyp.InCount))
709 for i, typ := range ftyp.InSlice() {
710 if typ.Size() == 0 {
711 in = append(in, Zero(toRType(typ)))
712 continue
713 }
714 v := Value{typ, nil, flag(typ.Kind())}
715 steps := abid.call.stepsForValue(i)
716 if st := steps[0]; st.kind == abiStepStack {
717 if typ.IfaceIndir() {
718
719
720
721
722 v.ptr = unsafe_New(typ)
723 if typ.Size() > 0 {
724 typedmemmove(typ, v.ptr, add(ptr, st.stkOff, "typ.size > 0"))
725 }
726 v.flag |= flagIndir
727 } else {
728 v.ptr = *(*unsafe.Pointer)(add(ptr, st.stkOff, "1-ptr"))
729 }
730 } else {
731 if typ.IfaceIndir() {
732
733
734 v.flag |= flagIndir
735 v.ptr = unsafe_New(typ)
736 for _, st := range steps {
737 switch st.kind {
738 case abiStepIntReg:
739 offset := add(v.ptr, st.offset, "precomputed value offset")
740 intFromReg(regs, st.ireg, st.size, offset)
741 case abiStepPointer:
742 s := add(v.ptr, st.offset, "precomputed value offset")
743 *((*unsafe.Pointer)(s)) = regs.Ptrs[st.ireg]
744 case abiStepFloatReg:
745 offset := add(v.ptr, st.offset, "precomputed value offset")
746 floatFromReg(regs, st.freg, st.size, offset)
747 case abiStepStack:
748 panic("register-based return value has stack component")
749 default:
750 panic("unknown ABI part kind")
751 }
752 }
753 } else {
754
755
756 if steps[0].kind != abiStepPointer {
757 print("kind=", steps[0].kind, ", type=", stringFor(typ), "\n")
758 panic("mismatch between ABI description and types")
759 }
760 v.ptr = regs.Ptrs[steps[0].ireg]
761 }
762 }
763 in = append(in, v)
764 }
765
766
767 out := f(in)
768 numOut := ftyp.NumOut()
769 if len(out) != numOut {
770 panic("reflect: wrong return count from function created by MakeFunc")
771 }
772
773
774 if numOut > 0 {
775 for i, typ := range ftyp.OutSlice() {
776 v := out[i]
777 if v.typ() == nil {
778 panic("reflect: function created by MakeFunc using " + funcName(f) +
779 " returned zero Value")
780 }
781 if v.flag&flagRO != 0 {
782 panic("reflect: function created by MakeFunc using " + funcName(f) +
783 " returned value obtained from unexported field")
784 }
785 if typ.Size() == 0 {
786 continue
787 }
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803 v = v.assignTo("reflect.MakeFunc", typ, nil)
804 stepsLoop:
805 for _, st := range abid.ret.stepsForValue(i) {
806 switch st.kind {
807 case abiStepStack:
808
809 addr := add(ptr, st.stkOff, "precomputed stack arg offset")
810
811
812
813
814 if v.flag&flagIndir != 0 {
815 memmove(addr, v.ptr, st.size)
816 } else {
817
818 *(*uintptr)(addr) = uintptr(v.ptr)
819 }
820
821 break stepsLoop
822 case abiStepIntReg, abiStepPointer:
823
824 if v.flag&flagIndir != 0 {
825 offset := add(v.ptr, st.offset, "precomputed value offset")
826 intToReg(regs, st.ireg, st.size, offset)
827 } else {
828
829
830
831
832
833 regs.Ints[st.ireg] = uintptr(v.ptr)
834 }
835 case abiStepFloatReg:
836
837 if v.flag&flagIndir == 0 {
838 panic("attempted to copy pointer to FP register")
839 }
840 offset := add(v.ptr, st.offset, "precomputed value offset")
841 floatToReg(regs, st.freg, st.size, offset)
842 default:
843 panic("unknown ABI part kind")
844 }
845 }
846 }
847 }
848
849
850
851 *retValid = true
852
853
854
855
856
857 runtime.KeepAlive(out)
858
859
860
861
862 runtime.KeepAlive(ctxt)
863 }
864
865
866
867
868
869
870
871
872 func methodReceiver(op string, v Value, methodIndex int) (rcvrtype *abi.Type, t *funcType, fn unsafe.Pointer) {
873 i := methodIndex
874 if v.typ().Kind() == abi.Interface {
875 tt := (*interfaceType)(unsafe.Pointer(v.typ()))
876 if uint(i) >= uint(len(tt.Methods)) {
877 panic("reflect: internal error: invalid method index")
878 }
879 m := &tt.Methods[i]
880 if !tt.nameOff(m.Name).IsExported() {
881 panic("reflect: " + op + " of unexported method")
882 }
883 iface := (*nonEmptyInterface)(v.ptr)
884 if iface.itab == nil {
885 panic("reflect: " + op + " of method on nil interface value")
886 }
887 rcvrtype = iface.itab.Type
888 fn = unsafe.Pointer(&unsafe.Slice(&iface.itab.Fun[0], i+1)[i])
889 t = (*funcType)(unsafe.Pointer(tt.typeOff(m.Typ)))
890 } else {
891 rcvrtype = v.typ()
892 ms := v.typ().ExportedMethods()
893 if uint(i) >= uint(len(ms)) {
894 panic("reflect: internal error: invalid method index")
895 }
896 m := ms[i]
897 if !nameOffFor(v.typ(), m.Name).IsExported() {
898 panic("reflect: " + op + " of unexported method")
899 }
900 ifn := textOffFor(v.typ(), m.Ifn)
901 fn = unsafe.Pointer(&ifn)
902 t = (*funcType)(unsafe.Pointer(typeOffFor(v.typ(), m.Mtyp)))
903 }
904 return
905 }
906
907
908
909
910
911 func storeRcvr(v Value, p unsafe.Pointer) {
912 t := v.typ()
913 if t.Kind() == abi.Interface {
914
915 iface := (*nonEmptyInterface)(v.ptr)
916 *(*unsafe.Pointer)(p) = iface.word
917 } else if v.flag&flagIndir != 0 && !t.IfaceIndir() {
918 *(*unsafe.Pointer)(p) = *(*unsafe.Pointer)(v.ptr)
919 } else {
920 *(*unsafe.Pointer)(p) = v.ptr
921 }
922 }
923
924
925
926 func align(x, n uintptr) uintptr {
927 return (x + n - 1) &^ (n - 1)
928 }
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949 func callMethod(ctxt *methodValue, frame unsafe.Pointer, retValid *bool, regs *abi.RegArgs) {
950 rcvr := ctxt.rcvr
951 rcvrType, valueFuncType, methodFn := methodReceiver("call", rcvr, ctxt.method)
952
953
954
955
956
957
958
959
960
961 _, _, valueABI := funcLayout(valueFuncType, nil)
962 valueFrame, valueRegs := frame, regs
963 methodFrameType, methodFramePool, methodABI := funcLayout(valueFuncType, rcvrType)
964
965
966
967 methodFrame := methodFramePool.Get().(unsafe.Pointer)
968 var methodRegs abi.RegArgs
969
970
971 switch st := methodABI.call.steps[0]; st.kind {
972 case abiStepStack:
973
974
975 storeRcvr(rcvr, methodFrame)
976 case abiStepPointer:
977
978 storeRcvr(rcvr, unsafe.Pointer(&methodRegs.Ptrs[st.ireg]))
979 fallthrough
980 case abiStepIntReg:
981 storeRcvr(rcvr, unsafe.Pointer(&methodRegs.Ints[st.ireg]))
982 case abiStepFloatReg:
983 storeRcvr(rcvr, unsafe.Pointer(&methodRegs.Floats[st.freg]))
984 default:
985 panic("unknown ABI parameter kind")
986 }
987
988
989 for i, t := range valueFuncType.InSlice() {
990 valueSteps := valueABI.call.stepsForValue(i)
991 methodSteps := methodABI.call.stepsForValue(i + 1)
992
993
994 if len(valueSteps) == 0 {
995 if len(methodSteps) != 0 {
996 panic("method ABI and value ABI do not align")
997 }
998 continue
999 }
1000
1001
1002
1003
1004
1005
1006
1007
1008
1009
1010
1011 if vStep := valueSteps[0]; vStep.kind == abiStepStack {
1012 mStep := methodSteps[0]
1013
1014 if mStep.kind == abiStepStack {
1015 if vStep.size != mStep.size {
1016 panic("method ABI and value ABI do not align")
1017 }
1018 typedmemmove(t,
1019 add(methodFrame, mStep.stkOff, "precomputed stack offset"),
1020 add(valueFrame, vStep.stkOff, "precomputed stack offset"))
1021 continue
1022 }
1023
1024 for _, mStep := range methodSteps {
1025 from := add(valueFrame, vStep.stkOff+mStep.offset, "precomputed stack offset")
1026 switch mStep.kind {
1027 case abiStepPointer:
1028
1029 methodRegs.Ptrs[mStep.ireg] = *(*unsafe.Pointer)(from)
1030 fallthrough
1031 case abiStepIntReg:
1032 intToReg(&methodRegs, mStep.ireg, mStep.size, from)
1033 case abiStepFloatReg:
1034 floatToReg(&methodRegs, mStep.freg, mStep.size, from)
1035 default:
1036 panic("unexpected method step")
1037 }
1038 }
1039 continue
1040 }
1041
1042 if mStep := methodSteps[0]; mStep.kind == abiStepStack {
1043 for _, vStep := range valueSteps {
1044 to := add(methodFrame, mStep.stkOff+vStep.offset, "precomputed stack offset")
1045 switch vStep.kind {
1046 case abiStepPointer:
1047
1048 *(*unsafe.Pointer)(to) = valueRegs.Ptrs[vStep.ireg]
1049 case abiStepIntReg:
1050 intFromReg(valueRegs, vStep.ireg, vStep.size, to)
1051 case abiStepFloatReg:
1052 floatFromReg(valueRegs, vStep.freg, vStep.size, to)
1053 default:
1054 panic("unexpected value step")
1055 }
1056 }
1057 continue
1058 }
1059
1060 if len(valueSteps) != len(methodSteps) {
1061
1062
1063
1064 panic("method ABI and value ABI don't align")
1065 }
1066 for i, vStep := range valueSteps {
1067 mStep := methodSteps[i]
1068 if mStep.kind != vStep.kind {
1069 panic("method ABI and value ABI don't align")
1070 }
1071 switch vStep.kind {
1072 case abiStepPointer:
1073
1074 methodRegs.Ptrs[mStep.ireg] = valueRegs.Ptrs[vStep.ireg]
1075 fallthrough
1076 case abiStepIntReg:
1077 methodRegs.Ints[mStep.ireg] = valueRegs.Ints[vStep.ireg]
1078 case abiStepFloatReg:
1079 methodRegs.Floats[mStep.freg] = valueRegs.Floats[vStep.freg]
1080 default:
1081 panic("unexpected value step")
1082 }
1083 }
1084 }
1085
1086 methodFrameSize := methodFrameType.Size()
1087
1088
1089 methodFrameSize = align(methodFrameSize, goarch.PtrSize)
1090 methodFrameSize += methodABI.spill
1091
1092
1093 methodRegs.ReturnIsPtr = methodABI.outRegPtrs
1094
1095
1096
1097
1098 call(methodFrameType, methodFn, methodFrame, uint32(methodFrameType.Size()), uint32(methodABI.retOffset), uint32(methodFrameSize), &methodRegs)
1099
1100
1101
1102
1103
1104
1105
1106
1107
1108
1109 if valueRegs != nil {
1110 *valueRegs = methodRegs
1111 }
1112 if retSize := methodFrameType.Size() - methodABI.retOffset; retSize > 0 {
1113 valueRet := add(valueFrame, valueABI.retOffset, "valueFrame's size > retOffset")
1114 methodRet := add(methodFrame, methodABI.retOffset, "methodFrame's size > retOffset")
1115
1116 memmove(valueRet, methodRet, retSize)
1117 }
1118
1119
1120
1121 *retValid = true
1122
1123
1124
1125
1126 typedmemclr(methodFrameType, methodFrame)
1127 methodFramePool.Put(methodFrame)
1128
1129
1130 runtime.KeepAlive(ctxt)
1131
1132
1133
1134
1135 runtime.KeepAlive(valueRegs)
1136 }
1137
1138
1139 func funcName(f func([]Value) []Value) string {
1140 pc := *(*uintptr)(unsafe.Pointer(&f))
1141 rf := runtime.FuncForPC(pc)
1142 if rf != nil {
1143 return rf.Name()
1144 }
1145 return "closure"
1146 }
1147
1148
1149
1150 func (v Value) Cap() int {
1151
1152 if v.kind() == Slice {
1153 return (*unsafeheader.Slice)(v.ptr).Cap
1154 }
1155 return v.capNonSlice()
1156 }
1157
1158 func (v Value) capNonSlice() int {
1159 k := v.kind()
1160 switch k {
1161 case Array:
1162 return v.typ().Len()
1163 case Chan:
1164 return chancap(v.pointer())
1165 case Ptr:
1166 if v.typ().Elem().Kind() == abi.Array {
1167 return v.typ().Elem().Len()
1168 }
1169 panic("reflect: call of reflect.Value.Cap on ptr to non-array Value")
1170 }
1171 panic(&ValueError{"reflect.Value.Cap", v.kind()})
1172 }
1173
1174
1175
1176
1177 func (v Value) Close() {
1178 v.mustBe(Chan)
1179 v.mustBeExported()
1180 tt := (*chanType)(unsafe.Pointer(v.typ()))
1181 if ChanDir(tt.Dir)&SendDir == 0 {
1182 panic("reflect: close of receive-only channel")
1183 }
1184
1185 chanclose(v.pointer())
1186 }
1187
1188
1189 func (v Value) CanComplex() bool {
1190 switch v.kind() {
1191 case Complex64, Complex128:
1192 return true
1193 default:
1194 return false
1195 }
1196 }
1197
1198
1199
1200 func (v Value) Complex() complex128 {
1201 k := v.kind()
1202 switch k {
1203 case Complex64:
1204 return complex128(*(*complex64)(v.ptr))
1205 case Complex128:
1206 return *(*complex128)(v.ptr)
1207 }
1208 panic(&ValueError{"reflect.Value.Complex", v.kind()})
1209 }
1210
1211 // Elem returns the value that the interface v contains
1212 // or that the pointer v points to.
1213 // It panics if v's Kind is not Interface or Pointer.
1214 // It returns the zero Value if v is nil.
1215 func (v Value) Elem() Value {
1216 k := v.kind()
1217 switch k {
1218 case Interface:
1219 var eface any
1220 if v.typ().NumMethod() == 0 {
1221 eface = *(*any)(v.ptr)
1222 } else {
1223 eface = (any)(*(*interface {
1224 M()
1225 })(v.ptr))
1226 }
1227 x := unpackEface(eface)
1228 if x.flag != 0 {
1229 x.flag |= v.flag.ro()
1230 }
1231 return x
1232 case Pointer:
1233 ptr := v.ptr
1234 if v.flag&flagIndir != 0 {
1235 if v.typ().IfaceIndir() {
1236
1237
1238
1239
1240
1241
1242
1243
1244
1245
1246 if !verifyNotInHeapPtr(*(*uintptr)(ptr)) {
1247 panic("reflect: reflect.Value.Elem on an invalid notinheap pointer")
1248 }
1249 }
1250 ptr = *(*unsafe.Pointer)(ptr)
1251 }
1252
1253 if ptr == nil {
1254 return Value{}
1255 }
1256 tt := (*ptrType)(unsafe.Pointer(v.typ()))
1257 typ := tt.Elem
1258 fl := v.flag&flagRO | flagIndir | flagAddr
1259 fl |= flag(typ.Kind())
1260 return Value{typ, ptr, fl}
1261 }
1262 panic(&ValueError{"reflect.Value.Elem", v.kind()})
1263 }
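// Illustrative usage of Elem (a sketch from a caller's point of view; the
// variable names below are hypothetical):
//
//	x := 7
//	p := reflect.ValueOf(&x)
//	e := p.Elem() // the int Value that p points to; addressable
//	e.SetInt(42)  // sets x to 42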
1264
1265
1266
1267 func (v Value) Field(i int) Value {
1268 if v.kind() != Struct {
1269 panic(&ValueError{"reflect.Value.Field", v.kind()})
1270 }
1271 tt := (*structType)(unsafe.Pointer(v.typ()))
1272 if uint(i) >= uint(len(tt.Fields)) {
1273 panic("reflect: Field index out of range")
1274 }
1275 field := &tt.Fields[i]
1276 typ := field.Typ
1277
1278
1279 fl := v.flag&(flagStickyRO|flagIndir|flagAddr) | flag(typ.Kind())
1280
1281 if !field.Name.IsExported() {
1282 if field.Embedded() {
1283 fl |= flagEmbedRO
1284 } else {
1285 fl |= flagStickyRO
1286 }
1287 }
1288
1289
1290
1291
1292
1293 ptr := add(v.ptr, field.Offset, "same as non-reflect &v.field")
1294 return Value{typ, ptr, fl}
1295 }
1296
1297
1298
1299
1300 func (v Value) FieldByIndex(index []int) Value {
1301 if len(index) == 1 {
1302 return v.Field(index[0])
1303 }
1304 v.mustBe(Struct)
1305 for i, x := range index {
1306 if i > 0 {
1307 if v.Kind() == Pointer && v.typ().Elem().Kind() == abi.Struct {
1308 if v.IsNil() {
1309 panic("reflect: indirection through nil pointer to embedded struct")
1310 }
1311 v = v.Elem()
1312 }
1313 }
1314 v = v.Field(x)
1315 }
1316 return v
1317 }
1318
1319
1320
1321
1322
1323 func (v Value) FieldByIndexErr(index []int) (Value, error) {
1324 if len(index) == 1 {
1325 return v.Field(index[0]), nil
1326 }
1327 v.mustBe(Struct)
1328 for i, x := range index {
1329 if i > 0 {
1330 if v.Kind() == Ptr && v.typ().Elem().Kind() == abi.Struct {
1331 if v.IsNil() {
1332 return Value{}, errors.New("reflect: indirection through nil pointer to embedded struct field " + nameFor(v.typ().Elem()))
1333 }
1334 v = v.Elem()
1335 }
1336 }
1337 v = v.Field(x)
1338 }
1339 return v, nil
1340 }
1341
1342 // FieldByName returns the struct field with the given name.
1343 // It returns the zero Value if no field was found.
1344 // It panics if v's Kind is not struct.
1345 func (v Value) FieldByName(name string) Value {
1346 v.mustBe(Struct)
1347 if f, ok := toRType(v.typ()).FieldByName(name); ok {
1348 return v.FieldByIndex(f.Index)
1349 }
1350 return Value{}
1351 }
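// Illustrative usage of FieldByName (a sketch from a caller's point of view;
// the struct type and variable names below are hypothetical):
//
//	type user struct{ Name string }
//	v := reflect.ValueOf(user{Name: "gopher"})
//	name := v.FieldByName("Name").String() // "gopher"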
1352
1353
1354
1355
1356
1357 func (v Value) FieldByNameFunc(match func(string) bool) Value {
1358 if f, ok := toRType(v.typ()).FieldByNameFunc(match); ok {
1359 return v.FieldByIndex(f.Index)
1360 }
1361 return Value{}
1362 }
1363
1364
1365 func (v Value) CanFloat() bool {
1366 switch v.kind() {
1367 case Float32, Float64:
1368 return true
1369 default:
1370 return false
1371 }
1372 }
1373
1374
1375
1376 func (v Value) Float() float64 {
1377 k := v.kind()
1378 switch k {
1379 case Float32:
1380 return float64(*(*float32)(v.ptr))
1381 case Float64:
1382 return *(*float64)(v.ptr)
1383 }
1384 panic(&ValueError{"reflect.Value.Float", v.kind()})
1385 }
1386
1387 var uint8Type = rtypeOf(uint8(0))
1388
1389
1390
1391 func (v Value) Index(i int) Value {
1392 switch v.kind() {
1393 case Array:
1394 tt := (*arrayType)(unsafe.Pointer(v.typ()))
1395 if uint(i) >= uint(tt.Len) {
1396 panic("reflect: array index out of range")
1397 }
1398 typ := tt.Elem
1399 offset := uintptr(i) * typ.Size()
1400
1401
1402
1403
1404
1405
1406 val := add(v.ptr, offset, "same as &v[i], i < tt.len")
1407 fl := v.flag&(flagIndir|flagAddr) | v.flag.ro() | flag(typ.Kind())
1408 return Value{typ, val, fl}
1409
1410 case Slice:
1411
1412
1413 s := (*unsafeheader.Slice)(v.ptr)
1414 if uint(i) >= uint(s.Len) {
1415 panic("reflect: slice index out of range")
1416 }
1417 tt := (*sliceType)(unsafe.Pointer(v.typ()))
1418 typ := tt.Elem
1419 val := arrayAt(s.Data, i, typ.Size(), "i < s.Len")
1420 fl := flagAddr | flagIndir | v.flag.ro() | flag(typ.Kind())
1421 return Value{typ, val, fl}
1422
1423 case String:
1424 s := (*unsafeheader.String)(v.ptr)
1425 if uint(i) >= uint(s.Len) {
1426 panic("reflect: string index out of range")
1427 }
1428 p := arrayAt(s.Data, i, 1, "i < s.Len")
1429 fl := v.flag.ro() | flag(Uint8) | flagIndir
1430 return Value{uint8Type, p, fl}
1431 }
1432 panic(&ValueError{"reflect.Value.Index", v.kind()})
1433 }
1434
1435
1436 func (v Value) CanInt() bool {
1437 switch v.kind() {
1438 case Int, Int8, Int16, Int32, Int64:
1439 return true
1440 default:
1441 return false
1442 }
1443 }
1444
1445
1446
1447 func (v Value) Int() int64 {
1448 k := v.kind()
1449 p := v.ptr
1450 switch k {
1451 case Int:
1452 return int64(*(*int)(p))
1453 case Int8:
1454 return int64(*(*int8)(p))
1455 case Int16:
1456 return int64(*(*int16)(p))
1457 case Int32:
1458 return int64(*(*int32)(p))
1459 case Int64:
1460 return *(*int64)(p)
1461 }
1462 panic(&ValueError{"reflect.Value.Int", v.kind()})
1463 }
1464
1465
1466 func (v Value) CanInterface() bool {
1467 if v.flag == 0 {
1468 panic(&ValueError{"reflect.Value.CanInterface", Invalid})
1469 }
1470 return v.flag&flagRO == 0
1471 }
1472
1473
1474
1475
1476
1477
1478
1479
1480 func (v Value) Interface() (i any) {
1481 return valueInterface(v, true)
1482 }
1483
1484 func valueInterface(v Value, safe bool) any {
1485 if v.flag == 0 {
1486 panic(&ValueError{"reflect.Value.Interface", Invalid})
1487 }
1488 if safe && v.flag&flagRO != 0 {
1489
1490
1491
1492 panic("reflect.Value.Interface: cannot return value obtained from unexported field or method")
1493 }
1494 if v.flag&flagMethod != 0 {
1495 v = makeMethodValue("Interface", v)
1496 }
1497
1498 if v.kind() == Interface {
1499
1500
1501
1502 if v.NumMethod() == 0 {
1503 return *(*any)(v.ptr)
1504 }
1505 return *(*interface {
1506 M()
1507 })(v.ptr)
1508 }
1509
1510 return packEface(v)
1511 }
1512
1513
1514
1515
1516
1517
1518
1519
1520
1521
1522 func (v Value) InterfaceData() [2]uintptr {
1523 v.mustBe(Interface)
1524
1525 escapes(v.ptr)
1526
1527
1528
1529
1530
1531 return *(*[2]uintptr)(v.ptr)
1532 }
1533
1534 // IsNil reports whether its argument v is nil. The argument must be
1535 // a chan, func, interface, map, pointer, or slice value; if it is
1536 // not, IsNil panics. Note that IsNil is not always equivalent to a
1537 // regular comparison with nil in Go. For example, if v was created
1538 // by calling ValueOf with an uninitialized interface variable i,
1539 // i==nil will be true but v.IsNil will panic as v will be the zero
1540 // Value.
1541 func (v Value) IsNil() bool {
1542 k := v.kind()
1543 switch k {
1544 case Chan, Func, Map, Pointer, UnsafePointer:
1545 if v.flag&flagMethod != 0 {
1546 return false
1547 }
1548 ptr := v.ptr
1549 if v.flag&flagIndir != 0 {
1550 ptr = *(*unsafe.Pointer)(ptr)
1551 }
1552 return ptr == nil
1553 case Interface, Slice:
1554
1555
1556 return *(*unsafe.Pointer)(v.ptr) == nil
1557 }
1558 panic(&ValueError{"reflect.Value.IsNil", v.kind()})
1559 }
1560
1561 // IsValid reports whether v represents a value.
1562 // It returns false if v is the zero Value.
1563 // If IsValid returns false, all other methods except String panic.
1564 // Most functions and methods never return an invalid Value.
1565 // If one does, its documentation states the conditions explicitly.
1566 func (v Value) IsValid() bool {
1567 return v.flag != 0
1568 }
1569
1570 // IsZero reports whether v is the zero value for its type.
1571 // It panics if the value is invalid.
1572 func (v Value) IsZero() bool {
1573 switch v.kind() {
1574 case Bool:
1575 return !v.Bool()
1576 case Int, Int8, Int16, Int32, Int64:
1577 return v.Int() == 0
1578 case Uint, Uint8, Uint16, Uint32, Uint64, Uintptr:
1579 return v.Uint() == 0
1580 case Float32, Float64:
1581 return v.Float() == 0
1582 case Complex64, Complex128:
1583 return v.Complex() == 0
1584 case Array:
1585 if v.flag&flagIndir == 0 {
1586 return v.ptr == nil
1587 }
1588 typ := (*abi.ArrayType)(unsafe.Pointer(v.typ()))
1589
1590 if typ.Equal != nil && typ.Size() <= abi.ZeroValSize {
1591
1592
1593
1594 return typ.Equal(abi.NoEscape(v.ptr), unsafe.Pointer(&zeroVal[0]))
1595 }
1596 if typ.TFlag&abi.TFlagRegularMemory != 0 {
1597
1598
1599 return isZero(unsafe.Slice(((*byte)(v.ptr)), typ.Size()))
1600 }
1601 n := int(typ.Len)
1602 for i := 0; i < n; i++ {
1603 if !v.Index(i).IsZero() {
1604 return false
1605 }
1606 }
1607 return true
1608 case Chan, Func, Interface, Map, Pointer, Slice, UnsafePointer:
1609 return v.IsNil()
1610 case String:
1611 return v.Len() == 0
1612 case Struct:
1613 if v.flag&flagIndir == 0 {
1614 return v.ptr == nil
1615 }
1616 typ := (*abi.StructType)(unsafe.Pointer(v.typ()))
1617
1618 if typ.Equal != nil && typ.Size() <= abi.ZeroValSize {
1619
1620 return typ.Equal(abi.NoEscape(v.ptr), unsafe.Pointer(&zeroVal[0]))
1621 }
1622 if typ.TFlag&abi.TFlagRegularMemory != 0 {
1623
1624
1625 return isZero(unsafe.Slice(((*byte)(v.ptr)), typ.Size()))
1626 }
1627
1628 n := v.NumField()
1629 for i := 0; i < n; i++ {
1630 if !v.Field(i).IsZero() && v.Type().Field(i).Name != "_" {
1631 return false
1632 }
1633 }
1634 return true
1635 default:
1636
1637
1638 panic(&ValueError{"reflect.Value.IsZero", v.Kind()})
1639 }
1640 }
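// Illustrative usage of IsZero (a sketch from a caller's point of view; the
// struct type below is hypothetical):
//
//	type point struct{ X, Y int }
//	reflect.ValueOf(point{}).IsZero()     // true
//	reflect.ValueOf(point{X: 1}).IsZero() // false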
1641
1642
1643
1644 func isZero(b []byte) bool {
1645 if len(b) == 0 {
1646 return true
1647 }
1648 const n = 32
1649
1650 for uintptr(unsafe.Pointer(&b[0]))%8 != 0 {
1651 if b[0] != 0 {
1652 return false
1653 }
1654 b = b[1:]
1655 if len(b) == 0 {
1656 return true
1657 }
1658 }
1659 for len(b)%8 != 0 {
1660 if b[len(b)-1] != 0 {
1661 return false
1662 }
1663 b = b[:len(b)-1]
1664 }
1665 if len(b) == 0 {
1666 return true
1667 }
1668 w := unsafe.Slice((*uint64)(unsafe.Pointer(&b[0])), len(b)/8)
1669 for len(w)%n != 0 {
1670 if w[0] != 0 {
1671 return false
1672 }
1673 w = w[1:]
1674 }
1675 for len(w) >= n {
1676 if w[0] != 0 || w[1] != 0 || w[2] != 0 || w[3] != 0 ||
1677 w[4] != 0 || w[5] != 0 || w[6] != 0 || w[7] != 0 ||
1678 w[8] != 0 || w[9] != 0 || w[10] != 0 || w[11] != 0 ||
1679 w[12] != 0 || w[13] != 0 || w[14] != 0 || w[15] != 0 ||
1680 w[16] != 0 || w[17] != 0 || w[18] != 0 || w[19] != 0 ||
1681 w[20] != 0 || w[21] != 0 || w[22] != 0 || w[23] != 0 ||
1682 w[24] != 0 || w[25] != 0 || w[26] != 0 || w[27] != 0 ||
1683 w[28] != 0 || w[29] != 0 || w[30] != 0 || w[31] != 0 {
1684 return false
1685 }
1686 w = w[n:]
1687 }
1688 return true
1689 }
1690
1691
1692
1693 func (v Value) SetZero() {
1694 v.mustBeAssignable()
1695 switch v.kind() {
1696 case Bool:
1697 *(*bool)(v.ptr) = false
1698 case Int:
1699 *(*int)(v.ptr) = 0
1700 case Int8:
1701 *(*int8)(v.ptr) = 0
1702 case Int16:
1703 *(*int16)(v.ptr) = 0
1704 case Int32:
1705 *(*int32)(v.ptr) = 0
1706 case Int64:
1707 *(*int64)(v.ptr) = 0
1708 case Uint:
1709 *(*uint)(v.ptr) = 0
1710 case Uint8:
1711 *(*uint8)(v.ptr) = 0
1712 case Uint16:
1713 *(*uint16)(v.ptr) = 0
1714 case Uint32:
1715 *(*uint32)(v.ptr) = 0
1716 case Uint64:
1717 *(*uint64)(v.ptr) = 0
1718 case Uintptr:
1719 *(*uintptr)(v.ptr) = 0
1720 case Float32:
1721 *(*float32)(v.ptr) = 0
1722 case Float64:
1723 *(*float64)(v.ptr) = 0
1724 case Complex64:
1725 *(*complex64)(v.ptr) = 0
1726 case Complex128:
1727 *(*complex128)(v.ptr) = 0
1728 case String:
1729 *(*string)(v.ptr) = ""
1730 case Slice:
1731 *(*unsafeheader.Slice)(v.ptr) = unsafeheader.Slice{}
1732 case Interface:
1733 *(*abi.EmptyInterface)(v.ptr) = abi.EmptyInterface{}
1734 case Chan, Func, Map, Pointer, UnsafePointer:
1735 *(*unsafe.Pointer)(v.ptr) = nil
1736 case Array, Struct:
1737 typedmemclr(v.typ(), v.ptr)
1738 default:
1739
1740
1741 panic(&ValueError{"reflect.Value.SetZero", v.Kind()})
1742 }
1743 }
1744
1745 // Kind returns v's Kind.
1746 // If v is the zero Value (IsValid returns false), Kind returns Invalid.
1747 func (v Value) Kind() Kind {
1748 return v.kind()
1749 }
1750
1751
1752
1753 func (v Value) Len() int {
1754
1755 if v.kind() == Slice {
1756 return (*unsafeheader.Slice)(v.ptr).Len
1757 }
1758 return v.lenNonSlice()
1759 }
1760
1761 func (v Value) lenNonSlice() int {
1762 switch k := v.kind(); k {
1763 case Array:
1764 tt := (*arrayType)(unsafe.Pointer(v.typ()))
1765 return int(tt.Len)
1766 case Chan:
1767 return chanlen(v.pointer())
1768 case Map:
1769 return maplen(v.pointer())
1770 case String:
1771
1772 return (*unsafeheader.String)(v.ptr).Len
1773 case Ptr:
1774 if v.typ().Elem().Kind() == abi.Array {
1775 return v.typ().Elem().Len()
1776 }
1777 panic("reflect: call of reflect.Value.Len on ptr to non-array Value")
1778 }
1779 panic(&ValueError{"reflect.Value.Len", v.kind()})
1780 }
1781
1782 var stringType = rtypeOf("")
1783
1784
1785
1786
1787
1788 func (v Value) MapIndex(key Value) Value {
1789 v.mustBe(Map)
1790 tt := (*mapType)(unsafe.Pointer(v.typ()))
1791
1792
1793
1794
1795
1796
1797
1798
1799
1800 var e unsafe.Pointer
1801 if (tt.Key == stringType || key.kind() == String) && tt.Key == key.typ() && tt.Elem.Size() <= abi.MapMaxElemBytes {
1802 k := *(*string)(key.ptr)
1803 e = mapaccess_faststr(v.typ(), v.pointer(), k)
1804 } else {
1805 key = key.assignTo("reflect.Value.MapIndex", tt.Key, nil)
1806 var k unsafe.Pointer
1807 if key.flag&flagIndir != 0 {
1808 k = key.ptr
1809 } else {
1810 k = unsafe.Pointer(&key.ptr)
1811 }
1812 e = mapaccess(v.typ(), v.pointer(), k)
1813 }
1814 if e == nil {
1815 return Value{}
1816 }
1817 typ := tt.Elem
1818 fl := (v.flag | key.flag).ro()
1819 fl |= flag(typ.Kind())
1820 return copyVal(typ, fl, e)
1821 }
1822
1823
1824
1825
1826
1827 func (v Value) MapKeys() []Value {
1828 v.mustBe(Map)
1829 tt := (*mapType)(unsafe.Pointer(v.typ()))
1830 keyType := tt.Key
1831
1832 fl := v.flag.ro() | flag(keyType.Kind())
1833
1834 m := v.pointer()
1835 mlen := int(0)
1836 if m != nil {
1837 mlen = maplen(m)
1838 }
1839 var it hiter
1840 mapiterinit(v.typ(), m, &it)
1841 a := make([]Value, mlen)
1842 var i int
1843 for i = 0; i < len(a); i++ {
1844 key := mapiterkey(&it)
1845 if key == nil {
1846
1847
1848
1849 break
1850 }
1851 a[i] = copyVal(keyType, fl, key)
1852 mapiternext(&it)
1853 }
1854 return a[:i]
1855 }
1856
1857
1858
1859
1860
1861 type hiter struct {
1862 key unsafe.Pointer
1863 elem unsafe.Pointer
1864 t unsafe.Pointer
1865 h unsafe.Pointer
1866 buckets unsafe.Pointer
1867 bptr unsafe.Pointer
1868 overflow *[]unsafe.Pointer
1869 oldoverflow *[]unsafe.Pointer
1870 startBucket uintptr
1871 offset uint8
1872 wrapped bool
1873 B uint8
1874 i uint8
1875 bucket uintptr
1876 checkBucket uintptr
1877 }
1878
1879 func (h *hiter) initialized() bool {
1880 return h.t != nil
1881 }
1882
1883
1884
1885 type MapIter struct {
1886 m Value
1887 hiter hiter
1888 }
1889
1890
1891 func (iter *MapIter) Key() Value {
1892 if !iter.hiter.initialized() {
1893 panic("MapIter.Key called before Next")
1894 }
1895 iterkey := mapiterkey(&iter.hiter)
1896 if iterkey == nil {
1897 panic("MapIter.Key called on exhausted iterator")
1898 }
1899
1900 t := (*mapType)(unsafe.Pointer(iter.m.typ()))
1901 ktype := t.Key
1902 return copyVal(ktype, iter.m.flag.ro()|flag(ktype.Kind()), iterkey)
1903 }
1904
1905
1906
1907
1908
1909 func (v Value) SetIterKey(iter *MapIter) {
1910 if !iter.hiter.initialized() {
1911 panic("reflect: Value.SetIterKey called before Next")
1912 }
1913 iterkey := mapiterkey(&iter.hiter)
1914 if iterkey == nil {
1915 panic("reflect: Value.SetIterKey called on exhausted iterator")
1916 }
1917
1918 v.mustBeAssignable()
1919 var target unsafe.Pointer
1920 if v.kind() == Interface {
1921 target = v.ptr
1922 }
1923
1924 t := (*mapType)(unsafe.Pointer(iter.m.typ()))
1925 ktype := t.Key
1926
1927 iter.m.mustBeExported()
1928 key := Value{ktype, iterkey, iter.m.flag | flag(ktype.Kind()) | flagIndir}
1929 key = key.assignTo("reflect.MapIter.SetKey", v.typ(), target)
1930 typedmemmove(v.typ(), v.ptr, key.ptr)
1931 }
1932
1933
1934 func (iter *MapIter) Value() Value {
1935 if !iter.hiter.initialized() {
1936 panic("MapIter.Value called before Next")
1937 }
1938 iterelem := mapiterelem(&iter.hiter)
1939 if iterelem == nil {
1940 panic("MapIter.Value called on exhausted iterator")
1941 }
1942
1943 t := (*mapType)(unsafe.Pointer(iter.m.typ()))
1944 vtype := t.Elem
1945 return copyVal(vtype, iter.m.flag.ro()|flag(vtype.Kind()), iterelem)
1946 }
1947
1948
1949
1950
1951
1952 func (v Value) SetIterValue(iter *MapIter) {
1953 if !iter.hiter.initialized() {
1954 panic("reflect: Value.SetIterValue called before Next")
1955 }
1956 iterelem := mapiterelem(&iter.hiter)
1957 if iterelem == nil {
1958 panic("reflect: Value.SetIterValue called on exhausted iterator")
1959 }
1960
1961 v.mustBeAssignable()
1962 var target unsafe.Pointer
1963 if v.kind() == Interface {
1964 target = v.ptr
1965 }
1966
1967 t := (*mapType)(unsafe.Pointer(iter.m.typ()))
1968 vtype := t.Elem
1969
1970 iter.m.mustBeExported()
1971 elem := Value{vtype, iterelem, iter.m.flag | flag(vtype.Kind()) | flagIndir}
1972 elem = elem.assignTo("reflect.MapIter.SetValue", v.typ(), target)
1973 typedmemmove(v.typ(), v.ptr, elem.ptr)
1974 }
1975
1976
1977
1978
1979 func (iter *MapIter) Next() bool {
1980 if !iter.m.IsValid() {
1981 panic("MapIter.Next called on an iterator that does not have an associated map Value")
1982 }
1983 if !iter.hiter.initialized() {
1984 mapiterinit(iter.m.typ(), iter.m.pointer(), &iter.hiter)
1985 } else {
1986 if mapiterkey(&iter.hiter) == nil {
1987 panic("MapIter.Next called on exhausted iterator")
1988 }
1989 mapiternext(&iter.hiter)
1990 }
1991 return mapiterkey(&iter.hiter) != nil
1992 }
1993
1994
1995
1996
1997
1998 func (iter *MapIter) Reset(v Value) {
1999 if v.IsValid() {
2000 v.mustBe(Map)
2001 }
2002 iter.m = v
2003 iter.hiter = hiter{}
2004 }
2005
2006
2007
2008
2009
2010
2011
2012
2013
2014
2015
2016
2017
2018
2019
2020
2021 func (v Value) MapRange() *MapIter {
2022
2023
2024
2025
2026 if v.kind() != Map {
2027 v.panicNotMap()
2028 }
2029 return &MapIter{m: v}
2030 }
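// Illustrative usage of MapRange (a sketch from a caller's point of view; the
// map and variable names below are hypothetical):
//
//	m := map[string]int{"a": 1, "b": 2}
//	iter := reflect.ValueOf(m).MapRange()
//	for iter.Next() {
//		k := iter.Key().String()
//		n := iter.Value().Int()
//		_, _ = k, n
//	}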
2031
2032
2033
2034
2035
2036
2037 func (f flag) panicNotMap() {
2038 f.mustBe(Map)
2039 }
2040
2041
2042
2043 func copyVal(typ *abi.Type, fl flag, ptr unsafe.Pointer) Value {
2044 if typ.IfaceIndir() {
2045
2046
2047 c := unsafe_New(typ)
2048 typedmemmove(typ, c, ptr)
2049 return Value{typ, c, fl | flagIndir}
2050 }
2051 return Value{typ, *(*unsafe.Pointer)(ptr), fl}
2052 }
2053
2054
2055
2056
2057
2058 func (v Value) Method(i int) Value {
2059 if v.typ() == nil {
2060 panic(&ValueError{"reflect.Value.Method", Invalid})
2061 }
2062 if v.flag&flagMethod != 0 || uint(i) >= uint(toRType(v.typ()).NumMethod()) {
2063 panic("reflect: Method index out of range")
2064 }
2065 if v.typ().Kind() == abi.Interface && v.IsNil() {
2066 panic("reflect: Method on nil interface value")
2067 }
2068 fl := v.flag.ro() | (v.flag & flagIndir)
2069 fl |= flag(Func)
2070 fl |= flag(i)<<flagMethodShift | flagMethod
2071 return Value{v.typ(), v.ptr, fl}
2072 }
2073
2074
2075
2076
2077
2078
2079 func (v Value) NumMethod() int {
2080 if v.typ() == nil {
2081 panic(&ValueError{"reflect.Value.NumMethod", Invalid})
2082 }
2083 if v.flag&flagMethod != 0 {
2084 return 0
2085 }
2086 return toRType(v.typ()).NumMethod()
2087 }
2088
2089
2090
2091
2092
2093
2094 func (v Value) MethodByName(name string) Value {
2095 if v.typ() == nil {
2096 panic(&ValueError{"reflect.Value.MethodByName", Invalid})
2097 }
2098 if v.flag&flagMethod != 0 {
2099 return Value{}
2100 }
2101 m, ok := toRType(v.typ()).MethodByName(name)
2102 if !ok {
2103 return Value{}
2104 }
2105 return v.Method(m.Index)
2106 }
2107
2108
2109
2110 func (v Value) NumField() int {
2111 v.mustBe(Struct)
2112 tt := (*structType)(unsafe.Pointer(v.typ()))
2113 return len(tt.Fields)
2114 }
2115
2116
2117
2118 func (v Value) OverflowComplex(x complex128) bool {
2119 k := v.kind()
2120 switch k {
2121 case Complex64:
2122 return overflowFloat32(real(x)) || overflowFloat32(imag(x))
2123 case Complex128:
2124 return false
2125 }
2126 panic(&ValueError{"reflect.Value.OverflowComplex", v.kind()})
2127 }
2128
2129
2130
2131 func (v Value) OverflowFloat(x float64) bool {
2132 k := v.kind()
2133 switch k {
2134 case Float32:
2135 return overflowFloat32(x)
2136 case Float64:
2137 return false
2138 }
2139 panic(&ValueError{"reflect.Value.OverflowFloat", v.kind()})
2140 }
2141
2142 func overflowFloat32(x float64) bool {
2143 if x < 0 {
2144 x = -x
2145 }
2146 return math.MaxFloat32 < x && x <= math.MaxFloat64
2147 }
2148
2149
2150
2151 func (v Value) OverflowInt(x int64) bool {
2152 k := v.kind()
2153 switch k {
2154 case Int, Int8, Int16, Int32, Int64:
2155 bitSize := v.typ().Size() * 8
2156 trunc := (x << (64 - bitSize)) >> (64 - bitSize)
2157 return x != trunc
2158 }
2159 panic(&ValueError{"reflect.Value.OverflowInt", v.kind()})
2160 }
2161
2162
2163
2164 func (v Value) OverflowUint(x uint64) bool {
2165 k := v.kind()
2166 switch k {
2167 case Uint, Uintptr, Uint8, Uint16, Uint32, Uint64:
2168 bitSize := v.typ_.Size() * 8
2169 trunc := (x << (64 - bitSize)) >> (64 - bitSize)
2170 return x != trunc
2171 }
2172 panic(&ValueError{"reflect.Value.OverflowUint", v.kind()})
2173 }
2174
2175
2176
2177
2178
2179
2180
2181
2182
2183
2184
2185
2186
2187
2188
2189
2190
2191
2192
2193
2194
2195
2196 func (v Value) Pointer() uintptr {
2197
2198 escapes(v.ptr)
2199
2200 k := v.kind()
2201 switch k {
2202 case Pointer:
2203 if !v.typ().Pointers() {
2204 val := *(*uintptr)(v.ptr)
2205
2206
2207 if !verifyNotInHeapPtr(val) {
2208 panic("reflect: reflect.Value.Pointer on an invalid notinheap pointer")
2209 }
2210 return val
2211 }
2212 fallthrough
2213 case Chan, Map, UnsafePointer:
2214 return uintptr(v.pointer())
2215 case Func:
2216 if v.flag&flagMethod != 0 {
2217
2218
2219
2220
2221
2222
2223 return methodValueCallCodePtr()
2224 }
2225 p := v.pointer()
2226
2227
2228 if p != nil {
2229 p = *(*unsafe.Pointer)(p)
2230 }
2231 return uintptr(p)
2232 case Slice:
2233 return uintptr((*unsafeheader.Slice)(v.ptr).Data)
2234 case String:
2235 return uintptr((*unsafeheader.String)(v.ptr).Data)
2236 }
2237 panic(&ValueError{"reflect.Value.Pointer", v.kind()})
2238 }
2239
2240
2241
2242
2243
2244
2245 func (v Value) Recv() (x Value, ok bool) {
2246 v.mustBe(Chan)
2247 v.mustBeExported()
2248 return v.recv(false)
2249 }
2250
2251
2252
2253 func (v Value) recv(nb bool) (val Value, ok bool) {
2254 tt := (*chanType)(unsafe.Pointer(v.typ()))
2255 if ChanDir(tt.Dir)&RecvDir == 0 {
2256 panic("reflect: recv on send-only channel")
2257 }
2258 t := tt.Elem
2259 val = Value{t, nil, flag(t.Kind())}
2260 var p unsafe.Pointer
2261 if t.IfaceIndir() {
2262 p = unsafe_New(t)
2263 val.ptr = p
2264 val.flag |= flagIndir
2265 } else {
2266 p = unsafe.Pointer(&val.ptr)
2267 }
2268 selected, ok := chanrecv(v.pointer(), nb, p)
2269 if !selected {
2270 val = Value{}
2271 }
2272 return
2273 }
2274
2275
2276
2277
2278 func (v Value) Send(x Value) {
2279 v.mustBe(Chan)
2280 v.mustBeExported()
2281 v.send(x, false)
2282 }
2283
2284
2285
2286 func (v Value) send(x Value, nb bool) (selected bool) {
2287 tt := (*chanType)(unsafe.Pointer(v.typ()))
2288 if ChanDir(tt.Dir)&SendDir == 0 {
2289 panic("reflect: send on recv-only channel")
2290 }
2291 x.mustBeExported()
2292 x = x.assignTo("reflect.Value.Send", tt.Elem, nil)
2293 var p unsafe.Pointer
2294 if x.flag&flagIndir != 0 {
2295 p = x.ptr
2296 } else {
2297 p = unsafe.Pointer(&x.ptr)
2298 }
2299 return chansend(v.pointer(), p, nb)
2300 }
2301
2302 // Set assigns x to the value v.
2303 // It panics if CanSet returns false.
2304 // As in Go, x's value must be assignable to v's type and
2305 // must not be derived from use of unexported struct fields.
2306 func (v Value) Set(x Value) {
2307 v.mustBeAssignable()
2308 x.mustBeExported()
2309 var target unsafe.Pointer
2310 if v.kind() == Interface {
2311 target = v.ptr
2312 }
2313 x = x.assignTo("reflect.Set", v.typ(), target)
2314 if x.flag&flagIndir != 0 {
2315 if x.ptr == unsafe.Pointer(&zeroVal[0]) {
2316 typedmemclr(v.typ(), v.ptr)
2317 } else {
2318 typedmemmove(v.typ(), v.ptr, x.ptr)
2319 }
2320 } else {
2321 *(*unsafe.Pointer)(v.ptr) = x.ptr
2322 }
2323 }
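// Illustrative usage of Set (a sketch from a caller's point of view; the
// variable names below are hypothetical). The target must be addressable,
// which is why it is obtained through a pointer and Elem:
//
//	var s string
//	v := reflect.ValueOf(&s).Elem() // addressable string Value
//	v.Set(reflect.ValueOf("hello")) // s == "hello"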
2324
2325
2326
2327 func (v Value) SetBool(x bool) {
2328 v.mustBeAssignable()
2329 v.mustBe(Bool)
2330 *(*bool)(v.ptr) = x
2331 }
2332
2333
2334
2335 func (v Value) SetBytes(x []byte) {
2336 v.mustBeAssignable()
2337 v.mustBe(Slice)
2338 if toRType(v.typ()).Elem().Kind() != Uint8 {
2339 panic("reflect.Value.SetBytes of non-byte slice")
2340 }
2341 *(*[]byte)(v.ptr) = x
2342 }
2343
2344
2345
2346 func (v Value) setRunes(x []rune) {
2347 v.mustBeAssignable()
2348 v.mustBe(Slice)
2349 if v.typ().Elem().Kind() != abi.Int32 {
2350 panic("reflect.Value.setRunes of non-rune slice")
2351 }
2352 *(*[]rune)(v.ptr) = x
2353 }
2354
2355
2356
2357 func (v Value) SetComplex(x complex128) {
2358 v.mustBeAssignable()
2359 switch k := v.kind(); k {
2360 default:
2361 panic(&ValueError{"reflect.Value.SetComplex", v.kind()})
2362 case Complex64:
2363 *(*complex64)(v.ptr) = complex64(x)
2364 case Complex128:
2365 *(*complex128)(v.ptr) = x
2366 }
2367 }
2368
2369
2370
2371 func (v Value) SetFloat(x float64) {
2372 v.mustBeAssignable()
2373 switch k := v.kind(); k {
2374 default:
2375 panic(&ValueError{"reflect.Value.SetFloat", v.kind()})
2376 case Float32:
2377 *(*float32)(v.ptr) = float32(x)
2378 case Float64:
2379 *(*float64)(v.ptr) = x
2380 }
2381 }
2382
2383
2384
2385 func (v Value) SetInt(x int64) {
2386 v.mustBeAssignable()
2387 switch k := v.kind(); k {
2388 default:
2389 panic(&ValueError{"reflect.Value.SetInt", v.kind()})
2390 case Int:
2391 *(*int)(v.ptr) = int(x)
2392 case Int8:
2393 *(*int8)(v.ptr) = int8(x)
2394 case Int16:
2395 *(*int16)(v.ptr) = int16(x)
2396 case Int32:
2397 *(*int32)(v.ptr) = int32(x)
2398 case Int64:
2399 *(*int64)(v.ptr) = x
2400 }
2401 }
2402
2403
2404
2405
2406 func (v Value) SetLen(n int) {
2407 v.mustBeAssignable()
2408 v.mustBe(Slice)
2409 s := (*unsafeheader.Slice)(v.ptr)
2410 if uint(n) > uint(s.Cap) {
2411 panic("reflect: slice length out of range in SetLen")
2412 }
2413 s.Len = n
2414 }
2415
2416
2417
2418
2419 func (v Value) SetCap(n int) {
2420 v.mustBeAssignable()
2421 v.mustBe(Slice)
2422 s := (*unsafeheader.Slice)(v.ptr)
2423 if n < s.Len || n > s.Cap {
2424 panic("reflect: slice capacity out of range in SetCap")
2425 }
2426 s.Cap = n
2427 }
2428
2429
2430
2431
2432
2433
2434
2435 func (v Value) SetMapIndex(key, elem Value) {
2436 v.mustBe(Map)
2437 v.mustBeExported()
2438 key.mustBeExported()
2439 tt := (*mapType)(unsafe.Pointer(v.typ()))
2440
2441 if (tt.Key == stringType || key.kind() == String) && tt.Key == key.typ() && tt.Elem.Size() <= abi.MapMaxElemBytes {
2442 k := *(*string)(key.ptr)
2443 if elem.typ() == nil {
2444 mapdelete_faststr(v.typ(), v.pointer(), k)
2445 return
2446 }
2447 elem.mustBeExported()
2448 elem = elem.assignTo("reflect.Value.SetMapIndex", tt.Elem, nil)
2449 var e unsafe.Pointer
2450 if elem.flag&flagIndir != 0 {
2451 e = elem.ptr
2452 } else {
2453 e = unsafe.Pointer(&elem.ptr)
2454 }
2455 mapassign_faststr(v.typ(), v.pointer(), k, e)
2456 return
2457 }
2458
2459 key = key.assignTo("reflect.Value.SetMapIndex", tt.Key, nil)
2460 var k unsafe.Pointer
2461 if key.flag&flagIndir != 0 {
2462 k = key.ptr
2463 } else {
2464 k = unsafe.Pointer(&key.ptr)
2465 }
2466 if elem.typ() == nil {
2467 mapdelete(v.typ(), v.pointer(), k)
2468 return
2469 }
2470 elem.mustBeExported()
2471 elem = elem.assignTo("reflect.Value.SetMapIndex", tt.Elem, nil)
2472 var e unsafe.Pointer
2473 if elem.flag&flagIndir != 0 {
2474 e = elem.ptr
2475 } else {
2476 e = unsafe.Pointer(&elem.ptr)
2477 }
2478 mapassign(v.typ(), v.pointer(), k, e)
2479 }
2480
2481
2482
2483 func (v Value) SetUint(x uint64) {
2484 v.mustBeAssignable()
2485 switch k := v.kind(); k {
2486 default:
2487 panic(&ValueError{"reflect.Value.SetUint", v.kind()})
2488 case Uint:
2489 *(*uint)(v.ptr) = uint(x)
2490 case Uint8:
2491 *(*uint8)(v.ptr) = uint8(x)
2492 case Uint16:
2493 *(*uint16)(v.ptr) = uint16(x)
2494 case Uint32:
2495 *(*uint32)(v.ptr) = uint32(x)
2496 case Uint64:
2497 *(*uint64)(v.ptr) = x
2498 case Uintptr:
2499 *(*uintptr)(v.ptr) = uintptr(x)
2500 }
2501 }
2502
2503
2504
2505 func (v Value) SetPointer(x unsafe.Pointer) {
2506 v.mustBeAssignable()
2507 v.mustBe(UnsafePointer)
2508 *(*unsafe.Pointer)(v.ptr) = x
2509 }
2510
2511
2512
2513 func (v Value) SetString(x string) {
2514 v.mustBeAssignable()
2515 v.mustBe(String)
2516 *(*string)(v.ptr) = x
2517 }
2518
2519
2520
2521
2522 func (v Value) Slice(i, j int) Value {
2523 var (
2524 cap int
2525 typ *sliceType
2526 base unsafe.Pointer
2527 )
2528 switch kind := v.kind(); kind {
2529 default:
2530 panic(&ValueError{"reflect.Value.Slice", v.kind()})
2531
2532 case Array:
2533 if v.flag&flagAddr == 0 {
2534 panic("reflect.Value.Slice: slice of unaddressable array")
2535 }
2536 tt := (*arrayType)(unsafe.Pointer(v.typ()))
2537 cap = int(tt.Len)
2538 typ = (*sliceType)(unsafe.Pointer(tt.Slice))
2539 base = v.ptr
2540
2541 case Slice:
2542 typ = (*sliceType)(unsafe.Pointer(v.typ()))
2543 s := (*unsafeheader.Slice)(v.ptr)
2544 base = s.Data
2545 cap = s.Cap
2546
2547 case String:
2548 s := (*unsafeheader.String)(v.ptr)
2549 if i < 0 || j < i || j > s.Len {
2550 panic("reflect.Value.Slice: string slice index out of bounds")
2551 }
2552 var t unsafeheader.String
2553 if i < s.Len {
2554 t = unsafeheader.String{Data: arrayAt(s.Data, i, 1, "i < s.Len"), Len: j - i}
2555 }
2556 return Value{v.typ(), unsafe.Pointer(&t), v.flag}
2557 }
2558
2559 if i < 0 || j < i || j > cap {
2560 panic("reflect.Value.Slice: slice index out of bounds")
2561 }
2562
2563
2564 var x []unsafe.Pointer
2565
2566
2567 s := (*unsafeheader.Slice)(unsafe.Pointer(&x))
2568 s.Len = j - i
2569 s.Cap = cap - i
2570 if cap-i > 0 {
2571 s.Data = arrayAt(base, i, typ.Elem.Size(), "i < cap")
2572 } else {
2573
2574 s.Data = base
2575 }
2576
2577 fl := v.flag.ro() | flagIndir | flag(Slice)
2578 return Value{typ.Common(), unsafe.Pointer(&x), fl}
2579 }
2580
2581
2582
2583
2584 func (v Value) Slice3(i, j, k int) Value {
2585 var (
2586 cap int
2587 typ *sliceType
2588 base unsafe.Pointer
2589 )
2590 switch kind := v.kind(); kind {
2591 default:
2592 panic(&ValueError{"reflect.Value.Slice3", v.kind()})
2593
2594 case Array:
2595 if v.flag&flagAddr == 0 {
2596 panic("reflect.Value.Slice3: slice of unaddressable array")
2597 }
2598 tt := (*arrayType)(unsafe.Pointer(v.typ()))
2599 cap = int(tt.Len)
2600 typ = (*sliceType)(unsafe.Pointer(tt.Slice))
2601 base = v.ptr
2602
2603 case Slice:
2604 typ = (*sliceType)(unsafe.Pointer(v.typ()))
2605 s := (*unsafeheader.Slice)(v.ptr)
2606 base = s.Data
2607 cap = s.Cap
2608 }
2609
2610 if i < 0 || j < i || k < j || k > cap {
2611 panic("reflect.Value.Slice3: slice index out of bounds")
2612 }
2613
2614
2615
2616 var x []unsafe.Pointer
2617
2618
2619 s := (*unsafeheader.Slice)(unsafe.Pointer(&x))
2620 s.Len = j - i
2621 s.Cap = k - i
2622 if k-i > 0 {
2623 s.Data = arrayAt(base, i, typ.Elem.Size(), "i < k <= cap")
2624 } else {
2625
2626 s.Data = base
2627 }
2628
2629 fl := v.flag.ro() | flagIndir | flag(Slice)
2630 return Value{typ.Common(), unsafe.Pointer(&x), fl}
2631 }
2632
2633
2634
2635
2636
2637
2638
2639 func (v Value) String() string {
2640
2641 if v.kind() == String {
2642 return *(*string)(v.ptr)
2643 }
2644 return v.stringNonString()
2645 }
2646
2647 func (v Value) stringNonString() string {
2648 if v.kind() == Invalid {
2649 return "<invalid Value>"
2650 }
2651
2652
2653 return "<" + v.Type().String() + " Value>"
2654 }
2655
2656
2657
2658
2659
2660
2661 func (v Value) TryRecv() (x Value, ok bool) {
2662 v.mustBe(Chan)
2663 v.mustBeExported()
2664 return v.recv(true)
2665 }
2666
2667
2668
2669
2670
2671 func (v Value) TrySend(x Value) bool {
2672 v.mustBe(Chan)
2673 v.mustBeExported()
2674 return v.send(x, true)
2675 }
2676
2677
2678 func (v Value) Type() Type {
2679 if v.flag != 0 && v.flag&flagMethod == 0 {
2680 return (*rtype)(noescape(unsafe.Pointer(v.typ_)))
2681 }
2682 return v.typeSlow()
2683 }
2684
2685 func (v Value) typeSlow() Type {
2686 if v.flag == 0 {
2687 panic(&ValueError{"reflect.Value.Type", Invalid})
2688 }
2689
2690 typ := v.typ()
2691 if v.flag&flagMethod == 0 {
2692 return toRType(v.typ())
2693 }
2694
2695
2696
2697 i := int(v.flag) >> flagMethodShift
2698 if v.typ().Kind() == abi.Interface {
2699
2700 tt := (*interfaceType)(unsafe.Pointer(typ))
2701 if uint(i) >= uint(len(tt.Methods)) {
2702 panic("reflect: internal error: invalid method index")
2703 }
2704 m := &tt.Methods[i]
2705 return toRType(typeOffFor(typ, m.Typ))
2706 }
2707
2708 ms := typ.ExportedMethods()
2709 if uint(i) >= uint(len(ms)) {
2710 panic("reflect: internal error: invalid method index")
2711 }
2712 m := ms[i]
2713 return toRType(typeOffFor(typ, m.Mtyp))
2714 }
2715
2716
2717 func (v Value) CanUint() bool {
2718 switch v.kind() {
2719 case Uint, Uint8, Uint16, Uint32, Uint64, Uintptr:
2720 return true
2721 default:
2722 return false
2723 }
2724 }
2725
2726
2727
2728 func (v Value) Uint() uint64 {
2729 k := v.kind()
2730 p := v.ptr
2731 switch k {
2732 case Uint:
2733 return uint64(*(*uint)(p))
2734 case Uint8:
2735 return uint64(*(*uint8)(p))
2736 case Uint16:
2737 return uint64(*(*uint16)(p))
2738 case Uint32:
2739 return uint64(*(*uint32)(p))
2740 case Uint64:
2741 return *(*uint64)(p)
2742 case Uintptr:
2743 return uint64(*(*uintptr)(p))
2744 }
2745 panic(&ValueError{"reflect.Value.Uint", v.kind()})
2746 }
2747
2748
2749
2750
2751
2752
2753
2754
2755
2756
2757 func (v Value) UnsafeAddr() uintptr {
2758 if v.typ() == nil {
2759 panic(&ValueError{"reflect.Value.UnsafeAddr", Invalid})
2760 }
2761 if v.flag&flagAddr == 0 {
2762 panic("reflect.Value.UnsafeAddr of unaddressable value")
2763 }
2764
2765 escapes(v.ptr)
2766 return uintptr(v.ptr)
2767 }
2768
2769
2770
2771
2772
2773
2774
2775
2776
2777
2778
2779
2780
2781
2782
2783 func (v Value) UnsafePointer() unsafe.Pointer {
2784 k := v.kind()
2785 switch k {
2786 case Pointer:
2787 if !v.typ().Pointers() {
2788
2789
2790 if !verifyNotInHeapPtr(*(*uintptr)(v.ptr)) {
2791 panic("reflect: reflect.Value.UnsafePointer on an invalid notinheap pointer")
2792 }
2793 return *(*unsafe.Pointer)(v.ptr)
2794 }
2795 fallthrough
2796 case Chan, Map, UnsafePointer:
2797 return v.pointer()
2798 case Func:
2799 if v.flag&flagMethod != 0 {
2800
2801
2802
2803
2804
2805
2806 code := methodValueCallCodePtr()
2807 return *(*unsafe.Pointer)(unsafe.Pointer(&code))
2808 }
2809 p := v.pointer()
2810
2811
2812 if p != nil {
2813 p = *(*unsafe.Pointer)(p)
2814 }
2815 return p
2816 case Slice:
2817 return (*unsafeheader.Slice)(v.ptr).Data
2818 case String:
2819 return (*unsafeheader.String)(v.ptr).Data
2820 }
2821 panic(&ValueError{"reflect.Value.UnsafePointer", v.kind()})
2822 }
2823
2824
2825
2826
2827
2828
2829
2830
2831
2832 type StringHeader struct {
2833 Data uintptr
2834 Len int
2835 }
2836
2837
2838
2839
2840
2841
2842
2843
2844
2845 type SliceHeader struct {
2846 Data uintptr
2847 Len int
2848 Cap int
2849 }
2850
2851 func typesMustMatch(what string, t1, t2 Type) {
2852 if t1 != t2 {
2853 panic(what + ": " + t1.String() + " != " + t2.String())
2854 }
2855 }
2856
2857
2858
2859
2860
2861
2862
2863
2864 func arrayAt(p unsafe.Pointer, i int, eltSize uintptr, whySafe string) unsafe.Pointer {
2865 return add(p, uintptr(i)*eltSize, "i < len")
2866 }
2867
2868
2869
2870
2871
2872
2873
2874 func (v Value) Grow(n int) {
2875 v.mustBeAssignable()
2876 v.mustBe(Slice)
2877 v.grow(n)
2878 }
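// Illustrative sketch (editorial addition, not part of the original source):
// Grow extends a slice's capacity without changing its length, and the
// slice Value must be assignable (for example, reached through a pointer).
//
//	s := []int{1, 2, 3}
//	v := reflect.ValueOf(&s).Elem()
//	v.Grow(10)
//	// v.Len() is still 3; v.Cap() is at least 13.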
2879
2880
2881 func (v Value) grow(n int) {
2882 p := (*unsafeheader.Slice)(v.ptr)
2883 switch {
2884 case n < 0:
2885 panic("reflect.Value.Grow: negative len")
2886 case p.Len+n < 0:
2887 panic("reflect.Value.Grow: slice overflow")
2888 case p.Len+n > p.Cap:
2889 t := v.typ().Elem()
2890 *p = growslice(t, *p, n)
2891 }
2892 }
2893
2894
2895
2896
2897
2898
2899
2900 func (v Value) extendSlice(n int) Value {
2901 v.mustBeExported()
2902 v.mustBe(Slice)
2903
2904
2905 sh := *(*unsafeheader.Slice)(v.ptr)
2906 s := &sh
2907 v.ptr = unsafe.Pointer(s)
2908 v.flag = flagIndir | flag(Slice)
2909
2910 v.grow(n)
2911 s.Len += n
2912 return v
2913 }
2914
2915
2916
2917
2918 func (v Value) Clear() {
2919 switch v.Kind() {
2920 case Slice:
2921 sh := *(*unsafeheader.Slice)(v.ptr)
2922 st := (*sliceType)(unsafe.Pointer(v.typ()))
2923 typedarrayclear(st.Elem, sh.Data, sh.Len)
2924 case Map:
2925 mapclear(v.typ(), v.pointer())
2926 default:
2927 panic(&ValueError{"reflect.Value.Clear", v.Kind()})
2928 }
2929 }
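// Illustrative sketch (editorial addition, not part of the original source):
// Clear zeroes the elements of a slice or deletes all entries of a map.
//
//	m := map[string]int{"a": 1, "b": 2}
//	reflect.ValueOf(m).Clear()
//	// len(m) == 0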
2930
2931
2932
2933 func Append(s Value, x ...Value) Value {
2934 s.mustBe(Slice)
2935 n := s.Len()
2936 s = s.extendSlice(len(x))
2937 for i, v := range x {
2938 s.Index(n + i).Set(v)
2939 }
2940 return s
2941 }
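// Illustrative sketch (editorial addition, not part of the original source):
// Append mirrors the built-in append and returns the updated slice Value.
//
//	s := reflect.ValueOf([]string{"a"})
//	s = reflect.Append(s, reflect.ValueOf("b"), reflect.ValueOf("c"))
//	fmt.Println(s.Interface()) // [a b c]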
2942
2943
2944
2945 func AppendSlice(s, t Value) Value {
2946 s.mustBe(Slice)
2947 t.mustBe(Slice)
2948 typesMustMatch("reflect.AppendSlice", s.Type().Elem(), t.Type().Elem())
2949 ns := s.Len()
2950 nt := t.Len()
2951 s = s.extendSlice(nt)
2952 Copy(s.Slice(ns, ns+nt), t)
2953 return s
2954 }
2955
2956
2957
2958
2959
2960
2961
2962
2963 func Copy(dst, src Value) int {
2964 dk := dst.kind()
2965 if dk != Array && dk != Slice {
2966 panic(&ValueError{"reflect.Copy", dk})
2967 }
2968 if dk == Array {
2969 dst.mustBeAssignable()
2970 }
2971 dst.mustBeExported()
2972
2973 sk := src.kind()
2974 var stringCopy bool
2975 if sk != Array && sk != Slice {
2976 stringCopy = sk == String && dst.typ().Elem().Kind() == abi.Uint8
2977 if !stringCopy {
2978 panic(&ValueError{"reflect.Copy", sk})
2979 }
2980 }
2981 src.mustBeExported()
2982
2983 de := dst.typ().Elem()
2984 if !stringCopy {
2985 se := src.typ().Elem()
2986 typesMustMatch("reflect.Copy", toType(de), toType(se))
2987 }
2988
2989 var ds, ss unsafeheader.Slice
2990 if dk == Array {
2991 ds.Data = dst.ptr
2992 ds.Len = dst.Len()
2993 ds.Cap = ds.Len
2994 } else {
2995 ds = *(*unsafeheader.Slice)(dst.ptr)
2996 }
2997 if sk == Array {
2998 ss.Data = src.ptr
2999 ss.Len = src.Len()
3000 ss.Cap = ss.Len
3001 } else if sk == Slice {
3002 ss = *(*unsafeheader.Slice)(src.ptr)
3003 } else {
3004 sh := *(*unsafeheader.String)(src.ptr)
3005 ss.Data = sh.Data
3006 ss.Len = sh.Len
3007 ss.Cap = sh.Len
3008 }
3009
3010 return typedslicecopy(de.Common(), ds, ss)
3011 }
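// Illustrative sketch (editorial addition, not part of the original source):
// Copy mirrors the built-in copy, including copying from a string into a
// []byte destination, and returns the number of elements copied.
//
//	dst := make([]byte, 3)
//	n := reflect.Copy(reflect.ValueOf(dst), reflect.ValueOf("hello"))
//	// n == 3, dst == []byte("hel")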
3012
3013
3014
3015 type runtimeSelect struct {
3016 dir SelectDir
3017 typ *rtype
3018 ch unsafe.Pointer
3019 val unsafe.Pointer
3020 }
3021
3022
3023
3024
3025
3026
3027
3028
3029
3030
3031
3032
3033 func rselect([]runtimeSelect) (chosen int, recvOK bool)
3034
3035
3036 type SelectDir int
3037
3038
3039
3040 const (
3041 _ SelectDir = iota
3042 SelectSend
3043 SelectRecv
3044 SelectDefault
3045 )
3046
3047
3048
3049
3050
3051
3052
3053
3054
3055
3056
3057
3058
3059
3060
3061
3062
3063 type SelectCase struct {
3064 Dir SelectDir
3065 Chan Value
3066 Send Value
3067 }
3068
3069
3070
3071
3072
3073
3074
3075
3076
3077 func Select(cases []SelectCase) (chosen int, recv Value, recvOK bool) {
3078 if len(cases) > 65536 {
3079 panic("reflect.Select: too many cases (max 65536)")
3080 }
3081
3082
3083
3084 var runcases []runtimeSelect
3085 if len(cases) > 4 {
3086
3087 runcases = make([]runtimeSelect, len(cases))
3088 } else {
3089
3090 runcases = make([]runtimeSelect, len(cases), 4)
3091 }
3092
3093 haveDefault := false
3094 for i, c := range cases {
3095 rc := &runcases[i]
3096 rc.dir = c.Dir
3097 switch c.Dir {
3098 default:
3099 panic("reflect.Select: invalid Dir")
3100
3101 case SelectDefault:
3102 if haveDefault {
3103 panic("reflect.Select: multiple default cases")
3104 }
3105 haveDefault = true
3106 if c.Chan.IsValid() {
3107 panic("reflect.Select: default case has Chan value")
3108 }
3109 if c.Send.IsValid() {
3110 panic("reflect.Select: default case has Send value")
3111 }
3112
3113 case SelectSend:
3114 ch := c.Chan
3115 if !ch.IsValid() {
3116 break
3117 }
3118 ch.mustBe(Chan)
3119 ch.mustBeExported()
3120 tt := (*chanType)(unsafe.Pointer(ch.typ()))
3121 if ChanDir(tt.Dir)&SendDir == 0 {
3122 panic("reflect.Select: SendDir case using recv-only channel")
3123 }
3124 rc.ch = ch.pointer()
3125 rc.typ = toRType(&tt.Type)
3126 v := c.Send
3127 if !v.IsValid() {
3128 panic("reflect.Select: SendDir case missing Send value")
3129 }
3130 v.mustBeExported()
3131 v = v.assignTo("reflect.Select", tt.Elem, nil)
3132 if v.flag&flagIndir != 0 {
3133 rc.val = v.ptr
3134 } else {
3135 rc.val = unsafe.Pointer(&v.ptr)
3136 }
3137
3138
3139 escapes(rc.val)
3140
3141 case SelectRecv:
3142 if c.Send.IsValid() {
3143 panic("reflect.Select: RecvDir case has Send value")
3144 }
3145 ch := c.Chan
3146 if !ch.IsValid() {
3147 break
3148 }
3149 ch.mustBe(Chan)
3150 ch.mustBeExported()
3151 tt := (*chanType)(unsafe.Pointer(ch.typ()))
3152 if ChanDir(tt.Dir)&RecvDir == 0 {
3153 panic("reflect.Select: RecvDir case using send-only channel")
3154 }
3155 rc.ch = ch.pointer()
3156 rc.typ = toRType(&tt.Type)
3157 rc.val = unsafe_New(tt.Elem)
3158 }
3159 }
3160
3161 chosen, recvOK = rselect(runcases)
3162 if runcases[chosen].dir == SelectRecv {
3163 tt := (*chanType)(unsafe.Pointer(runcases[chosen].typ))
3164 t := tt.Elem
3165 p := runcases[chosen].val
3166 fl := flag(t.Kind())
3167 if t.IfaceIndir() {
3168 recv = Value{t, p, fl | flagIndir}
3169 } else {
3170 recv = Value{t, *(*unsafe.Pointer)(p), fl}
3171 }
3172 }
3173 return chosen, recv, recvOK
3174 }
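// Illustrative sketch (editorial addition, not part of the original source):
// Select builds and runs a select statement from a slice of SelectCase
// values constructed at run time.
//
//	ch := make(chan int, 1)
//	ch <- 42
//	cases := []reflect.SelectCase{
//		{Dir: reflect.SelectRecv, Chan: reflect.ValueOf(ch)},
//		{Dir: reflect.SelectDefault},
//	}
//	chosen, recv, ok := reflect.Select(cases)
//	// chosen == 0, recv.Int() == 42, ok == true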
3175
3176
3179
3180
3181
3182
3183 func unsafe_New(*abi.Type) unsafe.Pointer
3184
3185
3186 func unsafe_NewArray(*abi.Type, int) unsafe.Pointer
3187
3188
3189
3190 func MakeSlice(typ Type, len, cap int) Value {
3191 if typ.Kind() != Slice {
3192 panic("reflect.MakeSlice of non-slice type")
3193 }
3194 if len < 0 {
3195 panic("reflect.MakeSlice: negative len")
3196 }
3197 if cap < 0 {
3198 panic("reflect.MakeSlice: negative cap")
3199 }
3200 if len > cap {
3201 panic("reflect.MakeSlice: len > cap")
3202 }
3203
3204 s := unsafeheader.Slice{Data: unsafe_NewArray(&(typ.Elem().(*rtype).t), cap), Len: len, Cap: cap}
3205 return Value{&typ.(*rtype).t, unsafe.Pointer(&s), flagIndir | flag(Slice)}
3206 }
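// Illustrative sketch (editorial addition, not part of the original source):
// MakeSlice allocates a new slice for a slice type known only at run time.
//
//	t := reflect.TypeOf([]int(nil))
//	s := reflect.MakeSlice(t, 2, 5)
//	// s.Len() == 2, s.Cap() == 5, s.Index(0).Int() == 0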
3207
3208
3209
3210
3211
3212 func SliceAt(typ Type, p unsafe.Pointer, n int) Value {
3213 unsafeslice(typ.common(), p, n)
3214 s := unsafeheader.Slice{Data: p, Len: n, Cap: n}
3215 return Value{SliceOf(typ).common(), unsafe.Pointer(&s), flagIndir | flag(Slice)}
3216 }
3217
3218
3219 func MakeChan(typ Type, buffer int) Value {
3220 if typ.Kind() != Chan {
3221 panic("reflect.MakeChan of non-chan type")
3222 }
3223 if buffer < 0 {
3224 panic("reflect.MakeChan: negative buffer size")
3225 }
3226 if typ.ChanDir() != BothDir {
3227 panic("reflect.MakeChan: unidirectional channel type")
3228 }
3229 t := typ.common()
3230 ch := makechan(t, buffer)
3231 return Value{t, ch, flag(Chan)}
3232 }
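// Illustrative sketch (editorial addition, not part of the original source):
// MakeChan requires a bidirectional channel type.
//
//	t := reflect.TypeOf((chan int)(nil))
//	ch := reflect.MakeChan(t, 1)
//	ch.Send(reflect.ValueOf(5))
//	v, ok := ch.Recv()
//	// v.Int() == 5, ok == true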
3233
3234
3235 func MakeMap(typ Type) Value {
3236 return MakeMapWithSize(typ, 0)
3237 }
3238
3239
3240
3241 func MakeMapWithSize(typ Type, n int) Value {
3242 if typ.Kind() != Map {
3243 panic("reflect.MakeMapWithSize of non-map type")
3244 }
3245 t := typ.common()
3246 m := makemap(t, n)
3247 return Value{t, m, flag(Map)}
3248 }
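// Illustrative sketch (editorial addition, not part of the original source):
// MakeMap and MakeMapWithSize create maps for types known only at run time;
// the size argument is a capacity hint, like the second argument to make.
//
//	t := reflect.TypeOf(map[string]int(nil))
//	m := reflect.MakeMapWithSize(t, 8)
//	m.SetMapIndex(reflect.ValueOf("x"), reflect.ValueOf(1))
//	// m.MapIndex(reflect.ValueOf("x")).Int() == 1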
3249
3250
3251
3252
3253 func Indirect(v Value) Value {
3254 if v.Kind() != Pointer {
3255 return v
3256 }
3257 return v.Elem()
3258 }
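// Illustrative sketch (editorial addition, not part of the original source):
// Indirect saves a Kind check when a value may or may not be a pointer.
//
//	x := 7
//	a := reflect.Indirect(reflect.ValueOf(&x)) // dereferences the pointer
//	b := reflect.Indirect(reflect.ValueOf(x))  // returned unchanged
//	// a.Int() == 7 and b.Int() == 7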
3259
3260
3261
3262 func ValueOf(i any) Value {
3263 if i == nil {
3264 return Value{}
3265 }
3266 return unpackEface(i)
3267 }
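// Illustrative sketch (editorial addition, not part of the original source):
// ValueOf is the usual entry point into the package; a nil interface yields
// the zero Value, whose Kind is Invalid.
//
//	v := reflect.ValueOf(3.14)
//	// v.Kind() == reflect.Float64, v.Float() == 3.14
//	// reflect.ValueOf(nil).IsValid() == false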
3268
3269
3270
3271
3272
3273
3274 func Zero(typ Type) Value {
3275 if typ == nil {
3276 panic("reflect: Zero(nil)")
3277 }
3278 t := &typ.(*rtype).t
3279 fl := flag(t.Kind())
3280 if t.IfaceIndir() {
3281 var p unsafe.Pointer
3282 if t.Size() <= abi.ZeroValSize {
3283 p = unsafe.Pointer(&zeroVal[0])
3284 } else {
3285 p = unsafe_New(t)
3286 }
3287 return Value{t, p, fl | flagIndir}
3288 }
3289 return Value{t, nil, fl}
3290 }
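// Illustrative sketch (editorial addition, not part of the original source):
// Zero yields a read-only zero value of a type, whereas New (below) yields
// an addressable, settable value through a fresh pointer.
//
//	z := reflect.Zero(reflect.TypeOf(""))
//	// z.Kind() == reflect.String, z.String() == "", z.CanSet() == false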
3291
3292
3293 var zeroVal [abi.ZeroValSize]byte
3294
3295
3296
3297 func New(typ Type) Value {
3298 if typ == nil {
3299 panic("reflect: New(nil)")
3300 }
3301 t := &typ.(*rtype).t
3302 pt := ptrTo(t)
3303 if pt.IfaceIndir() {
3304
3305 panic("reflect: New of type that may not be allocated in heap (possibly undefined cgo C type)")
3306 }
3307 ptr := unsafe_New(t)
3308 fl := flag(Pointer)
3309 return Value{pt, ptr, fl}
3310 }
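// Illustrative sketch (editorial addition, not part of the original source):
// New is the reflection analogue of the built-in new: it returns a pointer
// Value whose Elem is addressable and settable.
//
//	p := reflect.New(reflect.TypeOf(0)) // like new(int)
//	p.Elem().SetInt(9)
//	// *p.Interface().(*int) == 9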
3311
3312
3313
3314 func NewAt(typ Type, p unsafe.Pointer) Value {
3315 fl := flag(Pointer)
3316 t := typ.(*rtype)
3317 return Value{t.ptrTo(), p, fl}
3318 }
3319
3320
3321
3322
3323
3324
3325 func (v Value) assignTo(context string, dst *abi.Type, target unsafe.Pointer) Value {
3326 if v.flag&flagMethod != 0 {
3327 v = makeMethodValue(context, v)
3328 }
3329
3330 switch {
3331 case directlyAssignable(dst, v.typ()):
3332
3333
3334 fl := v.flag&(flagAddr|flagIndir) | v.flag.ro()
3335 fl |= flag(dst.Kind())
3336 return Value{dst, v.ptr, fl}
3337
3338 case implements(dst, v.typ()):
3339 if v.Kind() == Interface && v.IsNil() {
3340
3341
3342
3343 return Value{dst, nil, flag(Interface)}
3344 }
3345 x := valueInterface(v, false)
3346 if target == nil {
3347 target = unsafe_New(dst)
3348 }
3349 if dst.NumMethod() == 0 {
3350 *(*any)(target) = x
3351 } else {
3352 ifaceE2I(dst, x, target)
3353 }
3354 return Value{dst, target, flagIndir | flag(Interface)}
3355 }
3356
3357
3358 panic(context + ": value of type " + stringFor(v.typ()) + " is not assignable to type " + stringFor(dst))
3359 }
3360
3361
3362
3363
3364 func (v Value) Convert(t Type) Value {
3365 if v.flag&flagMethod != 0 {
3366 v = makeMethodValue("Convert", v)
3367 }
3368 op := convertOp(t.common(), v.typ())
3369 if op == nil {
3370 panic("reflect.Value.Convert: value of type " + stringFor(v.typ()) + " cannot be converted to type " + t.String())
3371 }
3372 return op(v, t)
3373 }
3374
3375
3376
3377 func (v Value) CanConvert(t Type) bool {
3378 vt := v.Type()
3379 if !vt.ConvertibleTo(t) {
3380 return false
3381 }
3382
3383
3384 switch {
3385 case vt.Kind() == Slice && t.Kind() == Array:
3386 if t.Len() > v.Len() {
3387 return false
3388 }
3389 case vt.Kind() == Slice && t.Kind() == Pointer && t.Elem().Kind() == Array:
3390 n := t.Elem().Len()
3391 if n > v.Len() {
3392 return false
3393 }
3394 }
3395 return true
3396 }
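// Illustrative sketch (editorial addition, not part of the original source):
// CanConvert guards Convert for the conversions that can fail at run time,
// such as converting a slice to an array type longer than the slice.
//
//	v := reflect.ValueOf([]byte{1, 2, 3})
//	t := reflect.TypeOf([4]byte{})
//	if v.CanConvert(t) {
//		_ = v.Convert(t) // not reached: the slice is too short
//	}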
3397
3398
3399
3400
3401
3402 func (v Value) Comparable() bool {
3403 k := v.Kind()
3404 switch k {
3405 case Invalid:
3406 return false
3407
3408 case Array:
3409 switch v.Type().Elem().Kind() {
3410 case Interface, Array, Struct:
3411 for i := 0; i < v.Type().Len(); i++ {
3412 if !v.Index(i).Comparable() {
3413 return false
3414 }
3415 }
3416 return true
3417 }
3418 return v.Type().Comparable()
3419
3420 case Interface:
3421 return v.IsNil() || v.Elem().Comparable()
3422
3423 case Struct:
3424 for i := 0; i < v.NumField(); i++ {
3425 if !v.Field(i).Comparable() {
3426 return false
3427 }
3428 }
3429 return true
3430
3431 default:
3432 return v.Type().Comparable()
3433 }
3434 }
3435
3436
3437
3438
3439
3440
3441
3442
3443
3444 func (v Value) Equal(u Value) bool {
3445 if v.Kind() == Interface {
3446 v = v.Elem()
3447 }
3448 if u.Kind() == Interface {
3449 u = u.Elem()
3450 }
3451
3452 if !v.IsValid() || !u.IsValid() {
3453 return v.IsValid() == u.IsValid()
3454 }
3455
3456 if v.Kind() != u.Kind() || v.Type() != u.Type() {
3457 return false
3458 }
3459
3460
3461
3462 switch v.Kind() {
3463 default:
3464 panic("reflect.Value.Equal: invalid Kind")
3465 case Bool:
3466 return v.Bool() == u.Bool()
3467 case Int, Int8, Int16, Int32, Int64:
3468 return v.Int() == u.Int()
3469 case Uint, Uint8, Uint16, Uint32, Uint64, Uintptr:
3470 return v.Uint() == u.Uint()
3471 case Float32, Float64:
3472 return v.Float() == u.Float()
3473 case Complex64, Complex128:
3474 return v.Complex() == u.Complex()
3475 case String:
3476 return v.String() == u.String()
3477 case Chan, Pointer, UnsafePointer:
3478 return v.Pointer() == u.Pointer()
3479 case Array:
3480
3481 vl := v.Len()
3482 if vl == 0 {
3483
3484 if !v.Type().Elem().Comparable() {
3485 break
3486 }
3487 return true
3488 }
3489 for i := 0; i < vl; i++ {
3490 if !v.Index(i).Equal(u.Index(i)) {
3491 return false
3492 }
3493 }
3494 return true
3495 case Struct:
3496
3497 nf := v.NumField()
3498 for i := 0; i < nf; i++ {
3499 if !v.Field(i).Equal(u.Field(i)) {
3500 return false
3501 }
3502 }
3503 return true
3504 case Func, Map, Slice:
3505 break
3506 }
3507 panic("reflect.Value.Equal: values of type " + v.Type().String() + " are not comparable")
3508 }
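// Illustrative sketch (editorial addition, not part of the original source):
// Equal follows Go's == semantics and, like ==, panics on values whose types
// are not comparable; Comparable can be used to check first.
//
//	a := reflect.ValueOf([2]int{1, 2})
//	b := reflect.ValueOf([2]int{1, 2})
//	if a.Comparable() {
//		fmt.Println(a.Equal(b)) // true
//	}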
3509
3510
3511
3512 func convertOp(dst, src *abi.Type) func(Value, Type) Value {
3513 switch Kind(src.Kind()) {
3514 case Int, Int8, Int16, Int32, Int64:
3515 switch Kind(dst.Kind()) {
3516 case Int, Int8, Int16, Int32, Int64, Uint, Uint8, Uint16, Uint32, Uint64, Uintptr:
3517 return cvtInt
3518 case Float32, Float64:
3519 return cvtIntFloat
3520 case String:
3521 return cvtIntString
3522 }
3523
3524 case Uint, Uint8, Uint16, Uint32, Uint64, Uintptr:
3525 switch Kind(dst.Kind()) {
3526 case Int, Int8, Int16, Int32, Int64, Uint, Uint8, Uint16, Uint32, Uint64, Uintptr:
3527 return cvtUint
3528 case Float32, Float64:
3529 return cvtUintFloat
3530 case String:
3531 return cvtUintString
3532 }
3533
3534 case Float32, Float64:
3535 switch Kind(dst.Kind()) {
3536 case Int, Int8, Int16, Int32, Int64:
3537 return cvtFloatInt
3538 case Uint, Uint8, Uint16, Uint32, Uint64, Uintptr:
3539 return cvtFloatUint
3540 case Float32, Float64:
3541 return cvtFloat
3542 }
3543
3544 case Complex64, Complex128:
3545 switch Kind(dst.Kind()) {
3546 case Complex64, Complex128:
3547 return cvtComplex
3548 }
3549
3550 case String:
3551 if dst.Kind() == abi.Slice && pkgPathFor(dst.Elem()) == "" {
3552 switch Kind(dst.Elem().Kind()) {
3553 case Uint8:
3554 return cvtStringBytes
3555 case Int32:
3556 return cvtStringRunes
3557 }
3558 }
3559
3560 case Slice:
3561 if dst.Kind() == abi.String && pkgPathFor(src.Elem()) == "" {
3562 switch Kind(src.Elem().Kind()) {
3563 case Uint8:
3564 return cvtBytesString
3565 case Int32:
3566 return cvtRunesString
3567 }
3568 }
3569
3570
3571 if dst.Kind() == abi.Pointer && dst.Elem().Kind() == abi.Array && src.Elem() == dst.Elem().Elem() {
3572 return cvtSliceArrayPtr
3573 }
3574
3575
3576 if dst.Kind() == abi.Array && src.Elem() == dst.Elem() {
3577 return cvtSliceArray
3578 }
3579
3580 case Chan:
3581 if dst.Kind() == abi.Chan && specialChannelAssignability(dst, src) {
3582 return cvtDirect
3583 }
3584 }
3585
3586
3587 if haveIdenticalUnderlyingType(dst, src, false) {
3588 return cvtDirect
3589 }
3590
3591
3592 if dst.Kind() == abi.Pointer && nameFor(dst) == "" &&
3593 src.Kind() == abi.Pointer && nameFor(src) == "" &&
3594 haveIdenticalUnderlyingType(elem(dst), elem(src), false) {
3595 return cvtDirect
3596 }
3597
3598 if implements(dst, src) {
3599 if src.Kind() == abi.Interface {
3600 return cvtI2I
3601 }
3602 return cvtT2I
3603 }
3604
3605 return nil
3606 }
3607
3608
3609
3610 func makeInt(f flag, bits uint64, t Type) Value {
3611 typ := t.common()
3612 ptr := unsafe_New(typ)
3613 switch typ.Size() {
3614 case 1:
3615 *(*uint8)(ptr) = uint8(bits)
3616 case 2:
3617 *(*uint16)(ptr) = uint16(bits)
3618 case 4:
3619 *(*uint32)(ptr) = uint32(bits)
3620 case 8:
3621 *(*uint64)(ptr) = bits
3622 }
3623 return Value{typ, ptr, f | flagIndir | flag(typ.Kind())}
3624 }
3625
3626
3627
3628 func makeFloat(f flag, v float64, t Type) Value {
3629 typ := t.common()
3630 ptr := unsafe_New(typ)
3631 switch typ.Size() {
3632 case 4:
3633 *(*float32)(ptr) = float32(v)
3634 case 8:
3635 *(*float64)(ptr) = v
3636 }
3637 return Value{typ, ptr, f | flagIndir | flag(typ.Kind())}
3638 }
3639
3640
3641 func makeFloat32(f flag, v float32, t Type) Value {
3642 typ := t.common()
3643 ptr := unsafe_New(typ)
3644 *(*float32)(ptr) = v
3645 return Value{typ, ptr, f | flagIndir | flag(typ.Kind())}
3646 }
3647
3648
3649
3650 func makeComplex(f flag, v complex128, t Type) Value {
3651 typ := t.common()
3652 ptr := unsafe_New(typ)
3653 switch typ.Size() {
3654 case 8:
3655 *(*complex64)(ptr) = complex64(v)
3656 case 16:
3657 *(*complex128)(ptr) = v
3658 }
3659 return Value{typ, ptr, f | flagIndir | flag(typ.Kind())}
3660 }
3661
3662 func makeString(f flag, v string, t Type) Value {
3663 ret := New(t).Elem()
3664 ret.SetString(v)
3665 ret.flag = ret.flag&^flagAddr | f
3666 return ret
3667 }
3668
3669 func makeBytes(f flag, v []byte, t Type) Value {
3670 ret := New(t).Elem()
3671 ret.SetBytes(v)
3672 ret.flag = ret.flag&^flagAddr | f
3673 return ret
3674 }
3675
3676 func makeRunes(f flag, v []rune, t Type) Value {
3677 ret := New(t).Elem()
3678 ret.setRunes(v)
3679 ret.flag = ret.flag&^flagAddr | f
3680 return ret
3681 }
3682
3683
3684
3685
3686
3687
3688
3689 func cvtInt(v Value, t Type) Value {
3690 return makeInt(v.flag.ro(), uint64(v.Int()), t)
3691 }
3692
3693
3694 func cvtUint(v Value, t Type) Value {
3695 return makeInt(v.flag.ro(), v.Uint(), t)
3696 }
3697
3698
3699 func cvtFloatInt(v Value, t Type) Value {
3700 return makeInt(v.flag.ro(), uint64(int64(v.Float())), t)
3701 }
3702
3703
3704 func cvtFloatUint(v Value, t Type) Value {
3705 return makeInt(v.flag.ro(), uint64(v.Float()), t)
3706 }
3707
3708
3709 func cvtIntFloat(v Value, t Type) Value {
3710 return makeFloat(v.flag.ro(), float64(v.Int()), t)
3711 }
3712
3713
3714 func cvtUintFloat(v Value, t Type) Value {
3715 return makeFloat(v.flag.ro(), float64(v.Uint()), t)
3716 }
3717
3718
3719 func cvtFloat(v Value, t Type) Value {
3720 if v.Type().Kind() == Float32 && t.Kind() == Float32 {
3721
3722
3723
3724 return makeFloat32(v.flag.ro(), *(*float32)(v.ptr), t)
3725 }
3726 return makeFloat(v.flag.ro(), v.Float(), t)
3727 }
3728
3729
3730 func cvtComplex(v Value, t Type) Value {
3731 return makeComplex(v.flag.ro(), v.Complex(), t)
3732 }
3733
3734
3735 func cvtIntString(v Value, t Type) Value {
3736 s := "\uFFFD"
3737 if x := v.Int(); int64(rune(x)) == x {
3738 s = string(rune(x))
3739 }
3740 return makeString(v.flag.ro(), s, t)
3741 }
3742
3743
3744 func cvtUintString(v Value, t Type) Value {
3745 s := "\uFFFD"
3746 if x := v.Uint(); uint64(rune(x)) == x {
3747 s = string(rune(x))
3748 }
3749 return makeString(v.flag.ro(), s, t)
3750 }
3751
3752
3753 func cvtBytesString(v Value, t Type) Value {
3754 return makeString(v.flag.ro(), string(v.Bytes()), t)
3755 }
3756
3757
3758 func cvtStringBytes(v Value, t Type) Value {
3759 return makeBytes(v.flag.ro(), []byte(v.String()), t)
3760 }
3761
3762
3763 func cvtRunesString(v Value, t Type) Value {
3764 return makeString(v.flag.ro(), string(v.runes()), t)
3765 }
3766
3767
3768 func cvtStringRunes(v Value, t Type) Value {
3769 return makeRunes(v.flag.ro(), []rune(v.String()), t)
3770 }
3771
3772
3773 func cvtSliceArrayPtr(v Value, t Type) Value {
3774 n := t.Elem().Len()
3775 if n > v.Len() {
3776 panic("reflect: cannot convert slice with length " + itoa.Itoa(v.Len()) + " to pointer to array with length " + itoa.Itoa(n))
3777 }
3778 h := (*unsafeheader.Slice)(v.ptr)
3779 return Value{t.common(), h.Data, v.flag&^(flagIndir|flagAddr|flagKindMask) | flag(Pointer)}
3780 }
3781
3782
3783 func cvtSliceArray(v Value, t Type) Value {
3784 n := t.Len()
3785 if n > v.Len() {
3786 panic("reflect: cannot convert slice with length " + itoa.Itoa(v.Len()) + " to array with length " + itoa.Itoa(n))
3787 }
3788 h := (*unsafeheader.Slice)(v.ptr)
3789 typ := t.common()
3790 ptr := h.Data
3791 c := unsafe_New(typ)
3792 typedmemmove(typ, c, ptr)
3793 ptr = c
3794
3795 return Value{typ, ptr, v.flag&^(flagAddr|flagKindMask) | flag(Array)}
3796 }
3797
3798
3799 func cvtDirect(v Value, typ Type) Value {
3800 f := v.flag
3801 t := typ.common()
3802 ptr := v.ptr
3803 if f&flagAddr != 0 {
3804
3805 c := unsafe_New(t)
3806 typedmemmove(t, c, ptr)
3807 ptr = c
3808 f &^= flagAddr
3809 }
3810 return Value{t, ptr, v.flag.ro() | f}
3811 }
3812
3813
3814 func cvtT2I(v Value, typ Type) Value {
3815 target := unsafe_New(typ.common())
3816 x := valueInterface(v, false)
3817 if typ.NumMethod() == 0 {
3818 *(*any)(target) = x
3819 } else {
3820 ifaceE2I(typ.common(), x, target)
3821 }
3822 return Value{typ.common(), target, v.flag.ro() | flagIndir | flag(Interface)}
3823 }
3824
3825
3826 func cvtI2I(v Value, typ Type) Value {
3827 if v.IsNil() {
3828 ret := Zero(typ)
3829 ret.flag |= v.flag.ro()
3830 return ret
3831 }
3832 return cvtT2I(v.Elem(), typ)
3833 }
3834
3835
3836
3837
3838 func chancap(ch unsafe.Pointer) int
3839
3840
3841 func chanclose(ch unsafe.Pointer)
3842
3843
3844 func chanlen(ch unsafe.Pointer) int
3845
3846
3847
3848
3849
3850
3851
3852
3853
3854 func chanrecv(ch unsafe.Pointer, nb bool, val unsafe.Pointer) (selected, received bool)
3855
3856
3857 func chansend0(ch unsafe.Pointer, val unsafe.Pointer, nb bool) bool
3858
3859 func chansend(ch unsafe.Pointer, val unsafe.Pointer, nb bool) bool {
3860 contentEscapes(val)
3861 return chansend0(ch, val, nb)
3862 }
3863
3864 func makechan(typ *abi.Type, size int) (ch unsafe.Pointer)
3865 func makemap(t *abi.Type, cap int) (m unsafe.Pointer)
3866
3867
3868 func mapaccess(t *abi.Type, m unsafe.Pointer, key unsafe.Pointer) (val unsafe.Pointer)
3869
3870
3871 func mapaccess_faststr(t *abi.Type, m unsafe.Pointer, key string) (val unsafe.Pointer)
3872
3873
3874 func mapassign0(t *abi.Type, m unsafe.Pointer, key, val unsafe.Pointer)
3875
3876
3877
3878
3879
3880
3881
3882
3883
3884
3885
3886 func mapassign(t *abi.Type, m unsafe.Pointer, key, val unsafe.Pointer) {
3887 contentEscapes(key)
3888 contentEscapes(val)
3889 mapassign0(t, m, key, val)
3890 }
3891
3892
3893 func mapassign_faststr0(t *abi.Type, m unsafe.Pointer, key string, val unsafe.Pointer)
3894
3895 func mapassign_faststr(t *abi.Type, m unsafe.Pointer, key string, val unsafe.Pointer) {
3896 contentEscapes((*unsafeheader.String)(unsafe.Pointer(&key)).Data)
3897 contentEscapes(val)
3898 mapassign_faststr0(t, m, key, val)
3899 }
3900
3901
3902 func mapdelete(t *abi.Type, m unsafe.Pointer, key unsafe.Pointer)
3903
3904
3905 func mapdelete_faststr(t *abi.Type, m unsafe.Pointer, key string)
3906
3907
3908 func mapiterinit(t *abi.Type, m unsafe.Pointer, it *hiter)
3909
3910
3911 func mapiterkey(it *hiter) (key unsafe.Pointer)
3912
3913
3914 func mapiterelem(it *hiter) (elem unsafe.Pointer)
3915
3916
3917 func mapiternext(it *hiter)
3918
3919
3920 func maplen(m unsafe.Pointer) int
3921
3922 func mapclear(t *abi.Type, m unsafe.Pointer)
3923
3924
3925
3926
3927
3928
3929
3930
3931
3932
3933
3934
3935
3936
3937
3938
3939
3940
3941
3942
3943
3944
3945
3946
3947
3948
3949
3950 func call(stackArgsType *abi.Type, f, stackArgs unsafe.Pointer, stackArgsSize, stackRetOffset, frameSize uint32, regArgs *abi.RegArgs)
3951
3952 func ifaceE2I(t *abi.Type, src any, dst unsafe.Pointer)
3953
3954
3955
3956
3957 func memmove(dst, src unsafe.Pointer, size uintptr)
3958
3959
3960
3961
3962 func typedmemmove(t *abi.Type, dst, src unsafe.Pointer)
3963
3964
3965
3966
3967 func typedmemclr(t *abi.Type, ptr unsafe.Pointer)
3968
3969
3970
3971
3972
3973 func typedmemclrpartial(t *abi.Type, ptr unsafe.Pointer, off, size uintptr)
3974
3975
3976
3977
3978
3979 func typedslicecopy(t *abi.Type, dst, src unsafeheader.Slice) int
3980
3981
3982
3983
3984
3985 func typedarrayclear(elemType *abi.Type, ptr unsafe.Pointer, len int)
3986
3987
3988 func typehash(t *abi.Type, p unsafe.Pointer, h uintptr) uintptr
3989
3990 func verifyNotInHeapPtr(p uintptr) bool
3991
3992
3993 func growslice(t *abi.Type, old unsafeheader.Slice, num int) unsafeheader.Slice
3994
3995
3996 func unsafeslice(t *abi.Type, ptr unsafe.Pointer, len int)
3997
3998
3999
4000
4001 func escapes(x any) {
4002 if dummy.b {
4003 dummy.x = x
4004 }
4005 }
4006
4007 var dummy struct {
4008 b bool
4009 x any
4010 }
4011
4012
4013
4014
4015
4016 func contentEscapes(x unsafe.Pointer) {
4017 if dummy.b {
4018 escapes(*(*any)(x))
4019 }
4020 }
4021
4022
4023
4024
4025
4026
4027
4028 func noescape(p unsafe.Pointer) unsafe.Pointer {
4029 return abi.NoEscape(p)
4030 }
4031