Source file
src/reflect/value.go
1
2
3
4
5 package reflect
6
7 import (
8 "errors"
9 "internal/abi"
10 "internal/goarch"
11 "internal/strconv"
12 "internal/unsafeheader"
13 "iter"
14 "math"
15 "runtime"
16 "unsafe"
17 )
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
// Value is the reflection interface to a Go value.
//
// The zero Value represents no value. Its IsValid method returns false,
// and methods that require a concrete value panic with a *ValueError.
type Value struct {
	// typ_ holds the type of the value represented by this Value.
	// Access it through the typ method, which routes the read through
	// abi.NoEscape so that using the type does not force v to escape.
	typ_ *abi.Type

	// ptr is the data word. If flagIndir is set it points to the value's
	// storage; otherwise it holds the value itself (only possible for
	// pointer-shaped values — see packEfaceData/unpackEface).
	ptr unsafe.Pointer

	// flag holds metadata about the value:
	//   - the low flagKindWidth bits give the value's Kind,
	//   - flagStickyRO/flagEmbedRO mark values reached via unexported fields,
	//   - flagIndir records whether ptr points at the data,
	//   - flagAddr records addressability,
	//   - flagMethod marks a method value, with the method index stored
	//     in the bits above flagMethodShift.
	flag
}
71
// flag packs a Value's metadata (kind, read-only state, indirection,
// addressability, method index) into a single uintptr.
type flag uintptr

const (
	flagKindWidth        = 5                          // number of low bits reserved for the Kind
	flagKindMask    flag = 1<<flagKindWidth - 1       // mask extracting the Kind bits
	flagStickyRO    flag = 1 << 5                     // obtained via an unexported, non-embedded field
	flagEmbedRO     flag = 1 << 6                     // obtained via an unexported embedded field (see Value.Field)
	flagIndir       flag = 1 << 7                     // ptr holds a pointer to the data
	flagAddr        flag = 1 << 8                     // value is addressable (CanAddr reports true)
	flagMethod      flag = 1 << 9                     // value is a method value
	flagMethodShift      = 10                         // method index occupies the bits above this shift
	flagRO          flag = flagStickyRO | flagEmbedRO // either read-only bit
)
85
86 func (f flag) kind() Kind {
87 return Kind(f & flagKindMask)
88 }
89
90 func (f flag) ro() flag {
91 if f&flagRO != 0 {
92 return flagStickyRO
93 }
94 return 0
95 }
96
97
98
99
// typ returns the type descriptor of the value.
//
// The read goes through abi.NoEscape so that inspecting a Value's type
// does not, on its own, cause v to escape to the heap. This relies on
// *abi.Type values being kept alive independently of v (type descriptors
// are never freed) — the pointer remains valid even though escape
// analysis cannot see it.
func (v Value) typ() *abi.Type {
	return (*abi.Type)(abi.NoEscape(unsafe.Pointer(v.typ_)))
}
108
109
110
111
// pointer returns the underlying pointer represented by v.
// v must be a pointer-shaped value: exactly one word in size and
// containing a pointer (pointer, chan, map, unsafe.Pointer, func).
func (v Value) pointer() unsafe.Pointer {
	if v.typ().Size() != goarch.PtrSize || !v.typ().Pointers() {
		panic("can't call pointer on a non-pointer Value")
	}
	// With flagIndir set, ptr points at the word; otherwise it is the word.
	if v.flag&flagIndir != 0 {
		return *(*unsafe.Pointer)(v.ptr)
	}
	return v.ptr
}
121
122
// packEface converts v to the empty interface.
func packEface(v Value) any {
	return *(*any)(unsafe.Pointer(&abi.EmptyInterface{
		Type: v.typ(),
		Data: packEfaceData(v),
	}))
}

// packEfaceData returns the data word to store in an interface holding v,
// copying the value if necessary so the interface cannot alias v's storage.
func packEfaceData(v Value) unsafe.Pointer {
	t := v.typ()
	switch {
	case !t.IsDirectIface():
		// Value is stored indirectly in interfaces; v must be indirect too.
		if v.flag&flagIndir == 0 {
			panic("bad indir")
		}
		ptr := v.ptr
		if v.flag&flagAddr != 0 {
			// The value is addressable, so it could be mutated after packing;
			// copy it into fresh storage to keep the interface immutable.
			c := unsafe_New(t)
			typedmemmove(t, c, ptr)
			ptr = c
		}
		return ptr
	case v.flag&flagIndir != 0:
		// Pointer-shaped value stored indirectly: load the word.
		return *(*unsafe.Pointer)(v.ptr)
	default:
		// Pointer-shaped value held directly in ptr.
		return v.ptr
	}
}
156
157
// unpackEface converts the empty interface i to a Value.
func unpackEface(i any) Value {
	e := (*abi.EmptyInterface)(unsafe.Pointer(&i))
	// NOTE: don't read e.Data until we know whether it is really a pointer.
	t := e.Type
	if t == nil {
		return Value{}
	}
	f := flag(t.Kind())
	if !t.IsDirectIface() {
		// Interface stores a pointer to the data, not the data itself.
		f |= flagIndir
	}
	return Value{t, e.Data, f}
}
170
171
172
173
// A ValueError occurs when a Value method is invoked on a Value
// that does not support it. Such cases are documented in the
// description of each method.
type ValueError struct {
	Method string // name of the method that was called inappropriately
	Kind   Kind   // kind of the receiver Value; zero means an invalid (zero) Value
}
178
179 func (e *ValueError) Error() string {
180 if e.Kind == 0 {
181 return "reflect: call of " + e.Method + " on zero Value"
182 }
183 return "reflect: call of " + e.Method + " on " + e.Kind.String() + " Value"
184 }
185
186
// valueMethodName returns the name of the exported reflect.Value method
// found on the call stack, for use in panic and error messages.
// It inspects at most five frames and reports "unknown method" if no
// matching frame is found.
func valueMethodName() string {
	const prefix = "reflect.Value."
	var pcs [5]uintptr
	frames := runtime.CallersFrames(pcs[:runtime.Callers(1, pcs[:])])
	for {
		frame, more := frames.Next()
		fn := frame.Function
		if len(fn) > len(prefix) && fn[:len(prefix)] == prefix {
			// Only exported methods (leading capital) qualify; internal
			// helpers like reflect.Value.call are skipped.
			if c := fn[len(prefix)]; 'A' <= c && c <= 'Z' {
				return fn
			}
		}
		if !more {
			return "unknown method"
		}
	}
}
205
206
// nonEmptyInterface is the header for an interface value with methods.
// It mirrors the runtime's layout: an itab pointer followed by the data word.
type nonEmptyInterface struct {
	itab *abi.ITab      // method table; nil for a nil interface value
	word unsafe.Pointer // data word
}
211
212
213
214
215
216
217
// mustBe panics with a *ValueError if f's kind is not expected.
// Calling f.mustBe(Bool) is shorthand for the kind check that many
// Value methods perform on entry.
func (f flag) mustBe(expected Kind) {
	// The kind extraction is written out inline rather than calling
	// f.kind() — presumably to keep mustBe within the inlining budget;
	// confirm with -gcflags=-m before changing.
	if Kind(f&flagKindMask) != expected {
		panic(&ValueError{valueMethodName(), f.kind()})
	}
}
224
225
226
// mustBeExported panics if f records that the value was obtained using
// an unexported field, or if the value is invalid (zero flag).
func (f flag) mustBeExported() {
	// Fast path: a single combined test; the slow path sorts out which
	// condition failed and builds the panic message.
	if f == 0 || f&flagRO != 0 {
		f.mustBeExportedSlow()
	}
}

// mustBeExportedSlow is the outlined panic path of mustBeExported.
func (f flag) mustBeExportedSlow() {
	if f == 0 {
		panic(&ValueError{valueMethodName(), Invalid})
	}
	if f&flagRO != 0 {
		panic("reflect: " + valueMethodName() + " using value obtained using unexported field")
	}
}
241
242
243
244
// mustBeAssignable panics if f records that the value is not assignable:
// either it was obtained using an unexported field or it is unaddressable.
func (f flag) mustBeAssignable() {
	// Fast path mirrors CanSet; the slow path produces a precise message.
	if f&flagRO != 0 || f&flagAddr == 0 {
		f.mustBeAssignableSlow()
	}
}

// mustBeAssignableSlow is the outlined panic path of mustBeAssignable.
func (f flag) mustBeAssignableSlow() {
	if f == 0 {
		panic(&ValueError{valueMethodName(), Invalid})
	}
	// The checks are ordered so the unexported-field message wins when
	// both conditions hold.
	if f&flagRO != 0 {
		panic("reflect: " + valueMethodName() + " using value obtained using unexported field")
	}
	if f&flagAddr == 0 {
		panic("reflect: " + valueMethodName() + " using unaddressable value")
	}
}
263
264
265
266
267
268
// Addr returns a pointer value representing the address of v.
// It panics if CanAddr() returns false.
func (v Value) Addr() Value {
	if v.flag&flagAddr == 0 {
		panic("reflect.Value.Addr of unaddressable value")
	}
	// Preserve flagRO instead of using v.flag.ro() so that the RO flag
	// bits carry through unchanged to the pointer value.
	fl := v.flag & flagRO
	return Value{ptrTo(v.typ()), v.ptr, fl | flag(Pointer)}
}
278
279
280
// Bool returns v's underlying value.
// It panics if v's kind is not Bool.
func (v Value) Bool() bool {
	// The kind check is open-coded and the panic outlined — presumably
	// so Bool itself stays inlinable; confirm with -gcflags=-m.
	if v.kind() != Bool {
		v.panicNotBool()
	}
	return *(*bool)(v.ptr)
}

// panicNotBool raises the *ValueError for a non-Bool receiver.
func (v Value) panicNotBool() {
	v.mustBe(Bool)
}
292
// bytesType is the cached type descriptor for []byte, enabling the
// fast path in Value.Bytes.
var bytesType = rtypeOf(([]byte)(nil))
294
295
296
297
// Bytes returns v's underlying value.
// It panics if v's underlying value is not a slice of bytes or
// an addressable array of bytes.
func (v Value) Bytes() []byte {
	// Fast path for the common case of an exactly-[]byte value.
	// A pointer-identity comparison on the type descriptor suffices.
	if v.typ_ == bytesType {
		return *(*[]byte)(v.ptr)
	}
	return v.bytesSlow()
}
305
// bytesSlow handles the remaining Bytes cases: named byte-slice types
// and addressable byte arrays.
func (v Value) bytesSlow() []byte {
	switch v.kind() {
	case Slice:
		if v.typ().Elem().Kind() != abi.Uint8 {
			panic("reflect.Value.Bytes of non-byte slice")
		}
		// Slice headers are always stored indirectly (flagIndir set).
		return *(*[]byte)(v.ptr)
	case Array:
		if v.typ().Elem().Kind() != abi.Uint8 {
			panic("reflect.Value.Bytes of non-byte array")
		}
		if !v.CanAddr() {
			panic("reflect.Value.Bytes of unaddressable byte array")
		}
		// Build a slice aliasing the array's storage.
		p := (*byte)(v.ptr)
		n := int((*arrayType)(unsafe.Pointer(v.typ())).Len)
		return unsafe.Slice(p, n)
	}
	panic(&ValueError{"reflect.Value.Bytes", v.kind()})
}
327
328
329
330 func (v Value) runes() []rune {
331 v.mustBe(Slice)
332 if v.typ().Elem().Kind() != abi.Int32 {
333 panic("reflect.Value.Bytes of non-rune slice")
334 }
335
336 return *(*[]rune)(v.ptr)
337 }
338
339
340
341
342
343
// CanAddr reports whether the value's address can be obtained with Addr.
// Such values are called addressable. A value can be addressed if it is
// an element of a slice, the field of an addressable struct, the element
// of an addressable array, or the result of dereferencing a pointer.
// If CanAddr returns false, calling Addr will panic.
func (v Value) CanAddr() bool {
	return v.flag&flagAddr != 0
}

// CanSet reports whether the value of v can be changed.
// A Value can be changed only if it is addressable and was not obtained
// by the use of unexported struct fields.
// If CanSet returns false, calling Set or any type-specific setter
// (e.g., SetBool, SetInt) will panic.
func (v Value) CanSet() bool {
	return v.flag&(flagAddr|flagRO) == flagAddr
}
356
357
358
359
360
361
362
363
364
365
// Call calls the function v with the input arguments in.
// As in Go, each input argument must be assignable to the type of the
// function's corresponding input parameter. Call panics if v's Kind is
// not Func, or if the arguments are obtained from unexported fields.
// It returns the output results as Values.
func (v Value) Call(in []Value) []Value {
	v.mustBe(Func)
	v.mustBeExported()
	return v.call("Call", in)
}

// CallSlice calls the variadic function v with the input arguments in,
// assigning the slice in[len(in)-1] directly to v's final variadic
// parameter. It panics if v's Kind is not Func or if v is not variadic.
// It returns the output results as Values.
func (v Value) CallSlice(in []Value) []Value {
	v.mustBe(Func)
	v.mustBeExported()
	return v.call("CallSlice", in)
}
385
// callGC, when true, forces a garbage collection immediately before and
// after every reflective call — presumably set only by tests to shake
// out GC bugs in argument frames; confirm against the package's tests.
var callGC bool

// debugReflectCall enables verbose dumps of the computed ABI layout and
// register assignments during Value.call.
const debugReflectCall = false
389
// call is the shared implementation of Call and CallSlice.
// op is "Call" or "CallSlice" and selects the variadic-argument rules.
//
// It validates the arguments, lays out a call frame (stack words plus
// registers) according to the ABI description from funcLayout, copies
// the receiver and arguments into place, invokes the assembly trampoline
// `call`, and unpacks the results into fresh Values.
func (v Value) call(op string, in []Value) []Value {
	// Get function pointer, type.
	t := (*funcType)(unsafe.Pointer(v.typ()))
	var (
		fn       unsafe.Pointer
		rcvr     Value
		rcvrtype *abi.Type
	)
	if v.flag&flagMethod != 0 {
		// Method value: resolve receiver type, method func type, and fn.
		rcvr = v
		rcvrtype, t, fn = methodReceiver(op, v, int(v.flag)>>flagMethodShift)
	} else if v.flag&flagIndir != 0 {
		fn = *(*unsafe.Pointer)(v.ptr)
	} else {
		fn = v.ptr
	}

	if fn == nil {
		panic("reflect.Value.Call: call of nil function")
	}

	// Check argument counts against the function signature.
	isSlice := op == "CallSlice"
	n := t.NumIn()
	isVariadic := t.IsVariadic()
	if isSlice {
		if !isVariadic {
			panic("reflect: CallSlice of non-variadic function")
		}
		if len(in) < n {
			panic("reflect: CallSlice with too few input arguments")
		}
		if len(in) > n {
			panic("reflect: CallSlice with too many input arguments")
		}
	} else {
		if isVariadic {
			n-- // the variadic parameter is filled separately below
		}
		if len(in) < n {
			panic("reflect: Call with too few input arguments")
		}
		if !isVariadic && len(in) > n {
			panic("reflect: Call with too many input arguments")
		}
	}
	// Reject zero Values and type-incompatible arguments up front.
	for _, x := range in {
		if x.Kind() == Invalid {
			panic("reflect: " + op + " using zero Value argument")
		}
	}
	for i := 0; i < n; i++ {
		if xt, targ := in[i].Type(), t.In(i); !xt.AssignableTo(toRType(targ)) {
			panic("reflect: " + op + " using " + xt.String() + " as type " + stringFor(targ))
		}
	}
	if !isSlice && isVariadic {
		// Collect the trailing arguments into a freshly made []T.
		m := len(in) - n
		slice := MakeSlice(toRType(t.In(n)), m, m)
		elem := toRType(t.In(n)).Elem()
		for i := 0; i < m; i++ {
			x := in[n+i]
			if xt := x.Type(); !xt.AssignableTo(elem) {
				panic("reflect: cannot use " + xt.String() + " as type " + elem.String() + " in " + op)
			}
			slice.Index(i).Set(x)
		}
		origIn := in
		in = make([]Value, n+1)
		copy(in[:n], origIn)
		in[n] = slice
	}

	nin := len(in)
	if nin != t.NumIn() {
		panic("reflect.Value.Call: wrong argument count")
	}
	nout := t.NumOut()

	// Register argument space for the call.
	var regArgs abi.RegArgs

	// Compute frame type and ABI layout (receiver included if present).
	frametype, framePool, abid := funcLayout(t, rcvrtype)

	// Allocate a chunk of memory for the stack-assigned arguments.
	var stackArgs unsafe.Pointer
	if frametype.Size() != 0 {
		if nout == 0 {
			// No results: the frame can come from (and return to) a pool.
			stackArgs = framePool.Get().(unsafe.Pointer)
		} else {
			// Results may alias the frame, so it cannot be pooled.
			stackArgs = unsafe_New(frametype)
		}
	}
	frameSize := frametype.Size()

	if debugReflectCall {
		println("reflect.call", stringFor(&t.Type))
		abid.dump()
	}

	// Copy the receiver into its ABI-assigned slot (stack or register).
	inStart := 0
	if rcvrtype != nil {
		// The receiver is always parameter 0 in the method's ABI.
		switch st := abid.call.steps[0]; st.kind {
		case abiStepStack:
			storeRcvr(rcvr, stackArgs)
		case abiStepPointer:
			// Store in the pointer register too, so the GC sees it.
			storeRcvr(rcvr, unsafe.Pointer(&regArgs.Ptrs[st.ireg]))
			fallthrough
		case abiStepIntReg:
			storeRcvr(rcvr, unsafe.Pointer(&regArgs.Ints[st.ireg]))
		case abiStepFloatReg:
			storeRcvr(rcvr, unsafe.Pointer(&regArgs.Floats[st.freg]))
		default:
			panic("unknown ABI parameter kind")
		}
		inStart = 1
	}

	// Copy each argument into its assigned stack slot or registers.
	for i, v := range in {
		v.mustBeExported()
		targ := toRType(t.In(i))
		// Convert v to the exact parameter type (handles assignable-but-
		// not-identical types and interface conversion).
		v = v.assignTo("reflect.Value.Call", &targ.t, nil)
	stepsLoop:
		for _, st := range abid.call.stepsForValue(i + inStart) {
			switch st.kind {
			case abiStepStack:
				// A stack-assigned value is copied in one move.
				addr := add(stackArgs, st.stkOff, "precomputed stack arg offset")
				if v.flag&flagIndir != 0 {
					typedmemmove(&targ.t, addr, v.ptr)
				} else {
					*(*unsafe.Pointer)(addr) = v.ptr
				}
				// There is only one step for stack-assigned values.
				break stepsLoop
			case abiStepIntReg, abiStepPointer:
				if v.flag&flagIndir != 0 {
					offset := add(v.ptr, st.offset, "precomputed value offset")
					if st.kind == abiStepPointer {
						// Duplicate pointers into the Ptrs space so the GC
						// can see them while the call is in flight.
						regArgs.Ptrs[st.ireg] = *(*unsafe.Pointer)(offset)
					}
					intToReg(&regArgs, st.ireg, st.size, offset)
				} else {
					if st.kind == abiStepPointer {
						regArgs.Ptrs[st.ireg] = v.ptr
					}
					regArgs.Ints[st.ireg] = uintptr(v.ptr)
				}
			case abiStepFloatReg:
				// Direct (pointer-shaped) values never land in FP registers.
				if v.flag&flagIndir == 0 {
					panic("attempted to copy pointer to FP register")
				}
				offset := add(v.ptr, st.offset, "precomputed value offset")
				floatToReg(&regArgs, st.freg, st.size, offset)
			default:
				panic("unknown ABI part kind")
			}
		}
	}

	// Account for the spill space the callee may use for register args.
	frameSize = align(frameSize, goarch.PtrSize)
	frameSize += abid.spill

	// Mark which return registers will contain pointers, for the GC.
	regArgs.ReturnIsPtr = abid.outRegPtrs

	if debugReflectCall {
		regArgs.Dump()
	}

	// For testing: force a GC to verify the frame's pointer maps.
	if callGC {
		runtime.GC()
	}

	// Invoke the call trampoline.
	call(frametype, fn, stackArgs, uint32(frametype.Size()), uint32(abid.retOffset), uint32(frameSize), &regArgs)

	// For testing: force a GC to verify results survive.
	if callGC {
		runtime.GC()
	}

	var ret []Value
	if nout == 0 {
		// No results: scrub and recycle the frame.
		if stackArgs != nil {
			typedmemclr(frametype, stackArgs)
			framePool.Put(stackArgs)
		}
	} else {
		if stackArgs != nil {
			// Zero the argument portion so stale pointers there don't
			// keep garbage alive; the result portion is still live.
			typedmemclrpartial(frametype, stackArgs, 0, abid.retOffset)
		}

		// Wrap each result in a Value, copying register-assigned results
		// into fresh heap storage.
		ret = make([]Value, nout)
		for i := 0; i < nout; i++ {
			tv := t.Out(i)
			if tv.Size() == 0 {
				// Zero-sized results need no storage at all.
				ret[i] = Zero(toRType(tv))
				continue
			}
			steps := abid.ret.stepsForValue(i)
			if st := steps[0]; st.kind == abiStepStack {
				// Stack-assigned result: point directly into the frame
				// (which is why the frame was not pooled above).
				fl := flagIndir | flag(tv.Kind())
				ret[i] = Value{tv, add(stackArgs, st.stkOff, "tv.Size() != 0"), fl}
				continue
			}

			// Register-assigned results:
			if tv.IsDirectIface() {
				// Pointer-shaped: the single pointer register is the value.
				if steps[0].kind != abiStepPointer {
					print("kind=", steps[0].kind, ", type=", stringFor(tv), "\n")
					panic("mismatch between ABI description and types")
				}
				ret[i] = Value{tv, regArgs.Ptrs[steps[0].ireg], flag(tv.Kind())}
				continue
			}

			// Multi-register result: assemble it piecewise into fresh
			// heap memory, reading each component back from regArgs.
			s := unsafe_New(tv)
			for _, st := range steps {
				switch st.kind {
				case abiStepIntReg:
					offset := add(s, st.offset, "precomputed value offset")
					intFromReg(&regArgs, st.ireg, st.size, offset)
				case abiStepPointer:
					s := add(s, st.offset, "precomputed value offset")
					*((*unsafe.Pointer)(s)) = regArgs.Ptrs[st.ireg]
				case abiStepFloatReg:
					offset := add(s, st.offset, "precomputed value offset")
					floatFromReg(&regArgs, st.freg, st.size, offset)
				case abiStepStack:
					panic("register-based return value has stack component")
				default:
					panic("unknown ABI part kind")
				}
			}
			ret[i] = Value{tv, s, flagIndir | flag(tv.Kind())}
		}
	}

	return ret
}
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
// callReflect is the trampoline target for functions created by MakeFunc.
// The runtime calls it with the caller's argument frame and registers;
// it unpacks the arguments into Values, invokes the user's Go function,
// and packs the results back into the frame/registers. retValid is set
// once the result area holds valid (GC-visible) values.
func callReflect(ctxt *makeFuncImpl, frame unsafe.Pointer, retValid *bool, regs *abi.RegArgs) {
	if callGC {
		// Test hook: force a GC while only the caller's stack map
		// describes the frame, to validate those pointer maps.
		runtime.GC()
	}
	ftyp := ctxt.ftyp
	f := ctxt.fn

	_, _, abid := funcLayout(ftyp, nil)

	// Copy arguments into Values.
	ptr := frame
	in := make([]Value, 0, int(ftyp.InCount))
	for i, typ := range ftyp.InSlice() {
		if typ.Size() == 0 {
			in = append(in, Zero(toRType(typ)))
			continue
		}
		v := Value{typ, nil, flag(typ.Kind())}
		steps := abid.call.stepsForValue(i)
		if st := steps[0]; st.kind == abiStepStack {
			if !typ.IsDirectIface() {
				// Indirect value on the stack: copy it out of the frame
				// so the Value does not alias caller-owned memory.
				v.ptr = unsafe_New(typ)
				if typ.Size() > 0 {
					typedmemmove(typ, v.ptr, add(ptr, st.stkOff, "typ.size > 0"))
				}
				v.flag |= flagIndir
			} else {
				// Pointer-shaped: the frame word is the value.
				v.ptr = *(*unsafe.Pointer)(add(ptr, st.stkOff, "1-ptr"))
			}
		} else {
			if !typ.IsDirectIface() {
				// Register-assigned, indirect: reassemble the value from
				// its register components into fresh heap storage.
				v.flag |= flagIndir
				v.ptr = unsafe_New(typ)
				for _, st := range steps {
					switch st.kind {
					case abiStepIntReg:
						offset := add(v.ptr, st.offset, "precomputed value offset")
						intFromReg(regs, st.ireg, st.size, offset)
					case abiStepPointer:
						s := add(v.ptr, st.offset, "precomputed value offset")
						*((*unsafe.Pointer)(s)) = regs.Ptrs[st.ireg]
					case abiStepFloatReg:
						offset := add(v.ptr, st.offset, "precomputed value offset")
						floatFromReg(regs, st.freg, st.size, offset)
					case abiStepStack:
						panic("register-based return value has stack component")
					default:
						panic("unknown ABI part kind")
					}
				}
			} else {
				// Pointer-shaped value in a register: it must be exactly
				// one pointer register, or the ABI description is wrong.
				if steps[0].kind != abiStepPointer {
					print("kind=", steps[0].kind, ", type=", stringFor(typ), "\n")
					panic("mismatch between ABI description and types")
				}
				v.ptr = regs.Ptrs[steps[0].ireg]
			}
		}
		in = append(in, v)
	}

	// Call the user's function.
	out := f(in)
	numOut := ftyp.NumOut()
	if len(out) != numOut {
		panic("reflect: wrong return count from function created by MakeFunc")
	}

	// Copy results back into the frame / result registers.
	if numOut > 0 {
		for i, typ := range ftyp.OutSlice() {
			v := out[i]
			if v.typ() == nil {
				panic("reflect: function created by MakeFunc using " + funcName(f) +
					" returned zero Value")
			}
			if v.flag&flagRO != 0 {
				panic("reflect: function created by MakeFunc using " + funcName(f) +
					" returned value obtained from unexported field")
			}
			if typ.Size() == 0 {
				continue
			}

			// Convert v to the exact declared result type. This may
			// allocate (interface conversion), which is acceptable here.
			v = v.assignTo("reflect.MakeFunc", typ, nil)
		stepsLoop:
			for _, st := range abid.ret.stepsForValue(i) {
				switch st.kind {
				case abiStepStack:
					addr := add(ptr, st.stkOff, "precomputed stack arg offset")
					// memmove (not typedmemmove): the destination slot is
					// in the caller's frame and covered by its stack map;
					// no write barrier is needed or wanted here.
					if v.flag&flagIndir != 0 {
						memmove(addr, v.ptr, st.size)
					} else {
						// Pointer-shaped value: store the word itself.
						*(*uintptr)(addr) = uintptr(v.ptr)
					}
					// There is only one step for stack-assigned values.
					break stepsLoop
				case abiStepIntReg, abiStepPointer:
					if v.flag&flagIndir != 0 {
						offset := add(v.ptr, st.offset, "precomputed value offset")
						intToReg(regs, st.ireg, st.size, offset)
					} else {
						// Pointer-shaped value going back in a register.
						// Note: only Ints is written here; ReturnIsPtr tells
						// the runtime which result registers hold pointers.
						regs.Ints[st.ireg] = uintptr(v.ptr)
					}
				case abiStepFloatReg:
					if v.flag&flagIndir == 0 {
						panic("attempted to copy pointer to FP register")
					}
					offset := add(v.ptr, st.offset, "precomputed value offset")
					floatToReg(regs, st.freg, st.size, offset)
				default:
					panic("unknown ABI part kind")
				}
			}
		}
	}

	// Announce that the return values are now valid: from this point the
	// runtime may treat the result area as live pointers.
	*retValid = true

	// Keep the results alive until they are safely in the frame;
	// the frame itself is not a GC root for them.
	runtime.KeepAlive(out)

	// Keep ctxt (and hence the function's code and closure) alive for
	// the duration of the call.
	runtime.KeepAlive(ctxt)
}
869
870
871
872
873
874
875
876
// methodReceiver resolves method number methodIndex on receiver v,
// returning the actual receiver type, the method's function type, and a
// pointer to the code word for the method's implementation.
// v.typ() describes the receiver; for interfaces, the method is looked
// up through the itab at call time.
func methodReceiver(op string, v Value, methodIndex int) (rcvrtype *abi.Type, t *funcType, fn unsafe.Pointer) {
	i := methodIndex
	if v.typ().Kind() == abi.Interface {
		tt := (*interfaceType)(unsafe.Pointer(v.typ()))
		if uint(i) >= uint(len(tt.Methods)) {
			panic("reflect: internal error: invalid method index")
		}
		m := &tt.Methods[i]
		if !tt.nameOff(m.Name).IsExported() {
			panic("reflect: " + op + " of unexported method")
		}
		iface := (*nonEmptyInterface)(v.ptr)
		if iface.itab == nil {
			panic("reflect: " + op + " of method on nil interface value")
		}
		// The dynamic receiver type and the fn slot come from the itab.
		rcvrtype = iface.itab.Type
		fn = unsafe.Pointer(&unsafe.Slice(&iface.itab.Fun[0], i+1)[i])
		t = (*funcType)(unsafe.Pointer(tt.typeOff(m.Typ)))
	} else {
		// Concrete receiver: index into the exported-method table.
		rcvrtype = v.typ()
		ms := v.typ().ExportedMethods()
		if uint(i) >= uint(len(ms)) {
			panic("reflect: internal error: invalid method index")
		}
		m := ms[i]
		if !nameOffFor(v.typ(), m.Name).IsExported() {
			panic("reflect: " + op + " of unexported method")
		}
		ifn := textOffFor(v.typ(), m.Ifn)
		fn = unsafe.Pointer(&ifn)
		t = (*funcType)(unsafe.Pointer(typeOffFor(v.typ(), m.Mtyp)))
	}
	return
}
911
912
913
914
915
// storeRcvr stores the receiver v, as a single ABI word, at p.
// p is a slot in a call frame or a register save area.
func storeRcvr(v Value, p unsafe.Pointer) {
	t := v.typ()
	if t.Kind() == abi.Interface {
		// Interface receiver: the data word of the interface is passed.
		iface := (*nonEmptyInterface)(v.ptr)
		*(*unsafe.Pointer)(p) = iface.word
	} else if v.flag&flagIndir != 0 && t.IsDirectIface() {
		// Pointer-shaped value stored indirectly: load the word.
		*(*unsafe.Pointer)(p) = *(*unsafe.Pointer)(v.ptr)
	} else {
		// Either the word itself, or a pointer to indirect storage —
		// both are passed as-is.
		*(*unsafe.Pointer)(p) = v.ptr
	}
}
928
929
930
// align rounds x up to the nearest multiple of n.
// n must be a power of two.
func align(x, n uintptr) uintptr {
	mask := n - 1
	return (x + mask) &^ mask
}
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
// callMethod is the trampoline target for method values created by
// Value.Method. The caller used the method-value ABI (no receiver); this
// function translates that frame into the method's own ABI (receiver
// first), invokes the method, then copies results back. retValid is set
// once the caller's result area holds valid values.
func callMethod(ctxt *methodValue, frame unsafe.Pointer, retValid *bool, regs *abi.RegArgs) {
	rcvr := ctxt.rcvr
	rcvrType, valueFuncType, methodFn := methodReceiver("call", rcvr, ctxt.method)

	// Two ABI layouts are in play:
	//   valueABI  — the caller's view (method value, no receiver),
	//   methodABI — the callee's view (receiver as parameter 0).
	// Each argument must be moved from its valueABI location to its
	// methodABI location, which may cross between stack and registers.
	_, _, valueABI := funcLayout(valueFuncType, nil)
	valueFrame, valueRegs := frame, regs
	methodFrameType, methodFramePool, methodABI := funcLayout(valueFuncType, rcvrType)

	// The method frame can be pooled: results are copied out before it
	// is recycled at the bottom of this function.
	methodFrame := methodFramePool.Get().(unsafe.Pointer)
	var methodRegs abi.RegArgs

	// Deposit the receiver in its methodABI slot (always parameter 0).
	switch st := methodABI.call.steps[0]; st.kind {
	case abiStepStack:
		storeRcvr(rcvr, methodFrame)
	case abiStepPointer:
		// Also store in Ptrs so the GC sees the receiver pointer.
		storeRcvr(rcvr, unsafe.Pointer(&methodRegs.Ptrs[st.ireg]))
		fallthrough
	case abiStepIntReg:
		storeRcvr(rcvr, unsafe.Pointer(&methodRegs.Ints[st.ireg]))
	case abiStepFloatReg:
		storeRcvr(rcvr, unsafe.Pointer(&methodRegs.Floats[st.freg]))
	default:
		panic("unknown ABI parameter kind")
	}

	// Translate each remaining argument. Value parameter i corresponds to
	// method parameter i+1 (shifted by the receiver).
	for i, t := range valueFuncType.InSlice() {
		valueSteps := valueABI.call.stepsForValue(i)
		methodSteps := methodABI.call.stepsForValue(i + 1)

		// Zero-sized arguments have no location in either ABI.
		if len(valueSteps) == 0 {
			if len(methodSteps) != 0 {
				panic("method ABI and value ABI do not align")
			}
			continue
		}

		// Four cases follow: stack→stack, stack→registers,
		// registers→stack, and registers→registers.
		if vStep := valueSteps[0]; vStep.kind == abiStepStack {
			mStep := methodSteps[0]
			// Case: stack → stack.
			if mStep.kind == abiStepStack {
				if vStep.size != mStep.size {
					panic("method ABI and value ABI do not align")
				}
				typedmemmove(t,
					add(methodFrame, mStep.stkOff, "precomputed stack offset"),
					add(valueFrame, vStep.stkOff, "precomputed stack offset"))
				continue
			}
			// Case: stack → registers. Scatter the stack bytes into the
			// method's assigned registers, one step at a time.
			for _, mStep := range methodSteps {
				from := add(valueFrame, vStep.stkOff+mStep.offset, "precomputed stack offset")
				switch mStep.kind {
				case abiStepPointer:
					// Duplicate pointers into Ptrs for GC visibility.
					methodRegs.Ptrs[mStep.ireg] = *(*unsafe.Pointer)(from)
					fallthrough
				case abiStepIntReg:
					intToReg(&methodRegs, mStep.ireg, mStep.size, from)
				case abiStepFloatReg:
					floatToReg(&methodRegs, mStep.freg, mStep.size, from)
				default:
					panic("unexpected method step")
				}
			}
			continue
		}
		// Case: registers → stack. Gather the caller's registers into the
		// method frame.
		if mStep := methodSteps[0]; mStep.kind == abiStepStack {
			for _, vStep := range valueSteps {
				to := add(methodFrame, mStep.stkOff+vStep.offset, "precomputed stack offset")
				switch vStep.kind {
				case abiStepPointer:
					*(*unsafe.Pointer)(to) = valueRegs.Ptrs[vStep.ireg]
				case abiStepIntReg:
					intFromReg(valueRegs, vStep.ireg, vStep.size, to)
				case abiStepFloatReg:
					floatFromReg(valueRegs, vStep.freg, vStep.size, to)
				default:
					panic("unexpected value step")
				}
			}
			continue
		}
		// Case: registers → registers. The step lists must correspond
		// one-to-one (same kinds, possibly different register numbers).
		if len(valueSteps) != len(methodSteps) {
			panic("method ABI and value ABI don't align")
		}
		for i, vStep := range valueSteps {
			mStep := methodSteps[i]
			if mStep.kind != vStep.kind {
				panic("method ABI and value ABI don't align")
			}
			switch vStep.kind {
			case abiStepPointer:
				// Copy the GC-visible pointer copy too.
				methodRegs.Ptrs[mStep.ireg] = valueRegs.Ptrs[vStep.ireg]
				fallthrough
			case abiStepIntReg:
				methodRegs.Ints[mStep.ireg] = valueRegs.Ints[vStep.ireg]
			case abiStepFloatReg:
				methodRegs.Floats[mStep.freg] = valueRegs.Floats[vStep.freg]
			default:
				panic("unexpected value step")
			}
		}
	}

	methodFrameSize := methodFrameType.Size()

	// Account for the callee's register spill space.
	methodFrameSize = align(methodFrameSize, goarch.PtrSize)
	methodFrameSize += methodABI.spill

	// Mark which result registers will contain pointers, for the GC.
	methodRegs.ReturnIsPtr = methodABI.outRegPtrs

	// Invoke the method through the call trampoline.
	call(methodFrameType, methodFn, methodFrame, uint32(methodFrameType.Size()), uint32(methodABI.retOffset), uint32(methodFrameSize), &methodRegs)

	// Copy results back to the caller. Register results first (the
	// caller reads valueRegs), then the stack-result region.
	if valueRegs != nil {
		*valueRegs = methodRegs
	}
	if retSize := methodFrameType.Size() - methodABI.retOffset; retSize > 0 {
		valueRet := add(valueFrame, valueABI.retOffset, "valueFrame's size > retOffset")
		methodRet := add(methodFrame, methodABI.retOffset, "methodFrame's size > retOffset")
		// Untyped memmove: the caller's stack map covers the destination.
		memmove(valueRet, methodRet, retSize)
	}

	// Announce that the return values are now valid: from this point the
	// runtime may treat the caller's result area as live pointers.
	*retValid = true

	// Scrub and recycle the method frame — safe only after the results
	// have been copied out above.
	typedmemclr(methodFrameType, methodFrame)
	methodFramePool.Put(methodFrame)

	// Keep ctxt (receiver and method identity) alive through the call.
	runtime.KeepAlive(ctxt)

	// Keep the caller's register struct alive until results are stored.
	runtime.KeepAlive(valueRegs)
}
1142
1143
1144 func funcName(f func([]Value) []Value) string {
1145 pc := *(*uintptr)(unsafe.Pointer(&f))
1146 rf := runtime.FuncForPC(pc)
1147 if rf != nil {
1148 return rf.Name()
1149 }
1150 return "closure"
1151 }
1152
1153
1154
// Cap returns v's capacity.
// It panics if v's Kind is not Array, Chan, Slice, or a pointer to Array.
func (v Value) Cap() int {
	// Inline the Slice case (the common one) so Cap stays cheap; the
	// panic-prone cases are outlined into capNonSlice.
	if v.kind() == Slice {
		return (*unsafeheader.Slice)(v.ptr).Cap
	}
	return v.capNonSlice()
}

// capNonSlice handles the non-slice kinds for Cap.
func (v Value) capNonSlice() int {
	k := v.kind()
	switch k {
	case Array:
		return v.typ().Len()
	case Chan:
		return chancap(v.pointer())
	case Ptr:
		// *[N]T: report the array length without dereferencing.
		if v.typ().Elem().Kind() == abi.Array {
			return v.typ().Elem().Len()
		}
		panic("reflect: call of reflect.Value.Cap on ptr to non-array Value")
	}
	panic(&ValueError{"reflect.Value.Cap", v.kind()})
}
1178
1179
1180
1181
// Close closes the channel v.
// It panics if v's Kind is not Chan, if v is a receive-only channel, or
// if v was obtained by the use of unexported struct fields.
func (v Value) Close() {
	v.mustBe(Chan)
	v.mustBeExported()
	tt := (*chanType)(unsafe.Pointer(v.typ()))
	if ChanDir(tt.Dir)&SendDir == 0 {
		panic("reflect: close of receive-only channel")
	}
	chanclose(v.pointer())
}
1192
1193
1194 func (v Value) CanComplex() bool {
1195 switch v.kind() {
1196 case Complex64, Complex128:
1197 return true
1198 default:
1199 return false
1200 }
1201 }
1202
1203
1204
// Complex returns v's underlying value, as a complex128.
// It panics if v's Kind is not Complex64 or Complex128.
func (v Value) Complex() complex128 {
	k := v.kind()
	switch k {
	case Complex64:
		return complex128(*(*complex64)(v.ptr))
	case Complex128:
		return *(*complex128)(v.ptr)
	}
	panic(&ValueError{"reflect.Value.Complex", v.kind()})
}
1215
1216
1217
1218
1219
// Elem returns the value that the interface v contains or that the
// pointer v points to. It panics if v's Kind is not Interface or
// Pointer. It returns the zero Value if v is nil.
func (v Value) Elem() Value {
	k := v.kind()
	switch k {
	case Interface:
		// Repack the interface payload and unwrap it; propagate the
		// read-only flag from v onto the result.
		x := unpackEface(packIfaceValueIntoEmptyIface(v))
		if x.flag != 0 {
			x.flag |= v.flag.ro()
		}
		return x
	case Pointer:
		ptr := v.ptr
		if v.flag&flagIndir != 0 {
			if !v.typ().IsDirectIface() {
				// Special pointer kinds (e.g. not-in-heap pointers) are
				// stored indirectly even though they are pointer-shaped.
				// Validate the stored word before dereferencing through
				// it, since such pointers may not be usable as-is.
				if !verifyNotInHeapPtr(*(*uintptr)(ptr)) {
					panic("reflect: reflect.Value.Elem on an invalid notinheap pointer")
				}
			}
			ptr = *(*unsafe.Pointer)(ptr)
		}
		// The returned value's address is v's value.
		if ptr == nil {
			return Value{}
		}
		tt := (*ptrType)(unsafe.Pointer(v.typ()))
		typ := tt.Elem
		// Dereferencing a pointer always yields an addressable,
		// indirect value; read-only status carries over.
		fl := v.flag&flagRO | flagIndir | flagAddr
		fl |= flag(typ.Kind())
		return Value{typ, ptr, fl}
	}
	panic(&ValueError{"reflect.Value.Elem", v.kind()})
}
1261
1262
1263
// Field returns the i'th field of the struct v.
// It panics if v's Kind is not Struct or i is out of range.
func (v Value) Field(i int) Value {
	if v.kind() != Struct {
		panic(&ValueError{"reflect.Value.Field", v.kind()})
	}
	tt := (*structType)(unsafe.Pointer(v.typ()))
	if uint(i) >= uint(len(tt.Fields)) {
		panic("reflect: Field index out of range")
	}
	field := &tt.Fields[i]
	typ := field.Typ

	// Inherit indirection, addressability, and sticky read-only status;
	// flagEmbedRO is deliberately NOT inherited (it applies only one
	// level deep) — it is re-derived below from this field itself.
	fl := v.flag&(flagStickyRO|flagIndir|flagAddr) | flag(typ.Kind())
	// Using an unexported field forces the result to be read-only.
	if !field.Name.IsExported() {
		if field.Embedded() {
			fl |= flagEmbedRO
		} else {
			fl |= flagStickyRO
		}
	}
	if fl&flagIndir == 0 && typ.Size() == 0 {
		// Special case: a zero-sized field of a direct (pointer-shaped)
		// struct. v.ptr is the struct's single pointer word, which does
		// not address the field; point at the shared zero value instead
		// so the resulting Value is well-formed.
		return Value{typ, unsafe.Pointer(&zeroVal[0]), fl | flagIndir}
	}

	// Either flagIndir is set and ptr points at struct storage (the
	// field lives at ptr+offset), or the struct is pointer-shaped; in
	// the latter case a non-zero-sized field must be the pointer word
	// itself at offset 0, so the same add is still correct.
	ptr := add(v.ptr, field.Offset, "same as non-reflect &v.field")
	return Value{typ, ptr, fl}
}
1305
1306
1307
1308
// FieldByIndex returns the nested field corresponding to index.
// It panics if evaluation requires stepping through a nil pointer
// or a field that is not a struct.
func (v Value) FieldByIndex(index []int) Value {
	if len(index) == 1 {
		return v.Field(index[0])
	}
	v.mustBe(Struct)
	for i, x := range index {
		if i > 0 {
			// Between hops, step through pointers to embedded structs.
			if v.Kind() == Pointer && v.typ().Elem().Kind() == abi.Struct {
				if v.IsNil() {
					panic("reflect: indirection through nil pointer to embedded struct")
				}
				v = v.Elem()
			}
		}
		v = v.Field(x)
	}
	return v
}
1327
1328
1329
1330
1331
// FieldByIndexErr returns the nested field corresponding to index.
// It returns an error if evaluation requires stepping through a nil
// pointer, but panics if it must step through a field that
// is not a struct.
func (v Value) FieldByIndexErr(index []int) (Value, error) {
	if len(index) == 1 {
		return v.Field(index[0]), nil
	}
	v.mustBe(Struct)
	for i, x := range index {
		if i > 0 {
			// Same traversal as FieldByIndex, but a nil embedded pointer
			// yields an error instead of a panic.
			if v.Kind() == Ptr && v.typ().Elem().Kind() == abi.Struct {
				if v.IsNil() {
					return Value{}, errors.New("reflect: indirection through nil pointer to embedded struct field " + nameFor(v.typ().Elem()))
				}
				v = v.Elem()
			}
		}
		v = v.Field(x)
	}
	return v, nil
}
1350
1351
1352
1353
// FieldByName returns the struct field with the given name.
// It returns the zero Value if no field was found.
// It panics if v's Kind is not Struct.
func (v Value) FieldByName(name string) Value {
	v.mustBe(Struct)
	if f, ok := toRType(v.typ()).FieldByName(name); ok {
		return v.FieldByIndex(f.Index)
	}
	return Value{}
}
1361
1362
1363
1364
1365
// FieldByNameFunc returns the struct field with a name
// that satisfies the match function.
// It returns the zero Value if no field was found.
// NOTE(review): unlike FieldByName, there is no mustBe(Struct) guard
// here; a non-struct receiver is handled by the rtype lookup instead —
// confirm this asymmetry is intentional.
func (v Value) FieldByNameFunc(match func(string) bool) Value {
	if f, ok := toRType(v.typ()).FieldByNameFunc(match); ok {
		return v.FieldByIndex(f.Index)
	}
	return Value{}
}
1372
1373
1374 func (v Value) CanFloat() bool {
1375 switch v.kind() {
1376 case Float32, Float64:
1377 return true
1378 default:
1379 return false
1380 }
1381 }
1382
1383
1384
// Float returns v's underlying value, as a float64.
// It panics if v's Kind is not Float32 or Float64.
func (v Value) Float() float64 {
	k := v.kind()
	switch k {
	case Float32:
		return float64(*(*float32)(v.ptr))
	case Float64:
		return *(*float64)(v.ptr)
	}
	panic(&ValueError{"reflect.Value.Float", v.kind()})
}
1395
// uint8Type is the cached type descriptor for uint8, used by Index when
// indexing into a string.
var uint8Type = rtypeOf(uint8(0))
1397
1398
1399
// Index returns v's i'th element.
// It panics if v's Kind is not Array, Slice, or String or i is out of range.
func (v Value) Index(i int) Value {
	switch v.kind() {
	case Array:
		tt := (*arrayType)(unsafe.Pointer(v.typ()))
		if uint(i) >= uint(tt.Len) {
			panic("reflect: array index out of range")
		}
		typ := tt.Elem
		offset := uintptr(i) * typ.Size()

		// Element flags mirror the array's: same indirection and
		// addressability, same (collapsed) read-only status.
		val := add(v.ptr, offset, "same as &v[i], i < tt.len")
		fl := v.flag&(flagIndir|flagAddr) | v.flag.ro() | flag(typ.Kind())
		return Value{typ, val, fl}

	case Slice:
		// Element flag is always indir+addr: the element lives in the
		// heap-backed array regardless of how the slice header is held.
		s := (*unsafeheader.Slice)(v.ptr)
		if uint(i) >= uint(s.Len) {
			panic("reflect: slice index out of range")
		}
		tt := (*sliceType)(unsafe.Pointer(v.typ()))
		typ := tt.Elem
		val := arrayAt(s.Data, i, typ.Size(), "i < s.Len")
		fl := flagAddr | flagIndir | v.flag.ro() | flag(typ.Kind())
		return Value{typ, val, fl}

	case String:
		s := (*unsafeheader.String)(v.ptr)
		if uint(i) >= uint(s.Len) {
			panic("reflect: string index out of range")
		}
		// String bytes are immutable: indirect but NOT addressable.
		p := arrayAt(s.Data, i, 1, "i < s.Len")
		fl := v.flag.ro() | flag(Uint8) | flagIndir
		return Value{uint8Type, p, fl}
	}
	panic(&ValueError{"reflect.Value.Index", v.kind()})
}
1443
1444
1445 func (v Value) CanInt() bool {
1446 switch v.kind() {
1447 case Int, Int8, Int16, Int32, Int64:
1448 return true
1449 default:
1450 return false
1451 }
1452 }
1453
1454
1455
// Int returns v's underlying value, as an int64.
// It panics if v's Kind is not Int, Int8, Int16, Int32, or Int64.
func (v Value) Int() int64 {
	k := v.kind()
	p := v.ptr
	switch k {
	case Int:
		return int64(*(*int)(p))
	case Int8:
		return int64(*(*int8)(p))
	case Int16:
		return int64(*(*int16)(p))
	case Int32:
		return int64(*(*int32)(p))
	case Int64:
		return *(*int64)(p)
	}
	panic(&ValueError{"reflect.Value.Int", v.kind()})
}
1473
1474
// CanInterface reports whether Interface can be used without panicking.
// It panics if v is the zero Value.
func (v Value) CanInterface() bool {
	if v.flag == 0 {
		panic(&ValueError{"reflect.Value.CanInterface", Invalid})
	}
	// Values reached through unexported fields cannot be extracted.
	return v.flag&flagRO == 0
}
1481
1482
1483
1484
1485
1486
1487
1488
// Interface returns v's current value as an interface{}.
// It is equivalent to:
//
//	var i interface{} = (v's underlying value)
//
// It panics if the Value was obtained by accessing unexported struct fields.
func (v Value) Interface() (i any) {
	return valueInterface(v, true)
}

// valueInterface implements Interface. When safe is true, values
// obtained via unexported fields are rejected (external callers);
// internal callers may pass safe=false to bypass that check.
func valueInterface(v Value, safe bool) any {
	if v.flag == 0 {
		panic(&ValueError{"reflect.Value.Interface", Invalid})
	}
	if safe && v.flag&flagRO != 0 {
		// Do not allow access to unexported values via Interface,
		// because they might be pointers that should not be
		// writable or methods or function that should not be callable.
		panic("reflect.Value.Interface: cannot return value obtained from unexported field or method")
	}
	if v.flag&flagMethod != 0 {
		// Materialize a method value into a callable func first.
		v = makeMethodValue("Interface", v)
	}

	if v.kind() == Interface {
		// Special case: already an interface; repack its payload.
		return packIfaceValueIntoEmptyIface(v)
	}

	return packEface(v)
}
1514
1515
1516
1517
// TypeAssert is semantically equivalent to
//
//	v2, ok := v.Interface().(T)
//
// but avoids the intermediate interface allocation where possible.
// It panics if v is the zero Value or was obtained via unexported fields.
func TypeAssert[T any](v Value) (T, bool) {
	if v.flag == 0 {
		panic(&ValueError{"reflect.TypeAssert", Invalid})
	}
	if v.flag&flagRO != 0 {
		// Same restriction as Interface: unexported values must not leak.
		panic("reflect.TypeAssert: cannot return value obtained from unexported field or method")
	}

	if v.flag&flagMethod != 0 {
		// Materialize a method value into a callable func first.
		v = makeMethodValue("TypeAssert", v)
	}

	typ := abi.TypeFor[T]()

	// If v itself holds an interface, defer to Go's own type assertion
	// on the repacked payload.
	if v.kind() == Interface {
		v, ok := packIfaceValueIntoEmptyIface(v).(T)
		return v, ok
	}

	// Asserting a concrete value to an interface type T: probe whether
	// v's type implements T using a dummy interface with a nil data
	// word, then patch in the real data word only on success. This
	// avoids copying the value when the assertion fails.
	if typ.Kind() == abi.Interface {
		iface := *(*any)(unsafe.Pointer(&abi.EmptyInterface{Type: v.typ(), Data: nil}))
		if out, ok := iface.(T); ok {
			// out now has the right itab/type word; splice in the data.
			(*abi.CommonInterface)(unsafe.Pointer(&out)).Data = packEfaceData(v)
			return out, true
		}
		var zero T
		return zero, false
	}

	// Concrete-to-concrete: succeeds only on exact type identity.
	if typ != v.typ() {
		var zero T
		return zero, false
	}
	if v.flag&flagIndir == 0 {
		// Pointer-shaped value held directly in ptr: reinterpret the word.
		return *(*T)(unsafe.Pointer(&v.ptr)), true
	}
	return *(*T)(v.ptr), true
}
1589
1590
1591
1592
// packIfaceValueIntoEmptyIface converts an interface-kinded Value into
// an empty interface holding the same dynamic value. v.kind() must be
// Interface.
func packIfaceValueIntoEmptyIface(v Value) any {
	// Empty interfaces have the eface layout already; reinterpret in place.
	if v.NumMethod() == 0 {
		return *(*any)(v.ptr)
	}
	// Non-empty interfaces carry an itab. Assigning through an arbitrary
	// one-method interface type converts iface -> eface; the concrete
	// method set does not matter for the conversion.
	return *(*interface {
		M()
	})(v.ptr)
}
1603
1604
1605
1606
1607
1608
1609
1610
1611
1612
// InterfaceData returns a pair of unspecified uintptr values.
// It panics if v's Kind is not Interface.
//
// The two words of the interface header are returned raw; callers that
// still use this rely on implementation details.
func (v Value) InterfaceData() [2]uintptr {
	v.mustBe(Interface)
	// The compiler loses track as soon as the interface words pass
	// through uintptr, so force the data to escape: the returned
	// integers may outlive any GC-visible reference to it.
	escapes(v.ptr)
	return *(*[2]uintptr)(v.ptr)
}
1624
1625
1626
1627
1628
1629
1630
1631
// IsNil reports whether its argument v is nil. The argument must be
// a chan, func, interface, map, pointer, or slice value; if it is
// not, IsNil panics.
func (v Value) IsNil() bool {
	k := v.kind()
	switch k {
	case Chan, Func, Map, Pointer, UnsafePointer:
		if v.flag&flagMethod != 0 {
			// A method value is a bound method; it is never nil.
			return false
		}
		ptr := v.ptr
		if v.flag&flagIndir != 0 {
			ptr = *(*unsafe.Pointer)(ptr)
		}
		return ptr == nil
	case Interface, Slice:
		// Both interface and slice are nil if their first word is 0.
		// Both are always bigger than a word; assume flagIndir.
		return *(*unsafe.Pointer)(v.ptr) == nil
	}
	panic(&ValueError{"reflect.Value.IsNil", v.kind()})
}
1651
1652
1653
1654
1655
1656
// IsValid reports whether v represents a value.
// It returns false if v is the zero Value.
func (v Value) IsValid() bool {
	// The zero Value has flag == 0; any real value carries at least
	// its kind bits in the flag word.
	return v.flag != 0
}
1660
1661
1662
// IsZero reports whether v is the zero value for its type.
// It panics if the Value is invalid.
func (v Value) IsZero() bool {
	switch v.kind() {
	case Bool:
		return !v.Bool()
	case Int, Int8, Int16, Int32, Int64:
		return v.Int() == 0
	case Uint, Uint8, Uint16, Uint32, Uint64, Uintptr:
		return v.Uint() == 0
	case Float32, Float64:
		return v.Float() == 0
	case Complex64, Complex128:
		return v.Complex() == 0
	case Array:
		if v.flag&flagIndir == 0 {
			// Pointer-shaped array (stored directly in the ptr word).
			return v.ptr == nil
		}
		// Pointing at the shared zero buffer means the value is zero.
		if v.ptr == unsafe.Pointer(&zeroVal[0]) {
			return true
		}
		typ := (*abi.ArrayType)(unsafe.Pointer(v.typ()))
		// Fast path: compare against the shared zero buffer using the
		// type's equality function when the value fits in it.
		if typ.Equal != nil && typ.Size() <= abi.ZeroValSize {
			return typ.Equal(abi.NoEscape(v.ptr), unsafe.Pointer(&zeroVal[0]))
		}
		if typ.TFlag&abi.TFlagRegularMemory != 0 {
			// Plain memory (no padding/special cases): a byte scan suffices.
			return isZero(unsafe.Slice(((*byte)(v.ptr)), typ.Size()))
		}
		// Slow path: check each element recursively.
		n := int(typ.Len)
		for i := 0; i < n; i++ {
			if !v.Index(i).IsZero() {
				return false
			}
		}
		return true
	case Chan, Func, Interface, Map, Pointer, Slice, UnsafePointer:
		return v.IsNil()
	case String:
		return v.Len() == 0
	case Struct:
		if v.flag&flagIndir == 0 {
			// Pointer-shaped struct (stored directly in the ptr word).
			return v.ptr == nil
		}
		if v.ptr == unsafe.Pointer(&zeroVal[0]) {
			return true
		}
		typ := (*abi.StructType)(unsafe.Pointer(v.typ()))
		// Fast path: equality against the shared zero buffer.
		if typ.Equal != nil && typ.Size() <= abi.ZeroValSize {
			return typ.Equal(abi.NoEscape(v.ptr), unsafe.Pointer(&zeroVal[0]))
		}
		if typ.TFlag&abi.TFlagRegularMemory != 0 {
			// Plain memory: a byte scan suffices.
			return isZero(unsafe.Slice(((*byte)(v.ptr)), typ.Size()))
		}
		// Slow path: check each field recursively; blank fields ("_")
		// are ignored.
		n := v.NumField()
		for i := 0; i < n; i++ {
			if !v.Field(i).IsZero() && v.Type().Field(i).Name != "_" {
				return false
			}
		}
		return true
	default:
		// This should never happen, but will act as a safeguard for later,
		// as a default value doesn't make sense here.
		panic(&ValueError{"reflect.Value.IsZero", v.Kind()})
	}
}
1738
1739
1740
// isZero reports whether every byte of b is zero.
// It trims unaligned head bytes and ragged tail bytes one at a time,
// then scans the aligned middle as uint64 words in batches.
func isZero(b []byte) bool {
	if len(b) == 0 {
		return true
	}
	const batch = 32 // words checked per inner-loop round

	// Peel off leading bytes until the remaining data is 8-byte aligned.
	for uintptr(unsafe.Pointer(&b[0]))%8 != 0 {
		if b[0] != 0 {
			return false
		}
		b = b[1:]
		if len(b) == 0 {
			return true
		}
	}
	// Peel off trailing bytes so the length is a multiple of 8.
	for len(b)%8 != 0 {
		if b[len(b)-1] != 0 {
			return false
		}
		b = b[:len(b)-1]
	}
	if len(b) == 0 {
		return true
	}

	// View the remaining aligned bytes as uint64 words.
	words := unsafe.Slice((*uint64)(unsafe.Pointer(&b[0])), len(b)/8)

	// Peel off leading words until a multiple of batch remains.
	for len(words)%batch != 0 {
		if words[0] != 0 {
			return false
		}
		words = words[1:]
	}
	// Scan the rest a batch at a time, OR-folding each batch so a
	// single comparison decides it.
	for len(words) >= batch {
		var acc uint64
		for _, w := range words[:batch] {
			acc |= w
		}
		if acc != 0 {
			return false
		}
		words = words[batch:]
	}
	return true
}
1787
1788
1789
// SetZero sets v to be the zero value of v's type.
// It panics if CanSet returns false.
// Kinds with a known fixed layout are cleared with a direct store;
// Array and Struct fall back to a typed memory clear.
func (v Value) SetZero() {
	v.mustBeAssignable()
	switch v.kind() {
	case Bool:
		*(*bool)(v.ptr) = false
	case Int:
		*(*int)(v.ptr) = 0
	case Int8:
		*(*int8)(v.ptr) = 0
	case Int16:
		*(*int16)(v.ptr) = 0
	case Int32:
		*(*int32)(v.ptr) = 0
	case Int64:
		*(*int64)(v.ptr) = 0
	case Uint:
		*(*uint)(v.ptr) = 0
	case Uint8:
		*(*uint8)(v.ptr) = 0
	case Uint16:
		*(*uint16)(v.ptr) = 0
	case Uint32:
		*(*uint32)(v.ptr) = 0
	case Uint64:
		*(*uint64)(v.ptr) = 0
	case Uintptr:
		*(*uintptr)(v.ptr) = 0
	case Float32:
		*(*float32)(v.ptr) = 0
	case Float64:
		*(*float64)(v.ptr) = 0
	case Complex64:
		*(*complex64)(v.ptr) = 0
	case Complex128:
		*(*complex128)(v.ptr) = 0
	case String:
		*(*string)(v.ptr) = ""
	case Slice:
		*(*unsafeheader.Slice)(v.ptr) = unsafeheader.Slice{}
	case Interface:
		*(*abi.EmptyInterface)(v.ptr) = abi.EmptyInterface{}
	case Chan, Func, Map, Pointer, UnsafePointer:
		*(*unsafe.Pointer)(v.ptr) = nil
	case Array, Struct:
		typedmemclr(v.typ(), v.ptr)
	default:
		// This should never happen, but will act as a safeguard for later,
		// as a default value doesn't make sense here.
		panic(&ValueError{"reflect.Value.SetZero", v.Kind()})
	}
}
1841
1842
1843
// Kind returns v's Kind.
// If v is the zero Value (IsValid returns false), Kind returns Invalid.
func (v Value) Kind() Kind {
	return v.kind()
}

// Len returns v's length.
// It panics if v's Kind is not Array, Chan, Map, Slice, String,
// or pointer to Array.
func (v Value) Len() int {
	// inlinable fast path for the most common case
	if v.kind() == Slice {
		return (*unsafeheader.Slice)(v.ptr).Len
	}
	return v.lenNonSlice()
}

// lenNonSlice handles Len for every kind other than Slice.
func (v Value) lenNonSlice() int {
	switch k := v.kind(); k {
	case Array:
		tt := (*arrayType)(unsafe.Pointer(v.typ()))
		return int(tt.Len)
	case Chan:
		return chanlen(v.pointer())
	case Map:
		return maplen(v.pointer())
	case String:
		// String is bigger than a word; assume flagIndir.
		return (*unsafeheader.String)(v.ptr).Len
	case Ptr:
		if v.typ().Elem().Kind() == abi.Array {
			return v.typ().Elem().Len()
		}
		panic("reflect: call of reflect.Value.Len on ptr to non-array Value")
	}
	panic(&ValueError{"reflect.Value.Len", v.kind()})
}
1878
1879
1880
// copyVal returns a Value containing the map key or value at ptr,
// allocating a new variable as needed.
func copyVal(typ *abi.Type, fl flag, ptr unsafe.Pointer) Value {
	if !typ.IsDirectIface() {
		// Copy result so future changes to the map
		// won't change the underlying value.
		c := unsafe_New(typ)
		typedmemmove(typ, c, ptr)
		return Value{typ, c, fl | flagIndir}
	}
	// Pointer-shaped value: store it directly in the ptr word.
	return Value{typ, *(*unsafe.Pointer)(ptr), fl}
}
1891
1892
1893
1894
1895
1896
1897
1898
// Method returns a function value corresponding to v's i'th method.
// The arguments to a Call on the returned function should not include
// a receiver; the returned function will always use v as the receiver.
// Method panics if i is out of range or if v is a nil interface value.
func (v Value) Method(i int) Value {
	if v.typ() == nil {
		panic(&ValueError{"reflect.Value.Method", Invalid})
	}
	if v.flag&flagMethod != 0 || uint(i) >= uint(toRType(v.typ()).NumMethod()) {
		panic("reflect: Method index out of range")
	}
	if v.typ().Kind() == abi.Interface && v.IsNil() {
		panic("reflect: Method on nil interface value")
	}
	// The result keeps v's read-only and indirection state, becomes a
	// Func, and encodes the method index in the flag word.
	fl := v.flag.ro() | (v.flag & flagIndir)
	fl |= flag(Func)
	fl |= flag(i)<<flagMethodShift | flagMethod
	return Value{v.typ(), v.ptr, fl}
}

// NumMethod returns the number of methods in the value's method set.
//
// For a non-interface type, it returns the number of exported methods.
//
// For an interface type, it returns the number of exported and
// unexported methods.
func (v Value) NumMethod() int {
	if v.typ() == nil {
		panic(&ValueError{"reflect.Value.NumMethod", Invalid})
	}
	if v.flag&flagMethod != 0 {
		// A method value is a plain func; it has no methods itself.
		return 0
	}
	return toRType(v.typ()).NumMethod()
}

// MethodByName returns a function value corresponding to the method
// of v with the given name.
// The arguments to a Call on the returned function should not include
// a receiver; the returned function will always use v as the receiver.
// It returns the zero Value if no method was found.
func (v Value) MethodByName(name string) Value {
	if v.typ() == nil {
		panic(&ValueError{"reflect.Value.MethodByName", Invalid})
	}
	if v.flag&flagMethod != 0 {
		return Value{}
	}
	m, ok := toRType(v.typ()).MethodByName(name)
	if !ok {
		return Value{}
	}
	return v.Method(m.Index)
}
1952
1953
1954
// NumField returns the number of fields in the struct v.
// It panics if v's Kind is not Struct.
func (v Value) NumField() int {
	v.mustBe(Struct)
	tt := (*structType)(unsafe.Pointer(v.typ()))
	return len(tt.Fields)
}
1960
1961
1962
// OverflowComplex reports whether the complex128 x cannot be represented
// by v's type. It panics if v's Kind is not Complex64 or Complex128.
func (v Value) OverflowComplex(x complex128) bool {
	k := v.kind()
	switch k {
	case Complex64:
		// Overflow if either component exceeds the float32 range.
		return overflowFloat32(real(x)) || overflowFloat32(imag(x))
	case Complex128:
		return false
	}
	panic(&ValueError{"reflect.Value.OverflowComplex", v.kind()})
}

// OverflowFloat reports whether the float64 x cannot be represented
// by v's type. It panics if v's Kind is not Float32 or Float64.
func (v Value) OverflowFloat(x float64) bool {
	k := v.kind()
	switch k {
	case Float32:
		return overflowFloat32(x)
	case Float64:
		return false
	}
	panic(&ValueError{"reflect.Value.OverflowFloat", v.kind()})
}
1986
1987 func overflowFloat32(x float64) bool {
1988 if x < 0 {
1989 x = -x
1990 }
1991 return math.MaxFloat32 < x && x <= math.MaxFloat64
1992 }
1993
1994
1995
// OverflowInt reports whether the int64 x cannot be represented by v's
// type. It panics if v's Kind is not Int, Int8, Int16, Int32, or Int64.
func (v Value) OverflowInt(x int64) bool {
	k := v.kind()
	switch k {
	case Int, Int8, Int16, Int32, Int64:
		bitSize := v.typ().Size() * 8
		// Sign-extend from bitSize back up; a change means x doesn't fit.
		trunc := (x << (64 - bitSize)) >> (64 - bitSize)
		return x != trunc
	}
	panic(&ValueError{"reflect.Value.OverflowInt", v.kind()})
}

// OverflowUint reports whether the uint64 x cannot be represented by
// v's type. It panics if v's Kind is not Uint, Uintptr, Uint8, Uint16,
// Uint32, or Uint64.
func (v Value) OverflowUint(x uint64) bool {
	k := v.kind()
	switch k {
	case Uint, Uintptr, Uint8, Uint16, Uint32, Uint64:
		// NOTE(review): uses v.typ_ directly rather than v.typ(); OverflowInt
		// above goes through v.typ(). Presumably safe because Size does not
		// let the pointer escape — confirm before "fixing" the asymmetry.
		bitSize := v.typ_.Size() * 8
		// Zero-extend from bitSize back up; a change means x doesn't fit.
		trunc := (x << (64 - bitSize)) >> (64 - bitSize)
		return x != trunc
	}
	panic(&ValueError{"reflect.Value.OverflowUint", v.kind()})
}
2019
2020
2021
2022
2023
2024
2025
2026
2027
2028
2029
2030
2031
2032
2033
2034
2035
2036
2037
2038
2039
2040
// Pointer returns v's value as a uintptr.
// It panics if v's Kind is not Chan, Func, Map, Pointer, Slice, String,
// or UnsafePointer. The result is a raw integer, not a tracked pointer;
// prefer UnsafePointer when the result must stay valid under the GC.
func (v Value) Pointer() uintptr {
	// The compiler loses track as soon as the Value is converted to uintptr,
	// so make sure the pointed-at data escapes first.
	escapes(v.ptr)

	k := v.kind()
	switch k {
	case Pointer:
		if !v.typ().Pointers() {
			// A pointer type whose pointee the GC does not track
			// (go:notinheap). Validate the raw value before returning it.
			val := *(*uintptr)(v.ptr)
			if !verifyNotInHeapPtr(val) {
				panic("reflect: reflect.Value.Pointer on an invalid notinheap pointer")
			}
			return val
		}
		fallthrough
	case Chan, Map, UnsafePointer:
		return uintptr(v.pointer())
	case Func:
		if v.flag&flagMethod != 0 {
			// As the doc comment says, the returned pointer is an
			// underlying code pointer but not necessarily enough to
			// identify a single function uniquely. All method values
			// created for a given method share the same code entry.
			return methodValueCallCodePtr()
		}
		p := v.pointer()
		// Non-nil func value points at data block.
		// First word of data block is actual code.
		if p != nil {
			p = *(*unsafe.Pointer)(p)
		}
		return uintptr(p)
	case Slice:
		return uintptr((*unsafeheader.Slice)(v.ptr).Data)
	case String:
		return uintptr((*unsafeheader.String)(v.ptr).Data)
	}
	panic(&ValueError{"reflect.Value.Pointer", v.kind()})
}
2084
2085
2086
2087
2088
2089
// Recv receives and returns a value from the channel v.
// It panics if v's Kind is not Chan.
// The receive blocks until a value is ready.
// The boolean value ok is true if the value x corresponds to a send
// on the channel, false if it is a zero value received because the
// channel is closed.
func (v Value) Recv() (x Value, ok bool) {
	v.mustBe(Chan)
	v.mustBeExported()
	return v.recv(false)
}

// recv in non-blocking (nb) or blocking mode, as indicated.
// v is known to be a channel.
func (v Value) recv(nb bool) (val Value, ok bool) {
	tt := (*chanType)(unsafe.Pointer(v.typ()))
	if ChanDir(tt.Dir)&RecvDir == 0 {
		panic("reflect: recv on send-only channel")
	}
	t := tt.Elem
	val = Value{t, nil, flag(t.Kind())}
	var p unsafe.Pointer
	if !t.IsDirectIface() {
		// Element is not pointer-shaped: allocate storage for it.
		p = unsafe_New(t)
		val.ptr = p
		val.flag |= flagIndir
	} else {
		// Pointer-shaped element is received straight into the ptr word.
		p = unsafe.Pointer(&val.ptr)
	}
	selected, ok := chanrecv(v.pointer(), nb, p)
	if !selected {
		// Non-blocking receive that would have blocked: no value.
		val = Value{}
	}
	return
}

// Send sends x on the channel v.
// It panics if v's kind is not Chan or if x's type is not the same
// type as v's element type. The send blocks until x can be sent.
func (v Value) Send(x Value) {
	v.mustBe(Chan)
	v.mustBeExported()
	v.send(x, false)
}

// send in non-blocking (nb) or blocking mode, as indicated.
// v is known to be a channel.
func (v Value) send(x Value, nb bool) (selected bool) {
	tt := (*chanType)(unsafe.Pointer(v.typ()))
	if ChanDir(tt.Dir)&SendDir == 0 {
		panic("reflect: send on recv-only channel")
	}
	x.mustBeExported()
	x = x.assignTo("reflect.Value.Send", tt.Elem, nil)
	var p unsafe.Pointer
	if x.flag&flagIndir != 0 {
		p = x.ptr
	} else {
		p = unsafe.Pointer(&x.ptr)
	}
	return chansend(v.pointer(), p, nb)
}
2146
2147
2148
2149
2150
// Set assigns x to the value v.
// It panics if CanSet returns false.
// As in Go, x's value must be assignable to v's type and
// must not be derived from use of unexported struct fields.
func (v Value) Set(x Value) {
	v.mustBeAssignable()
	x.mustBeExported() // do not let unexported x leak
	var target unsafe.Pointer
	if v.kind() == Interface {
		// assignTo may build an interface value directly into v's storage.
		target = v.ptr
	}
	x = x.assignTo("reflect.Set", v.typ(), target)
	if x.flag&flagIndir != 0 {
		if x.ptr == unsafe.Pointer(&zeroVal[0]) {
			// x is the shared zero value; clearing is cheaper than copying.
			typedmemclr(v.typ(), v.ptr)
		} else {
			typedmemmove(v.typ(), v.ptr, x.ptr)
		}
	} else {
		// Pointer-shaped x: store the single word directly.
		*(*unsafe.Pointer)(v.ptr) = x.ptr
	}
}
2169
2170
2171
// SetBool sets v's underlying value.
// It panics if v's Kind is not Bool or if CanSet() is false.
func (v Value) SetBool(x bool) {
	v.mustBeAssignable()
	v.mustBe(Bool)
	*(*bool)(v.ptr) = x
}

// SetBytes sets v's underlying value.
// It panics if v's underlying value is not a slice of bytes
// or if CanSet() is false.
func (v Value) SetBytes(x []byte) {
	v.mustBeAssignable()
	v.mustBe(Slice)
	if toRType(v.typ()).Elem().Kind() != Uint8 {
		panic("reflect.Value.SetBytes of non-byte slice")
	}
	*(*[]byte)(v.ptr) = x
}

// setRunes sets v's underlying value.
// It panics if v's underlying value is not a slice of runes (int32s)
// or if CanSet() is false.
func (v Value) setRunes(x []rune) {
	v.mustBeAssignable()
	v.mustBe(Slice)
	if v.typ().Elem().Kind() != abi.Int32 {
		panic("reflect.Value.setRunes of non-rune slice")
	}
	*(*[]rune)(v.ptr) = x
}

// SetComplex sets v's underlying value to x.
// It panics if v's Kind is not Complex64 or Complex128,
// or if CanSet() is false.
func (v Value) SetComplex(x complex128) {
	v.mustBeAssignable()
	switch k := v.kind(); k {
	default:
		panic(&ValueError{"reflect.Value.SetComplex", v.kind()})
	case Complex64:
		*(*complex64)(v.ptr) = complex64(x)
	case Complex128:
		*(*complex128)(v.ptr) = x
	}
}

// SetFloat sets v's underlying value to x.
// It panics if v's Kind is not Float32 or Float64,
// or if CanSet() is false.
func (v Value) SetFloat(x float64) {
	v.mustBeAssignable()
	switch k := v.kind(); k {
	default:
		panic(&ValueError{"reflect.Value.SetFloat", v.kind()})
	case Float32:
		*(*float32)(v.ptr) = float32(x)
	case Float64:
		*(*float64)(v.ptr) = x
	}
}

// SetInt sets v's underlying value to x.
// It panics if v's Kind is not Int, Int8, Int16, Int32, or Int64,
// or if CanSet() is false.
func (v Value) SetInt(x int64) {
	v.mustBeAssignable()
	switch k := v.kind(); k {
	default:
		panic(&ValueError{"reflect.Value.SetInt", v.kind()})
	case Int:
		*(*int)(v.ptr) = int(x)
	case Int8:
		*(*int8)(v.ptr) = int8(x)
	case Int16:
		*(*int16)(v.ptr) = int16(x)
	case Int32:
		*(*int32)(v.ptr) = int32(x)
	case Int64:
		*(*int64)(v.ptr) = x
	}
}

// SetLen sets v's length to n.
// It panics if v's Kind is not Slice, if n is negative, or if n is
// greater than the capacity of the slice, or if CanSet() is false.
func (v Value) SetLen(n int) {
	v.mustBeAssignable()
	v.mustBe(Slice)
	s := (*unsafeheader.Slice)(v.ptr)
	// The unsigned comparison also rejects negative n.
	if uint(n) > uint(s.Cap) {
		panic("reflect: slice length out of range in SetLen")
	}
	s.Len = n
}

// SetCap sets v's capacity to n.
// It panics if v's Kind is not Slice, or if n is smaller than the
// length or greater than the capacity of the slice,
// or if CanSet() is false.
func (v Value) SetCap(n int) {
	v.mustBeAssignable()
	v.mustBe(Slice)
	s := (*unsafeheader.Slice)(v.ptr)
	if n < s.Len || n > s.Cap {
		panic("reflect: slice capacity out of range in SetCap")
	}
	s.Cap = n
}

// SetUint sets v's underlying value to x.
// It panics if v's Kind is not Uint, Uintptr, Uint8, Uint16, Uint32,
// or Uint64, or if CanSet() is false.
func (v Value) SetUint(x uint64) {
	v.mustBeAssignable()
	switch k := v.kind(); k {
	default:
		panic(&ValueError{"reflect.Value.SetUint", v.kind()})
	case Uint:
		*(*uint)(v.ptr) = uint(x)
	case Uint8:
		*(*uint8)(v.ptr) = uint8(x)
	case Uint16:
		*(*uint16)(v.ptr) = uint16(x)
	case Uint32:
		*(*uint32)(v.ptr) = uint32(x)
	case Uint64:
		*(*uint64)(v.ptr) = x
	case Uintptr:
		*(*uintptr)(v.ptr) = uintptr(x)
	}
}

// SetPointer sets the [unsafe.Pointer] value v to x.
// It panics if v's Kind is not UnsafePointer or if CanSet() is false.
func (v Value) SetPointer(x unsafe.Pointer) {
	v.mustBeAssignable()
	v.mustBe(UnsafePointer)
	*(*unsafe.Pointer)(v.ptr) = x
}

// SetString sets v's underlying value to x.
// It panics if v's Kind is not String or if CanSet() is false.
func (v Value) SetString(x string) {
	v.mustBeAssignable()
	v.mustBe(String)
	*(*string)(v.ptr) = x
}
2320
2321
2322
2323
// Slice returns v[i:j].
// It panics if v's Kind is not Array, Slice or String, or if v is an
// unaddressable array, or if the indexes are out of bounds.
func (v Value) Slice(i, j int) Value {
	var (
		cap  int
		typ  *sliceType
		base unsafe.Pointer
	)
	switch kind := v.kind(); kind {
	default:
		panic(&ValueError{"reflect.Value.Slice", v.kind()})

	case Array:
		if v.flag&flagAddr == 0 {
			panic("reflect.Value.Slice: slice of unaddressable array")
		}
		tt := (*arrayType)(unsafe.Pointer(v.typ()))
		cap = int(tt.Len)
		typ = (*sliceType)(unsafe.Pointer(tt.Slice))
		base = v.ptr

	case Slice:
		typ = (*sliceType)(unsafe.Pointer(v.typ()))
		s := (*unsafeheader.Slice)(v.ptr)
		base = s.Data
		cap = s.Cap

	case String:
		// Strings are handled entirely here; the shared tail below is
		// for Array/Slice only.
		s := (*unsafeheader.String)(v.ptr)
		if i < 0 || j < i || j > s.Len {
			panic("reflect.Value.Slice: string slice index out of bounds")
		}
		var t unsafeheader.String
		if i < s.Len {
			t = unsafeheader.String{Data: arrayAt(s.Data, i, 1, "i < s.Len"), Len: j - i}
		}
		return Value{v.typ(), unsafe.Pointer(&t), v.flag}
	}

	if i < 0 || j < i || j > cap {
		panic("reflect.Value.Slice: slice index out of bounds")
	}

	// Declare slice so that gc can see the base pointer in it.
	var x []unsafe.Pointer

	// Reinterpret as *unsafeheader.Slice to edit.
	s := (*unsafeheader.Slice)(unsafe.Pointer(&x))
	s.Len = j - i
	s.Cap = cap - i
	if cap-i > 0 {
		s.Data = arrayAt(base, i, typ.Elem.Size(), "i < cap")
	} else {
		// do not advance pointer, to avoid pointing beyond end of slice
		s.Data = base
	}

	fl := v.flag.ro() | flagIndir | flag(Slice)
	return Value{typ.Common(), unsafe.Pointer(&x), fl}
}
2382
2383
2384
2385
// Slice3 is the 3-index form of the slice operation: it returns v[i:j:k].
// It panics if v's Kind is not Array or Slice, or if v is an
// unaddressable array, or if the indexes are out of bounds.
func (v Value) Slice3(i, j, k int) Value {
	var (
		cap  int
		typ  *sliceType
		base unsafe.Pointer
	)
	switch kind := v.kind(); kind {
	default:
		panic(&ValueError{"reflect.Value.Slice3", v.kind()})

	case Array:
		if v.flag&flagAddr == 0 {
			panic("reflect.Value.Slice3: slice of unaddressable array")
		}
		tt := (*arrayType)(unsafe.Pointer(v.typ()))
		cap = int(tt.Len)
		typ = (*sliceType)(unsafe.Pointer(tt.Slice))
		base = v.ptr

	case Slice:
		typ = (*sliceType)(unsafe.Pointer(v.typ()))
		s := (*unsafeheader.Slice)(v.ptr)
		base = s.Data
		cap = s.Cap
	}

	if i < 0 || j < i || k < j || k > cap {
		panic("reflect.Value.Slice3: slice index out of bounds")
	}

	// Declare slice so that the garbage collector
	// can see the base pointer in it.
	var x []unsafe.Pointer

	// Reinterpret as *unsafeheader.Slice to edit.
	s := (*unsafeheader.Slice)(unsafe.Pointer(&x))
	s.Len = j - i
	s.Cap = k - i
	if k-i > 0 {
		s.Data = arrayAt(base, i, typ.Elem.Size(), "i < k <= cap")
	} else {
		// do not advance pointer, to avoid pointing beyond end of slice
		s.Data = base
	}

	fl := v.flag.ro() | flagIndir | flag(Slice)
	return Value{typ.Common(), unsafe.Pointer(&x), fl}
}
2434
2435
2436
2437
2438
2439
2440
// String returns the string v's underlying value, as a string.
// String is a special case because of Go's String method convention:
// unlike the other getters, it does not panic if v's Kind is not String.
// Instead, it returns a string of the form "<T value>" where T is v's type.
func (v Value) String() string {
	// stringNonString is split out to keep String inlineable for string kinds.
	if v.kind() == String {
		return *(*string)(v.ptr)
	}
	return v.stringNonString()
}

// stringNonString handles the non-String kinds for String.
func (v Value) stringNonString() string {
	if v.kind() == Invalid {
		return "<invalid Value>"
	}
	// If you call String on a reflect.Value of other type, it's better to
	// print something than to panic. Useful in debugging.
	return "<" + v.Type().String() + " Value>"
}
2457
2458
2459
2460
2461
2462
// TryRecv attempts to receive a value from the channel v but will not block.
// It panics if v's Kind is not Chan.
// If the receive delivers a value, x is the transferred value and ok is true.
// If the receive cannot finish without blocking, x is the zero Value and
// ok is false. If the channel is closed, x is the zero value for the
// channel's element type and ok is false.
func (v Value) TryRecv() (x Value, ok bool) {
	v.mustBe(Chan)
	v.mustBeExported()
	return v.recv(true)
}

// TrySend attempts to send x on the channel v but will not block.
// It panics if v's Kind is not Chan.
// It reports whether the value was sent.
// As in Go, x's value must be assignable to the channel's element type.
func (v Value) TrySend(x Value) bool {
	v.mustBe(Chan)
	v.mustBeExported()
	return v.send(x, true)
}
2478
2479
// Type returns v's type.
func (v Value) Type() Type {
	// Inlineable fast path for the common case (valid, not a method value).
	if v.flag != 0 && v.flag&flagMethod == 0 {
		return (*rtype)(abi.NoEscape(unsafe.Pointer(v.typ_)))
	}
	return v.typeSlow()
}

// typeSlow handles the invalid and method-value cases for Type.
func (v Value) typeSlow() Type {
	return toRType(v.abiTypeSlow())
}

// abiType returns v's type as an *abi.Type.
func (v Value) abiType() *abi.Type {
	if v.flag != 0 && v.flag&flagMethod == 0 {
		return v.typ()
	}
	return v.abiTypeSlow()
}

// abiTypeSlow resolves the type of an invalid Value (panics) or of a
// method value, whose real type is the method's function type rather
// than the receiver's type stored in typ_.
func (v Value) abiTypeSlow() *abi.Type {
	if v.flag == 0 {
		panic(&ValueError{"reflect.Value.Type", Invalid})
	}

	typ := v.typ()
	if v.flag&flagMethod == 0 {
		return v.typ()
	}

	// Method value.
	// v.typ describes the receiver, not the method type.
	// The method index is encoded in the upper bits of the flag word.
	i := int(v.flag) >> flagMethodShift
	if v.typ().Kind() == abi.Interface {
		// Method on interface.
		tt := (*interfaceType)(unsafe.Pointer(typ))
		if uint(i) >= uint(len(tt.Methods)) {
			panic("reflect: internal error: invalid method index")
		}
		m := &tt.Methods[i]
		return typeOffFor(typ, m.Typ)
	}
	// Method on concrete type.
	ms := typ.ExportedMethods()
	if uint(i) >= uint(len(ms)) {
		panic("reflect: internal error: invalid method index")
	}
	m := ms[i]
	return typeOffFor(typ, m.Mtyp)
}
2529
2530
// CanUint reports whether Uint can be used without panicking.
func (v Value) CanUint() bool {
	switch v.kind() {
	case Uint, Uint8, Uint16, Uint32, Uint64, Uintptr:
		return true
	default:
		return false
	}
}

// Uint returns v's underlying value, as a uint64.
// It panics if v's Kind is not Uint, Uintptr, Uint8, Uint16, Uint32,
// or Uint64.
func (v Value) Uint() uint64 {
	k := v.kind()
	p := v.ptr
	switch k {
	case Uint:
		return uint64(*(*uint)(p))
	case Uint8:
		return uint64(*(*uint8)(p))
	case Uint16:
		return uint64(*(*uint16)(p))
	case Uint32:
		return uint64(*(*uint32)(p))
	case Uint64:
		return *(*uint64)(p)
	case Uintptr:
		return uint64(*(*uintptr)(p))
	}
	panic(&ValueError{"reflect.Value.Uint", v.kind()})
}
2561
2562
2563
2564
2565
2566
2567
2568
2569
2570
// UnsafeAddr returns a pointer to v's data, as a uintptr.
// It panics if v is not addressable.
// Prefer Addr().UnsafePointer(): the uintptr result is not tracked by
// the garbage collector.
func (v Value) UnsafeAddr() uintptr {
	if v.typ() == nil {
		panic(&ValueError{"reflect.Value.UnsafeAddr", Invalid})
	}
	if v.flag&flagAddr == 0 {
		panic("reflect.Value.UnsafeAddr of unaddressable value")
	}
	// The compiler loses track as soon as the Value is converted to uintptr,
	// so make sure the data escapes first.
	escapes(v.ptr)
	return uintptr(v.ptr)
}
2582
2583
2584
2585
2586
2587
2588
2589
2590
2591
2592
2593
2594
2595
2596
// UnsafePointer returns v's value as an [unsafe.Pointer].
// It panics if v's Kind is not Chan, Func, Map, Pointer, Slice, String,
// or UnsafePointer.
func (v Value) UnsafePointer() unsafe.Pointer {
	k := v.kind()
	switch k {
	case Pointer:
		if !v.typ().Pointers() {
			// A pointer type whose pointee the GC does not track
			// (go:notinheap). Validate the raw value before returning it.
			if !verifyNotInHeapPtr(*(*uintptr)(v.ptr)) {
				panic("reflect: reflect.Value.UnsafePointer on an invalid notinheap pointer")
			}
			return *(*unsafe.Pointer)(v.ptr)
		}
		fallthrough
	case Chan, Map, UnsafePointer:
		return v.pointer()
	case Func:
		if v.flag&flagMethod != 0 {
			// All method values share a single trampoline; the returned
			// pointer identifies that code, not the specific method.
			code := methodValueCallCodePtr()
			return *(*unsafe.Pointer)(unsafe.Pointer(&code))
		}
		p := v.pointer()
		// Non-nil func value points at data block.
		// First word of data block is actual code.
		if p != nil {
			p = *(*unsafe.Pointer)(p)
		}
		return p
	case Slice:
		return (*unsafeheader.Slice)(v.ptr).Data
	case String:
		return (*unsafeheader.String)(v.ptr).Data
	}
	panic(&ValueError{"reflect.Value.UnsafePointer", v.kind()})
}
2637
2638
2639
2640
2641
2642
2643
// Fields returns an iterator over the fields of the struct value v,
// yielding each field's StructField descriptor and its Value.
// It panics if v's Kind is not Struct.
func (v Value) Fields() iter.Seq2[StructField, Value] {
	t := v.Type()
	// Check the kind eagerly so misuse panics at call time,
	// not on first iteration.
	if t.Kind() != Struct {
		panic("reflect: Fields of non-struct type " + t.String())
	}
	return func(yield func(StructField, Value) bool) {
		for i := range v.NumField() {
			if !yield(t.Field(i), v.Field(i)) {
				return
			}
		}
	}
}

// Methods returns an iterator over the methods in v's method set,
// yielding each Method descriptor and the bound method Value.
func (v Value) Methods() iter.Seq2[Method, Value] {
	return func(yield func(Method, Value) bool) {
		rtype := v.Type()
		for i := range v.NumMethod() {
			if !yield(rtype.Method(i), v.Method(i)) {
				return
			}
		}
	}
}
2679
2680
2681
2682
2683
2684
2685
2686
2687
// StringHeader is the runtime representation of a string.
// It cannot be used safely or portably and its representation may
// change in a later release; upstream documents it as deprecated in
// favor of unsafe.String / unsafe.StringData.
type StringHeader struct {
	Data uintptr // not tracked by the garbage collector
	Len  int
}

// SliceHeader is the runtime representation of a slice.
// It cannot be used safely or portably and its representation may
// change in a later release; upstream documents it as deprecated in
// favor of unsafe.Slice / unsafe.SliceData.
type SliceHeader struct {
	Data uintptr // not tracked by the garbage collector
	Len  int
	Cap  int
}
2706
2707 func typesMustMatch(what string, t1, t2 Type) {
2708 if t1 != t2 {
2709 panic(what + ": " + t1.String() + " != " + t2.String())
2710 }
2711 }
2712
2713
2714
2715
2716
2717
2718
2719
// arrayAt returns the i'th element of p,
// an array whose elements are eltSize bytes wide.
// The array pointed at by p must have at least i+1 elements:
// it is invalid (but impossible to check here) to pass i >= len,
// because then the result will point outside the array.
// whySafe must explain why i < len. (Passing "i < len" is fine;
// the benefit is to surface this assumption at the call site.)
func arrayAt(p unsafe.Pointer, i int, eltSize uintptr, whySafe string) unsafe.Pointer {
	return add(p, uintptr(i)*eltSize, "i < len")
}
2723
2724
2725
2726
2727
2728
2729
// Grow increases the slice's capacity, if necessary, to guarantee space
// for another n elements. After Grow(n), at least n elements can be
// appended to the slice without another allocation.
//
// It panics if v's Kind is not a Slice, if n is negative or too large to
// allocate the memory, or if CanSet() is false.
func (v Value) Grow(n int) {
	v.mustBeAssignable()
	v.mustBe(Slice)
	v.grow(n)
}

// grow is identical to Grow but does not check for assignability.
func (v Value) grow(n int) {
	p := (*unsafeheader.Slice)(v.ptr)
	switch {
	case n < 0:
		panic("reflect.Value.Grow: negative len")
	case p.Len+n < 0:
		// Len+n overflowed int.
		panic("reflect.Value.Grow: slice overflow")
	case p.Len+n > p.Cap:
		t := v.typ().Elem()
		*p = growslice(t, *p, n)
	}
}

// extendSlice extends a slice by n elements.
//
// Unlike Grow, extendSlice does not modify v; instead it returns a new
// (unshared, addressable) slice Value with length extended by n.
// v must be exported but need not be assignable.
func (v Value) extendSlice(n int) Value {
	v.mustBeExported()
	v.mustBe(Slice)

	// Shallow copy the slice header so the caller's header is untouched.
	sh := *(*unsafeheader.Slice)(v.ptr)
	s := &sh
	v.ptr = unsafe.Pointer(s)
	// The copied header is freely mutable: drop RO/addr state.
	v.flag = flagIndir | flag(Slice)

	v.grow(n) // fine to treat as assignable since we allocate a new slice header
	s.Len += n
	return v
}
2770
2771
2772
2773
// Clear clears the contents of a map or zeros the contents of a slice.
// It panics if v's Kind is not Map or Slice.
func (v Value) Clear() {
	switch v.Kind() {
	case Slice:
		sh := *(*unsafeheader.Slice)(v.ptr)
		st := (*sliceType)(unsafe.Pointer(v.typ()))
		// Zero the first Len elements, respecting pointer maps.
		typedarrayclear(st.Elem, sh.Data, sh.Len)
	case Map:
		mapclear(v.typ(), v.pointer())
	default:
		panic(&ValueError{"reflect.Value.Clear", v.Kind()})
	}
}
2786
2787
2788
// Append appends the values x to a slice s and returns the resulting slice.
// As in Go, each x's value must be assignable to the slice's element type.
func Append(s Value, x ...Value) Value {
	s.mustBe(Slice)
	n := s.Len()
	s = s.extendSlice(len(x))
	for i, v := range x {
		s.Index(n + i).Set(v)
	}
	return s
}

// AppendSlice appends a slice t to a slice s and returns the resulting slice.
// The slices s and t must have the same element type.
func AppendSlice(s, t Value) Value {
	s.mustBe(Slice)
	t.mustBe(Slice)
	typesMustMatch("reflect.AppendSlice", s.Type().Elem(), t.Type().Elem())
	ns := s.Len()
	nt := t.Len()
	s = s.extendSlice(nt)
	Copy(s.Slice(ns, ns+nt), t)
	return s
}
2811
2812
2813
2814
2815
2816
2817
2818
2819
// Copy copies the contents of src into dst until either
// dst has been filled or src has been exhausted.
// It returns the number of elements copied.
// Dst and src each must have kind Slice or Array, and
// dst and src must have the same element type.
//
// As a special case, src can have kind String if the element type of
// dst is kind Uint8.
func Copy(dst, src Value) int {
	dk := dst.kind()
	if dk != Array && dk != Slice {
		panic(&ValueError{"reflect.Copy", dk})
	}
	if dk == Array {
		dst.mustBeAssignable()
	}
	dst.mustBeExported()

	sk := src.kind()
	var stringCopy bool
	if sk != Array && sk != Slice {
		// string -> []byte is the only other permitted combination.
		stringCopy = sk == String && dst.typ().Elem().Kind() == abi.Uint8
		if !stringCopy {
			panic(&ValueError{"reflect.Copy", sk})
		}
	}
	src.mustBeExported()

	de := dst.typ().Elem()
	if !stringCopy {
		se := src.typ().Elem()
		typesMustMatch("reflect.Copy", toType(de), toType(se))
	}

	// Normalize both operands to slice headers so a single
	// typedslicecopy handles every kind combination.
	var ds, ss unsafeheader.Slice
	if dk == Array {
		ds.Data = dst.ptr
		ds.Len = dst.Len()
		ds.Cap = ds.Len
	} else {
		ds = *(*unsafeheader.Slice)(dst.ptr)
	}
	if sk == Array {
		ss.Data = src.ptr
		ss.Len = src.Len()
		ss.Cap = ss.Len
	} else if sk == Slice {
		ss = *(*unsafeheader.Slice)(src.ptr)
	} else {
		sh := *(*unsafeheader.String)(src.ptr)
		ss.Data = sh.Data
		ss.Len = sh.Len
		ss.Cap = sh.Len
	}

	return typedslicecopy(de.Common(), ds, ss)
}
2869
2870
2871
// A runtimeSelect is a single case passed to rselect.
// This must match the runtime's expected layout.
type runtimeSelect struct {
	dir SelectDir      // SelectSend, SelectRecv or SelectDefault
	typ *rtype         // channel type
	ch  unsafe.Pointer // channel
	val unsafe.Pointer // ptr to data (SendDir) or ptr to receive buffer (RecvDir)
}
2878
2879
2880
2881
2882
2883
2884
2885
2886
2887
2888
2889
// rselect runs a select. It returns the index of the chosen case.
// If the case was a receive, val is filled in with the received value.
// The conventional OK bool indicates whether the receive corresponds
// to a sent value. Implemented by the runtime.
func rselect([]runtimeSelect) (chosen int, recvOK bool)

// A SelectDir describes the communication direction of a select case.
type SelectDir int

// NOTE: These values must match ../runtime/select.go:selectDir.

const (
	_             SelectDir = iota
	SelectSend              // case Chan <- Send
	SelectRecv              // case <-Chan:
	SelectDefault           // default
)
2903
2904
2905
2906
2907
2908
2909
2910
2911
2912
2913
2914
2915
2916
2917
2918
2919
// A SelectCase describes a single case in a select operation.
// The kind of case depends on Dir, the communication direction.
//
// If Dir is SelectDefault, the case represents a default case.
// Chan and Send must be zero Values.
//
// If Dir is SelectSend, the case represents a send operation.
// Normally Chan's underlying value must be a channel and Send's
// underlying value must be assignable to the channel's element type.
//
// If Dir is SelectRecv, the case represents a receive operation.
// Normally Chan's underlying value must be a channel and Send must be
// a zero Value.
type SelectCase struct {
	Dir  SelectDir // direction of case
	Chan Value     // channel to use (for send or receive)
	Send Value     // value to send (for send)
}
2925
2926
2927
2928
2929
2930
2931
2932
2933
// Select executes a select operation described by the list of cases.
// Like the Go select statement, it blocks until at least one of the cases
// can proceed, makes a uniform pseudo-random choice,
// and then executes that case. It returns the index of the chosen case
// and, if that case was a receive operation, the value received and a
// boolean indicating whether the value corresponds to a send on the
// channel (as opposed to a zero value received because the channel is
// closed). Select supports a maximum of 65536 cases.
func Select(cases []SelectCase) (chosen int, recv Value, recvOK bool) {
	if len(cases) > 65536 {
		panic("reflect.Select: too many cases (max 65536)")
	}
	// NOTE: Do not trust that caller is not modifying cases data underfoot.
	// The range is safe because the caller cannot modify our copy of the len
	// and each iteration makes its own copy of the value c.
	var runcases []runtimeSelect
	if len(cases) > 4 {
		// Slice is heap allocated due to runtime dependent capacity.
		runcases = make([]runtimeSelect, len(cases))
	} else {
		// Slice can be stack allocated.
		runcases = make([]runtimeSelect, len(cases), 4)
	}

	haveDefault := false
	for i, c := range cases {
		rc := &runcases[i]
		rc.dir = c.Dir
		switch c.Dir {
		default:
			panic("reflect.Select: invalid Dir")

		case SelectDefault: // default
			if haveDefault {
				panic("reflect.Select: multiple default cases")
			}
			haveDefault = true
			if c.Chan.IsValid() {
				panic("reflect.Select: default case has Chan value")
			}
			if c.Send.IsValid() {
				panic("reflect.Select: default case has Send value")
			}

		case SelectSend:
			ch := c.Chan
			if !ch.IsValid() {
				// A nil channel case never proceeds; leave rc.ch nil.
				break
			}
			ch.mustBe(Chan)
			ch.mustBeExported()
			tt := (*chanType)(unsafe.Pointer(ch.typ()))
			if ChanDir(tt.Dir)&SendDir == 0 {
				panic("reflect.Select: SendDir case using recv-only channel")
			}
			rc.ch = ch.pointer()
			rc.typ = toRType(&tt.Type)
			v := c.Send
			if !v.IsValid() {
				panic("reflect.Select: SendDir case missing Send value")
			}
			v.mustBeExported()
			v = v.assignTo("reflect.Select", tt.Elem, nil)
			if v.flag&flagIndir != 0 {
				rc.val = v.ptr
			} else {
				rc.val = unsafe.Pointer(&v.ptr)
			}
			// The value to send needs to escape, because it is
			// passed to the runtime as an unsafe.Pointer.
			escapes(rc.val)

		case SelectRecv:
			if c.Send.IsValid() {
				panic("reflect.Select: RecvDir case has Send value")
			}
			ch := c.Chan
			if !ch.IsValid() {
				break
			}
			ch.mustBe(Chan)
			ch.mustBeExported()
			tt := (*chanType)(unsafe.Pointer(ch.typ()))
			if ChanDir(tt.Dir)&RecvDir == 0 {
				panic("reflect.Select: RecvDir case using send-only channel")
			}
			rc.ch = ch.pointer()
			rc.typ = toRType(&tt.Type)
			// Allocate a receive buffer for the element.
			rc.val = unsafe_New(tt.Elem)
		}
	}

	chosen, recvOK = rselect(runcases)
	if runcases[chosen].dir == SelectRecv {
		tt := (*chanType)(unsafe.Pointer(runcases[chosen].typ))
		t := tt.Elem
		p := runcases[chosen].val
		fl := flag(t.Kind())
		if !t.IsDirectIface() {
			recv = Value{t, p, fl | flagIndir}
		} else {
			recv = Value{t, *(*unsafe.Pointer)(p), fl}
		}
	}
	return chosen, recv, recvOK
}
3032
3033
3036
3037
3038
3039
// unsafe_New allocates one value of the given type.
// Implemented in the runtime.
func unsafe_New(*abi.Type) unsafe.Pointer

// unsafe_NewArray allocates an array of n values of the given type.
// Implemented in the runtime.
func unsafe_NewArray(*abi.Type, int) unsafe.Pointer
3044
3045
3046
// MakeSlice creates a new zero-initialized slice value
// for the specified slice type, length, and capacity.
func MakeSlice(typ Type, len, cap int) Value {
	if typ.Kind() != Slice {
		panic("reflect.MakeSlice of non-slice type")
	}
	if len < 0 {
		panic("reflect.MakeSlice: negative len")
	}
	if cap < 0 {
		panic("reflect.MakeSlice: negative cap")
	}
	if len > cap {
		panic("reflect.MakeSlice: len > cap")
	}

	s := unsafeheader.Slice{Data: unsafe_NewArray(&(typ.Elem().(*rtype).t), cap), Len: len, Cap: cap}
	return Value{&typ.(*rtype).t, unsafe.Pointer(&s), flagIndir | flag(Slice)}
}
3064
3065
3066
3067
3068
// SliceAt returns a Value representing a slice whose underlying
// data starts at p, with length and capacity equal to n.
//
// This is like [unsafe.Slice].
func SliceAt(typ Type, p unsafe.Pointer, n int) Value {
	// Let the runtime validate the (pointer, length) pair.
	unsafeslice(typ.common(), p, n)
	s := unsafeheader.Slice{Data: p, Len: n, Cap: n}
	return Value{SliceOf(typ).common(), unsafe.Pointer(&s), flagIndir | flag(Slice)}
}
3074
3075
// MakeChan creates a new channel with the specified type and buffer size.
func MakeChan(typ Type, buffer int) Value {
	if typ.Kind() != Chan {
		panic("reflect.MakeChan of non-chan type")
	}
	if buffer < 0 {
		panic("reflect.MakeChan: negative buffer size")
	}
	if typ.ChanDir() != BothDir {
		panic("reflect.MakeChan: unidirectional channel type")
	}
	t := typ.common()
	ch := makechan(t, buffer)
	return Value{t, ch, flag(Chan)}
}

// MakeMap creates a new map with the specified type.
func MakeMap(typ Type) Value {
	return MakeMapWithSize(typ, 0)
}

// MakeMapWithSize creates a new map with the specified type
// and initial space for approximately n elements.
func MakeMapWithSize(typ Type, n int) Value {
	if typ.Kind() != Map {
		panic("reflect.MakeMapWithSize of non-map type")
	}
	t := typ.common()
	m := makemap(t, n)
	return Value{t, m, flag(Map)}
}
3106
3107
3108
3109
// Indirect returns the value that v points to.
// If v is a nil pointer, Indirect returns a zero Value.
// If v is not a pointer, Indirect returns v.
func Indirect(v Value) Value {
	if v.Kind() != Pointer {
		return v
	}
	return v.Elem()
}

// ValueOf returns a new Value initialized to the concrete value
// stored in the interface i. ValueOf(nil) returns the zero Value.
func ValueOf(i any) Value {
	if i == nil {
		return Value{}
	}
	return unpackEface(i)
}
3125
3126
3127
3128
3129
3130
// Zero returns a Value representing the zero value for the specified type.
// The result is different from the zero Value, which represents no value
// at all. For example, Zero(TypeOf(42)) returns a Value with Kind Int and
// value 0. The returned value is neither addressable nor settable.
func Zero(typ Type) Value {
	if typ == nil {
		panic("reflect: Zero(nil)")
	}
	t := &typ.(*rtype).t
	fl := flag(t.Kind())
	if !t.IsDirectIface() {
		var p unsafe.Pointer
		if t.Size() <= abi.ZeroValSize {
			// Small enough: share the read-only zero buffer instead
			// of allocating.
			p = unsafe.Pointer(&zeroVal[0])
		} else {
			p = unsafe_New(t)
		}
		return Value{t, p, fl | flagIndir}
	}
	// Pointer-shaped zero value is just a nil word.
	return Value{t, nil, fl}
}

// zeroVal is a buffer of all-zero bytes shared by small zero values.
// Must match the runtime's zeroVal size (abi.ZeroValSize).
var zeroVal [abi.ZeroValSize]byte
3151
3152
3153
// New returns a Value representing a pointer to a new zero value
// for the specified type. That is, the returned Value's Type is
// PointerTo(typ).
func New(typ Type) Value {
	if typ == nil {
		panic("reflect: New(nil)")
	}
	t := &typ.(*rtype).t
	pt := ptrTo(t)
	if !pt.IsDirectIface() {
		// This is a pointer to a not-in-heap type.
		panic("reflect: New of type that may not be allocated in heap (possibly undefined cgo C type)")
	}
	ptr := unsafe_New(t)
	fl := flag(Pointer)
	return Value{pt, ptr, fl}
}

// NewAt returns a Value representing a pointer to a value of the
// specified type, using p as that pointer.
func NewAt(typ Type, p unsafe.Pointer) Value {
	fl := flag(Pointer)
	t := typ.(*rtype)
	return Value{t.ptrTo(), p, fl}
}
3176
3177
3178
3179
3180
3181
// assignTo returns a value v that can be assigned directly to dst.
// It panics if v is not assignable to dst.
// For a conversion to an interface type, target, if not nil,
// is a suggested scratch space to use.
// target must be initialized memory (or nil).
func (v Value) assignTo(context string, dst *abi.Type, target unsafe.Pointer) Value {
	if v.flag&flagMethod != 0 {
		v = makeMethodValue(context, v)
	}

	switch {
	case directlyAssignable(dst, v.typ()):
		// Overwrite type so that they match.
		// Same memory layout, so no harm done.
		fl := v.flag&(flagAddr|flagIndir) | v.flag.ro()
		fl |= flag(dst.Kind())
		return Value{dst, v.ptr, fl}

	case implements(dst, v.typ()):
		if v.Kind() == Interface && v.IsNil() {
			// A nil ReadWriter passed to nil Reader is OK,
			// but using ifaceE2I below will panic.
			// Avoid the panic by returning a nil dst (e.g., Reader) explicitly.
			return Value{dst, nil, flag(Interface)}
		}
		x := valueInterface(v, false)
		if target == nil {
			target = unsafe_New(dst)
		}
		if dst.NumMethod() == 0 {
			// Empty interface: store the eface pair directly.
			*(*any)(target) = x
		} else {
			// Non-empty interface: let the runtime build the itab.
			ifaceE2I(dst, x, target)
		}
		return Value{dst, target, flagIndir | flag(Interface)}
	}

	// Failed.
	panic(context + ": value of type " + stringFor(v.typ()) + " is not assignable to type " + stringFor(dst))
}
3217
3218
3219
3220
3221 func (v Value) Convert(t Type) Value {
3222 if v.flag&flagMethod != 0 {
3223 v = makeMethodValue("Convert", v)
3224 }
3225 op := convertOp(t.common(), v.typ())
3226 if op == nil {
3227 panic("reflect.Value.Convert: value of type " + stringFor(v.typ()) + " cannot be converted to type " + t.String())
3228 }
3229 return op(v, t)
3230 }
3231
3232
3233
3234 func (v Value) CanConvert(t Type) bool {
3235 vt := v.Type()
3236 if !vt.ConvertibleTo(t) {
3237 return false
3238 }
3239
3240
3241 switch {
3242 case vt.Kind() == Slice && t.Kind() == Array:
3243 if t.Len() > v.Len() {
3244 return false
3245 }
3246 case vt.Kind() == Slice && t.Kind() == Pointer && t.Elem().Kind() == Array:
3247 n := t.Elem().Len()
3248 if n > v.Len() {
3249 return false
3250 }
3251 }
3252 return true
3253 }
3254
3255
3256
3257
3258
3259 func (v Value) Comparable() bool {
3260 k := v.Kind()
3261 switch k {
3262 case Invalid:
3263 return false
3264
3265 case Array:
3266 switch v.Type().Elem().Kind() {
3267 case Interface, Array, Struct:
3268 for i := 0; i < v.Type().Len(); i++ {
3269 if !v.Index(i).Comparable() {
3270 return false
3271 }
3272 }
3273 return true
3274 }
3275 return v.Type().Comparable()
3276
3277 case Interface:
3278 return v.IsNil() || v.Elem().Comparable()
3279
3280 case Struct:
3281 for _, value := range v.Fields() {
3282 if !value.Comparable() {
3283 return false
3284 }
3285 }
3286 return true
3287
3288 default:
3289 return v.Type().Comparable()
3290 }
3291 }
3292
3293
3294
3295
3296
3297
3298
3299
3300
// Equal reports true if v is equal to u.
// For two invalid values, Equal will report true.
// For an interface value, Equal will compare the value within the interface.
// Otherwise, if the values have different types, Equal will report false.
// Otherwise, for arrays and structs Equal will compare each element in order,
// and report false if it finds non-equal elements.
// During all comparisons, if values of the same type are compared,
// and the type is not comparable, Equal will panic.
func (v Value) Equal(u Value) bool {
	if v.Kind() == Interface {
		v = v.Elem()
	}
	if u.Kind() == Interface {
		u = u.Elem()
	}

	if !v.IsValid() || !u.IsValid() {
		return v.IsValid() == u.IsValid()
	}

	if v.Kind() != u.Kind() || v.Type() != u.Type() {
		return false
	}

	// Handle each Kind directly rather than calling valueInterface
	// to avoid allocating. Cases that `break` fall through to the
	// "not comparable" panic at the bottom.
	switch v.Kind() {
	default:
		panic("reflect.Value.Equal: invalid Kind")
	case Bool:
		return v.Bool() == u.Bool()
	case Int, Int8, Int16, Int32, Int64:
		return v.Int() == u.Int()
	case Uint, Uint8, Uint16, Uint32, Uint64, Uintptr:
		return v.Uint() == u.Uint()
	case Float32, Float64:
		return v.Float() == u.Float()
	case Complex64, Complex128:
		return v.Complex() == u.Complex()
	case String:
		return v.String() == u.String()
	case Chan, Pointer, UnsafePointer:
		return v.Pointer() == u.Pointer()
	case Array:
		// u and v have the same type, so they have the same length.
		vl := v.Len()
		if vl == 0 {
			// A zero-length array of a non-comparable element type
			// still must panic (e.g. [0]func()).
			if !v.Type().Elem().Comparable() {
				break
			}
			return true
		}
		for i := 0; i < vl; i++ {
			if !v.Index(i).Equal(u.Index(i)) {
				return false
			}
		}
		return true
	case Struct:
		// u and v have the same type, so they have the same fields.
		nf := v.NumField()
		for i := 0; i < nf; i++ {
			if !v.Field(i).Equal(u.Field(i)) {
				return false
			}
		}
		return true
	case Func, Map, Slice:
		// Never comparable, even to themselves.
		break
	}
	panic("reflect.Value.Equal: values of type " + v.Type().String() + " are not comparable")
}
3366
3367
3368
// convertOp returns the function to convert a value of type src
// to a value of type dst. If the conversion is illegal, convertOp
// returns nil. The kind-specific cases are checked first; the
// named-type / underlying-type / interface rules at the bottom only
// apply when no kind-specific rule matched, so the order here matters.
func convertOp(dst, src *abi.Type) func(Value, Type) Value {
	switch Kind(src.Kind()) {
	case Int, Int8, Int16, Int32, Int64:
		switch Kind(dst.Kind()) {
		case Int, Int8, Int16, Int32, Int64, Uint, Uint8, Uint16, Uint32, Uint64, Uintptr:
			return cvtInt
		case Float32, Float64:
			return cvtIntFloat
		case String:
			return cvtIntString
		}

	case Uint, Uint8, Uint16, Uint32, Uint64, Uintptr:
		switch Kind(dst.Kind()) {
		case Int, Int8, Int16, Int32, Int64, Uint, Uint8, Uint16, Uint32, Uint64, Uintptr:
			return cvtUint
		case Float32, Float64:
			return cvtUintFloat
		case String:
			return cvtUintString
		}

	case Float32, Float64:
		switch Kind(dst.Kind()) {
		case Int, Int8, Int16, Int32, Int64:
			return cvtFloatInt
		case Uint, Uint8, Uint16, Uint32, Uint64, Uintptr:
			return cvtFloatUint
		case Float32, Float64:
			return cvtFloat
		}

	case Complex64, Complex128:
		switch Kind(dst.Kind()) {
		case Complex64, Complex128:
			return cvtComplex
		}

	case String:
		// string -> []byte / []rune, only for the unnamed slice
		// element types (pkg path empty).
		if dst.Kind() == abi.Slice && pkgPathFor(dst.Elem()) == "" {
			switch Kind(dst.Elem().Kind()) {
			case Uint8:
				return cvtStringBytes
			case Int32:
				return cvtStringRunes
			}
		}

	case Slice:
		// []byte / []rune -> string, only for the unnamed slice
		// element types (pkg path empty).
		if dst.Kind() == abi.String && pkgPathFor(src.Elem()) == "" {
			switch Kind(src.Elem().Kind()) {
			case Uint8:
				return cvtBytesString
			case Int32:
				return cvtRunesString
			}
		}

		// "x is a slice, T is a pointer-to-array type,
		// and the slice and array types have identical element types."
		if dst.Kind() == abi.Pointer && dst.Elem().Kind() == abi.Array && src.Elem() == dst.Elem().Elem() {
			return cvtSliceArrayPtr
		}

		// "x is a slice, T is an array type,
		// and the slice and array types have identical element types."
		if dst.Kind() == abi.Array && src.Elem() == dst.Elem() {
			return cvtSliceArray
		}

	case Chan:
		if dst.Kind() == abi.Chan && specialChannelAssignability(dst, src) {
			return cvtDirect
		}
	}

	// dst and src have same underlying type.
	if haveIdenticalUnderlyingType(dst, src, false) {
		return cvtDirect
	}

	// dst and src are non-defined pointer types with same underlying base type.
	if dst.Kind() == abi.Pointer && nameFor(dst) == "" &&
		src.Kind() == abi.Pointer && nameFor(src) == "" &&
		haveIdenticalUnderlyingType(elem(dst), elem(src), false) {
		return cvtDirect
	}

	if implements(dst, src) {
		if src.Kind() == abi.Interface {
			return cvtI2I
		}
		return cvtT2I
	}

	return nil
}
3464
3465
3466
3467 func makeInt(f flag, bits uint64, t Type) Value {
3468 typ := t.common()
3469 ptr := unsafe_New(typ)
3470 switch typ.Size() {
3471 case 1:
3472 *(*uint8)(ptr) = uint8(bits)
3473 case 2:
3474 *(*uint16)(ptr) = uint16(bits)
3475 case 4:
3476 *(*uint32)(ptr) = uint32(bits)
3477 case 8:
3478 *(*uint64)(ptr) = bits
3479 }
3480 return Value{typ, ptr, f | flagIndir | flag(typ.Kind())}
3481 }
3482
3483
3484
3485 func makeFloat(f flag, v float64, t Type) Value {
3486 typ := t.common()
3487 ptr := unsafe_New(typ)
3488 switch typ.Size() {
3489 case 4:
3490 *(*float32)(ptr) = float32(v)
3491 case 8:
3492 *(*float64)(ptr) = v
3493 }
3494 return Value{typ, ptr, f | flagIndir | flag(typ.Kind())}
3495 }
3496
3497
3498 func makeFloat32(f flag, v float32, t Type) Value {
3499 typ := t.common()
3500 ptr := unsafe_New(typ)
3501 *(*float32)(ptr) = v
3502 return Value{typ, ptr, f | flagIndir | flag(typ.Kind())}
3503 }
3504
3505
3506
3507 func makeComplex(f flag, v complex128, t Type) Value {
3508 typ := t.common()
3509 ptr := unsafe_New(typ)
3510 switch typ.Size() {
3511 case 8:
3512 *(*complex64)(ptr) = complex64(v)
3513 case 16:
3514 *(*complex128)(ptr) = v
3515 }
3516 return Value{typ, ptr, f | flagIndir | flag(typ.Kind())}
3517 }
3518
3519 func makeString(f flag, v string, t Type) Value {
3520 ret := New(t).Elem()
3521 ret.SetString(v)
3522 ret.flag = ret.flag&^flagAddr | f
3523 return ret
3524 }
3525
3526 func makeBytes(f flag, v []byte, t Type) Value {
3527 ret := New(t).Elem()
3528 ret.SetBytes(v)
3529 ret.flag = ret.flag&^flagAddr | f
3530 return ret
3531 }
3532
3533 func makeRunes(f flag, v []rune, t Type) Value {
3534 ret := New(t).Elem()
3535 ret.setRunes(v)
3536 ret.flag = ret.flag&^flagAddr | f
3537 return ret
3538 }
3539
3540
3541
3542
3543
3544
3545
3546 func cvtInt(v Value, t Type) Value {
3547 return makeInt(v.flag.ro(), uint64(v.Int()), t)
3548 }
3549
3550
3551 func cvtUint(v Value, t Type) Value {
3552 return makeInt(v.flag.ro(), v.Uint(), t)
3553 }
3554
3555
3556 func cvtFloatInt(v Value, t Type) Value {
3557 return makeInt(v.flag.ro(), uint64(int64(v.Float())), t)
3558 }
3559
3560
3561 func cvtFloatUint(v Value, t Type) Value {
3562 return makeInt(v.flag.ro(), uint64(v.Float()), t)
3563 }
3564
3565
3566 func cvtIntFloat(v Value, t Type) Value {
3567 return makeFloat(v.flag.ro(), float64(v.Int()), t)
3568 }
3569
3570
3571 func cvtUintFloat(v Value, t Type) Value {
3572 return makeFloat(v.flag.ro(), float64(v.Uint()), t)
3573 }
3574
3575
// cvtFloat: convertOp for floatXX -> floatXX.
func cvtFloat(v Value, t Type) Value {
	if v.Type().Kind() == Float32 && t.Kind() == Float32 {
		// Don't do any conversion if both types have underlying type float32.
		// This avoids converting to float64 and back, which would
		// convert a signaling NaN to a quiet NaN. Reads the float32
		// bits straight out of v.ptr (assumes v is stored indirectly,
		// which holds for float32 values here).
		return makeFloat32(v.flag.ro(), *(*float32)(v.ptr), t)
	}
	return makeFloat(v.flag.ro(), v.Float(), t)
}
3585
3586
3587 func cvtComplex(v Value, t Type) Value {
3588 return makeComplex(v.flag.ro(), v.Complex(), t)
3589 }
3590
3591
3592 func cvtIntString(v Value, t Type) Value {
3593 s := "\uFFFD"
3594 if x := v.Int(); int64(rune(x)) == x {
3595 s = string(rune(x))
3596 }
3597 return makeString(v.flag.ro(), s, t)
3598 }
3599
3600
3601 func cvtUintString(v Value, t Type) Value {
3602 s := "\uFFFD"
3603 if x := v.Uint(); uint64(rune(x)) == x {
3604 s = string(rune(x))
3605 }
3606 return makeString(v.flag.ro(), s, t)
3607 }
3608
3609
3610 func cvtBytesString(v Value, t Type) Value {
3611 return makeString(v.flag.ro(), string(v.Bytes()), t)
3612 }
3613
3614
3615 func cvtStringBytes(v Value, t Type) Value {
3616 return makeBytes(v.flag.ro(), []byte(v.String()), t)
3617 }
3618
3619
3620 func cvtRunesString(v Value, t Type) Value {
3621 return makeString(v.flag.ro(), string(v.runes()), t)
3622 }
3623
3624
3625 func cvtStringRunes(v Value, t Type) Value {
3626 return makeRunes(v.flag.ro(), []rune(v.String()), t)
3627 }
3628
3629
// cvtSliceArrayPtr: convertOp for []T -> *[N]T.
// Panics if the slice is shorter than the array length.
func cvtSliceArrayPtr(v Value, t Type) Value {
	n := t.Elem().Len()
	if n > v.Len() {
		panic("reflect: cannot convert slice with length " + strconv.Itoa(v.Len()) + " to pointer to array with length " + strconv.Itoa(n))
	}
	// The result aliases the slice's backing store: the array pointer
	// is the slice's data pointer, so no copy is made.
	h := (*unsafeheader.Slice)(v.ptr)
	// Clear indir/addr and the old kind bits; the result is a
	// direct (pointer-shaped) Pointer value. RO bits are preserved.
	return Value{t.common(), h.Data, v.flag&^(flagIndir|flagAddr|flagKindMask) | flag(Pointer)}
}
3638
3639
// cvtSliceArray: convertOp for []T -> [N]T.
// Panics if the slice is shorter than the array length.
func cvtSliceArray(v Value, t Type) Value {
	n := t.Len()
	if n > v.Len() {
		panic("reflect: cannot convert slice with length " + strconv.Itoa(v.Len()) + " to array with length " + strconv.Itoa(n))
	}
	h := (*unsafeheader.Slice)(v.ptr)
	typ := t.common()
	ptr := h.Data
	// Unlike the pointer-to-array conversion, this one copies the
	// first n elements out of the slice's backing store so the
	// resulting array value is independent of the slice.
	c := unsafe_New(typ)
	typedmemmove(typ, c, ptr)
	ptr = c

	// Clear addressability and the old kind bits; keep indir and RO.
	return Value{typ, ptr, v.flag&^(flagAddr|flagKindMask) | flag(Array)}
}
3654
3655
// cvtDirect: convertOp for a conversion that changes only the type,
// not the memory layout (e.g. between types with identical
// underlying types).
func cvtDirect(v Value, typ Type) Value {
	f := v.flag
	t := typ.common()
	ptr := v.ptr
	if f&flagAddr != 0 {
		// indirect, mutable word - make a copy so the result does
		// not alias the caller's addressable storage
		c := unsafe_New(t)
		typedmemmove(t, c, ptr)
		ptr = c
		f &^= flagAddr
	}
	return Value{t, ptr, v.flag.ro() | f} // v.flag.ro()|f == f?
}
3669
3670
3671 func cvtT2I(v Value, typ Type) Value {
3672 target := unsafe_New(typ.common())
3673 x := valueInterface(v, false)
3674 if typ.NumMethod() == 0 {
3675 *(*any)(target) = x
3676 } else {
3677 ifaceE2I(typ.common(), x, target)
3678 }
3679 return Value{typ.common(), target, v.flag.ro() | flagIndir | flag(Interface)}
3680 }
3681
3682
3683 func cvtI2I(v Value, typ Type) Value {
3684 if v.IsNil() {
3685 ret := Zero(typ)
3686 ret.flag |= v.flag.ro()
3687 return ret
3688 }
3689 return cvtT2I(v.Elem(), typ)
3690 }
3691
3692
3693
3694
// The bodiless functions below are implemented in package runtime.

// chancap returns the capacity of the channel ch.
func chancap(ch unsafe.Pointer) int

// chanclose closes the channel ch.
func chanclose(ch unsafe.Pointer)

// chanlen returns the number of elements queued in the channel ch.
func chanlen(ch unsafe.Pointer) int

// chanrecv receives from channel ch into val; nb selects non-blocking
// mode. NOTE(review): the exact meaning of (selected, received) is
// defined by the runtime's chanrecv — see runtime/chan.go.
func chanrecv(ch unsafe.Pointer, nb bool, val unsafe.Pointer) (selected, received bool)

// chansend0 is the raw runtime channel send.
func chansend0(ch unsafe.Pointer, val unsafe.Pointer, nb bool) bool

// chansend sends *val on ch (non-blocking if nb). It first marks the
// contents of val as escaping, compensating for the runtime stub's
// shallow-copy behavior (see contentEscapes).
func chansend(ch unsafe.Pointer, val unsafe.Pointer, nb bool) bool {
	contentEscapes(val)
	return chansend0(ch, val, nb)
}

// makechan makes a new channel of type typ with buffer size `size`.
func makechan(typ *abi.Type, size int) (ch unsafe.Pointer)

// makemap makes a new map of type t with initial space for cap elements.
func makemap(t *abi.Type, cap int) (m unsafe.Pointer)
3723
3724
// mapaccess returns a pointer to the value for key in map m,
// or nil if the key is not present. Implemented in the runtime.
func mapaccess(t *abi.Type, m unsafe.Pointer, key unsafe.Pointer) (val unsafe.Pointer)

// mapaccess_faststr is the string-key fast path of mapaccess.
func mapaccess_faststr(t *abi.Type, m unsafe.Pointer, key string) (val unsafe.Pointer)

// mapassign0 is the raw runtime map assignment.
func mapassign0(t *abi.Type, m unsafe.Pointer, key, val unsafe.Pointer)

// mapassign stores *val under *key in map m. It first marks the
// contents of key and val as escaping, compensating for the runtime
// stub's shallow-copy behavior (see contentEscapes).
func mapassign(t *abi.Type, m unsafe.Pointer, key, val unsafe.Pointer) {
	contentEscapes(key)
	contentEscapes(val)
	mapassign0(t, m, key, val)
}

// mapassign_faststr0 is the raw string-key fast path of map assignment.
func mapassign_faststr0(t *abi.Type, m unsafe.Pointer, key string, val unsafe.Pointer)

// mapassign_faststr is the string-key fast path of mapassign; it marks
// the key's backing bytes and the value's contents as escaping before
// delegating to the runtime.
func mapassign_faststr(t *abi.Type, m unsafe.Pointer, key string, val unsafe.Pointer) {
	contentEscapes((*unsafeheader.String)(unsafe.Pointer(&key)).Data)
	contentEscapes(val)
	mapassign_faststr0(t, m, key, val)
}

// mapdelete removes the entry for *key from map m.
func mapdelete(t *abi.Type, m unsafe.Pointer, key unsafe.Pointer)

// mapdelete_faststr is the string-key fast path of mapdelete.
func mapdelete_faststr(t *abi.Type, m unsafe.Pointer, key string)

// maplen returns the number of entries in map m.
func maplen(m unsafe.Pointer) int

// mapclear removes all entries from map m.
func mapclear(t *abi.Type, m unsafe.Pointer)
3768
3769
3770
3771
3772
3773
3774
3775
3776
3777
3778
3779
3780
3781
3782
3783
3784
3785
3786
3787
3788
3789
3790
3791
3792
3793
3794
// call invokes f with stackArgsSize bytes of stack arguments at
// stackArgs plus any register arguments in regArgs; frameSize is the
// total frame size and stackRetOffset is where stack results begin.
// NOTE(review): full calling-convention details are in the runtime's
// reflectcall documentation — confirm there before changing callers.
func call(stackArgsType *abi.Type, f, stackArgs unsafe.Pointer, stackArgsSize, stackRetOffset, frameSize uint32, regArgs *abi.RegArgs)

// ifaceE2I converts the empty-interface value src to the non-empty
// interface type t, storing the result at dst.
func ifaceE2I(t *abi.Type, src any, dst unsafe.Pointer)

// memmove copies size bytes from src to dst (no type information).
func memmove(dst, src unsafe.Pointer, size uintptr)

// typedmemmove copies a value of type t from src to dst.
func typedmemmove(t *abi.Type, dst, src unsafe.Pointer)

// typedmemclr zeros the value of type t at ptr.
func typedmemclr(t *abi.Type, ptr unsafe.Pointer)

// typedmemclrpartial is like typedmemclr but clears only size bytes
// starting off bytes into the value.
func typedmemclrpartial(t *abi.Type, ptr unsafe.Pointer, off, size uintptr)

// typedslicecopy copies a slice of t values from src to dst,
// returning the number of elements copied.
func typedslicecopy(t *abi.Type, dst, src unsafeheader.Slice) int

// typedarrayclear zeroes len elements of type elemType starting at ptr.
func typedarrayclear(elemType *abi.Type, ptr unsafe.Pointer, len int)

// typehash computes the hash of the value of type t at p, seeded by h,
// mirroring the runtime's typehash.
func typehash(t *abi.Type, p unsafe.Pointer, h uintptr) uintptr

// verifyNotInHeapPtr checks a pointer destined for a not-in-heap
// pointer slot. Implemented in the runtime.
func verifyNotInHeapPtr(p uintptr) bool

// growslice grows the slice old of element type t to hold at least
// num more elements.
func growslice(t *abi.Type, old unsafeheader.Slice, num int) unsafeheader.Slice

// unsafeslice validates forming a slice of len elements of type t at
// ptr, as in unsafe.Slice. Implemented in the runtime.
func unsafeslice(t *abi.Type, ptr unsafe.Pointer, len int)
3842
3843
3844
3845
// escapes makes the compiler's escape analysis treat x as escaping to
// the heap. The conditional store into the package-level dummy never
// runs in practice (nothing visible here ever sets dummy.b), but the
// compiler must assume it might.
func escapes(x any) {
	if dummy.b {
		dummy.x = x
	}
}

// dummy is the never-written sink used by escapes and contentEscapes
// to fool escape analysis.
var dummy struct {
	b bool
	x any
}
3856
3857
3858
3859
3860
// contentEscapes makes the content of the value pointed to by x escape
// (conservatively): the pointer itself is not escaped, but whatever it
// points to is treated as heap-reachable. The dereference sits behind
// the never-taken dummy.b branch, so it is analyzed but not executed.
func contentEscapes(x unsafe.Pointer) {
	if dummy.b {
		escapes(*(*any)(x))
	}
}
3866
View as plain text