Source file src/internal/runtime/maps/runtime_alg.go

     1  // Copyright 2026 The Go Authors. All rights reserved.
     2  // Use of this source code is governed by a BSD-style
     3  // license that can be found in the LICENSE file.
     4  
     5  package maps
     6  
     7  import (
     8  	"internal/byteorder"
     9  	"internal/cpu"
    10  	"internal/goarch"
    11  	"unsafe"
    12  )
    13  
// UseAeshash records whether the processor we're running on
// actually supports the instructions used by the AES-based
// hash implementation. It is set during AlgInit.
var UseAeshash bool

// hashRandomBytes is the amount of random key material used to seed
// the hash functions: 64 bytes on 32-bit systems, 128 on 64-bit.
const hashRandomBytes = goarch.PtrSize / 4 * 64

// aeskeysched holds the random key schedule used to seed the
// AES-based hash function. Filled in by initAlgAES.
var aeskeysched [hashRandomBytes]byte

// hashkey is used in hash{32,64}.go to seed the non-AES fallback
// hash functions. Always initialized by AlgInit.
var hashkey [4]uintptr
    26  
// AlgInit initializes the map hash function state: it always seeds
// hashkey, and installs the AES-based hash implementation when the
// CPU provides the required instructions.
func AlgInit() {
	// Always initialize hashkey, even when the AES path is taken below,
	// so the fallback hashes are usable.
	//
	// See #78073
	for i := range hashkey {
		hashkey[i] = uintptr(bootstrapRand())
	}

	// Install AES hash algorithms if the instructions needed are present.
	if (goarch.GOARCH == "386" || goarch.GOARCH == "amd64") &&
		cpu.X86.HasAES && // AESENC
		cpu.X86.HasSSSE3 && // PSHUFB
		cpu.X86.HasSSE41 { // PINSR{D,Q}

		// In aeshashbody (that is used by memhash & strhash)
		// we have global variables that should be properly aligned.
		//
		// See #12415
		if !checkMasksAndShiftsAlignment() {
			fatal("maps: global variables for AES hashing are not properly aligned!")
		}
		initAlgAES()

		if memHashUsesVAES {
			// We are using the intrinsics hash implementation.
			// Override UseAeshash in this case, since it uses VAES (AVX) instructions.
			// While the assembly implementation used AES-NI instructions,
			// simd intrinsics only provide access to AVX ones.
			UseAeshash = cpu.X86.HasAVX
		}
		return
	}
	if goarch.GOARCH == "arm64" && cpu.ARM64.HasAES {
		initAlgAES()
		return
	}
}
    64  
    65  func initAlgAES() {
    66  	UseAeshash = true
    67  	// Initialize with random data so hash collisions will be hard to engineer.
    68  	key := (*[hashRandomBytes / 8]uint64)(unsafe.Pointer(&aeskeysched))
    69  	for i := range key {
    70  		key[i] = bootstrapRand()
    71  	}
    72  }
    73  
    74  func strHashFallback(a unsafe.Pointer, h uintptr) uintptr {
    75  	type stringStruct struct {
    76  		str unsafe.Pointer
    77  		len int
    78  	}
    79  	x := (*stringStruct)(a)
    80  	return memHashFallback(x.str, h, uintptr(x.len))
    81  }
    82  
    83  //go:nosplit
    84  func add(p unsafe.Pointer, x uintptr) unsafe.Pointer {
    85  	return unsafe.Pointer(uintptr(p) + x)
    86  }
    87  
    88  // Note: These routines perform the read with a native endianness.
    89  func readUnaligned32(p unsafe.Pointer) uint32 {
    90  	q := (*[4]byte)(p)
    91  	if goarch.BigEndian {
    92  		return byteorder.BEUint32(q[:])
    93  	}
    94  	return byteorder.LEUint32(q[:])
    95  }
    96  
    97  func readUnaligned64(p unsafe.Pointer) uint64 {
    98  	q := (*[8]byte)(p)
    99  	if goarch.BigEndian {
   100  		return byteorder.BEUint64(q[:])
   101  	}
   102  	return byteorder.LEUint64(q[:])
   103  }
   104  

View as plain text